hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
973dc22dcea57170eb016b66956523d869b11b62
| 144
|
py
|
Python
|
docs/00.Python/demo_Chapter10/privateVariable.py
|
wan230114-dev/PythonNote
|
180e0908a647d22baf1354153c87da8c0cd67548
|
[
"Apache-2.0"
] | 2
|
2020-04-09T05:56:23.000Z
|
2021-03-25T18:42:36.000Z
|
docs/00.Python/demo_Chapter10/privateVariable.py
|
wan230114-dev/PythonNote
|
180e0908a647d22baf1354153c87da8c0cd67548
|
[
"Apache-2.0"
] | 22
|
2020-04-09T06:09:14.000Z
|
2021-01-06T01:05:32.000Z
|
docs/00.Python/demo_Chapter10/privateVariable.py
|
wan230114-dev/PythonNote
|
180e0908a647d22baf1354153c87da8c0cd67548
|
[
"Apache-2.0"
] | 6
|
2020-03-09T07:19:21.000Z
|
2021-01-05T23:23:42.000Z
|
print("主程序中:", dir())
from privateVariable_pkg import aaa
print("主程序中:", dir())
# import privateVariable_pkg
# print(dir(privateVariable_pkg))
| 20.571429
| 35
| 0.75
| 18
| 144
| 5.833333
| 0.444444
| 0.514286
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 144
| 6
| 36
| 24
| 0.807692
| 0.402778
| 0
| 0.666667
| 0
| 0
| 0.120482
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
9767d57d6b9c27fa857fad7d3b3f01563409b573
| 3,671
|
py
|
Python
|
myproject/core/tests/test_views_person.py
|
rg3915/django-example
|
67e82767a748f9ab1ae23079f2b34b86ce77cf06
|
[
"MIT"
] | 2
|
2015-10-24T10:50:36.000Z
|
2019-02-05T18:31:30.000Z
|
myproject/core/tests/test_views_person.py
|
rg3915/django-example
|
67e82767a748f9ab1ae23079f2b34b86ce77cf06
|
[
"MIT"
] | null | null | null |
myproject/core/tests/test_views_person.py
|
rg3915/django-example
|
67e82767a748f9ab1ae23079f2b34b86ce77cf06
|
[
"MIT"
] | 2
|
2018-06-27T14:28:27.000Z
|
2019-02-03T17:36:18.000Z
|
from django.test import TestCase
from django.core.urlresolvers import reverse as r
class PersonTest(TestCase):
def setUp(self):
self.resp = self.client.get(r('person_add'))
def test_get(self):
'GET /person/add/ must return status code 200.'
self.assertEqual(200, self.resp.status_code)
def test_template(self):
'Response should be a rendered template.'
self.assertTemplateUsed(
self.resp, 'core/person/person_create_form.html')
def test_html(self):
'Html must contain input controls.'
self.assertContains(self.resp, '<form')
self.assertContains(self.resp, '<input', 8)
self.assertContains(self.resp, 'type="text"', 4)
self.assertContains(self.resp, 'type="email"')
self.assertContains(self.resp, 'type="submit"')
def test_csrf(self):
'Html must contain csrf token.'
self.assertContains(self.resp, 'csrfmiddlewaretoken')
# Não precisa deste teste em CreateView?
# def test_has_form(self):
# 'Context must have the person form.'
# form = self.resp.context['form']
# self.assertIsInstance(form, PersonForm)
class OccupationTest(TestCase):
def setUp(self):
self.resp = self.client.get(r('occupation_add'))
def test_get(self):
'GET /occupation/add/ must return status code 200.'
self.assertEqual(200, self.resp.status_code)
def test_template(self):
'Response should be a rendered template.'
self.assertTemplateUsed(
self.resp, 'core/person/occupation_create_form.html')
def test_html(self):
'Html must contain input controls.'
self.assertContains(self.resp, '<form')
self.assertContains(self.resp, '<input', 2)
self.assertContains(self.resp, 'type="text"', 1)
self.assertContains(self.resp, 'type="submit"')
def test_csrf(self):
'Html must contain csrf token.'
self.assertContains(self.resp, 'csrfmiddlewaretoken')
class AddressTest(TestCase):
def setUp(self):
self.resp = self.client.get(r('address_add'))
def test_get(self):
'GET /address/add/ must return status code 200.'
self.assertEqual(200, self.resp.status_code)
def test_template(self):
'Response should be a rendered template.'
self.assertTemplateUsed(
self.resp, 'core/person/address_create_form.html')
def test_html(self):
'Html must contain input controls.'
self.assertContains(self.resp, '<form')
self.assertContains(self.resp, '<input', 7)
self.assertContains(self.resp, 'type="text"', 5)
self.assertContains(self.resp, 'type="submit"')
def test_csrf(self):
'Html must contain csrf token.'
self.assertContains(self.resp, 'csrfmiddlewaretoken')
class PhoneTest(TestCase):
def setUp(self):
self.resp = self.client.get(r('phone_add'))
def test_get(self):
'GET /phone/add/ must return status code 200.'
self.assertEqual(200, self.resp.status_code)
def test_template(self):
'Response should be a rendered template.'
self.assertTemplateUsed(
self.resp, 'core/person/phone_create_form.html')
def test_html(self):
'Html must contain input controls.'
self.assertContains(self.resp, '<form')
self.assertContains(self.resp, '<input', 2)
self.assertContains(self.resp, 'type="text"', 1)
self.assertContains(self.resp, 'type="submit"')
def test_csrf(self):
'Html must contain csrf token.'
self.assertContains(self.resp, 'csrfmiddlewaretoken')
| 32.201754
| 65
| 0.64887
| 445
| 3,671
| 5.276404
| 0.159551
| 0.115843
| 0.196763
| 0.232538
| 0.850085
| 0.837308
| 0.774276
| 0.774276
| 0.774276
| 0.774276
| 0
| 0.011291
| 0.228003
| 3,671
| 113
| 66
| 32.486726
| 0.817219
| 0.2152
| 0
| 0.708861
| 0
| 0
| 0.288506
| 0.041379
| 0
| 0
| 0
| 0
| 0.367089
| 1
| 0.253165
| false
| 0
| 0.025316
| 0
| 0.329114
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
9776a953f9f653f74b9fb8df2a704a4330047c05
| 214,598
|
py
|
Python
|
DARKspam_2/darkspam.py
|
Alpha-Demon404/RE-14
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 39
|
2020-02-26T09:44:36.000Z
|
2022-03-23T00:18:25.000Z
|
DARKspam_2/darkspam.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 15
|
2020-05-14T10:07:26.000Z
|
2022-01-06T02:55:32.000Z
|
DARKspam_2/darkspam.py
|
B4BY-DG/reverse-enginnering
|
b5b46a9f0eee218f2a642b615c77135c33c6f4ad
|
[
"MIT"
] | 41
|
2020-03-16T22:36:38.000Z
|
2022-03-17T14:47:19.000Z
|
#Encrypted By xNot_Found
#Github : https://github.com/hatakecnk/
#Do Not Edit The Script To Avoid Errors
import marshal, zlib
exec(marshal.loads(zlib.decompress('x\x9c\xb4wc\x8f.\x0c\xb0\xe4\xd8>c\xdb\xb6m\xdb\xb6y\xc6\xb6m\xdb\xb6\xf9\x8cm\xdb:c\xf3\xbew\xb3\xd9\x7f\xb0\x9d\xeaJ\'\x9dt}j\x99\x00\xfd_\x03\xfd\xcf\x05\xffs\x17\xf6\xff\xc8\xf4?\x00\x03\xd9\x02\x01i\xff\xbf\x18\x18H\x1b\x18\xc8\x0c\x08\xc8\x1a\x04\xc8\x0c\x18\xc8\x1a\x14\xc8\x14\x04(\x18\xf8\x7f\xf1_\x16L\xed?R\xa1\xfc\xdf\x1aV\xbf\xff\x99\xbc\x8b\x1210\x90G\x0e\x10\x9b\xe0d\xbf\xc9\xff\'\tW\xa2\xff\x95hq7I\xa6\x87\xab9^\x1fu\x19m\xb3\xdb\x1a\xc5\xda\xfa\x8f\xff\x0fp,:nwv6\xfd\x1b*\xe5)\xd4*\xa5\xcfL6\xa1\xce\x7f!\xce\xfd\xa9\xee\x0c\xb8\x1b\xbf%F\x7f\x811\xc7\xb1\xa1$\x1f\x7f\x9f\x07\x04\xb3\x06\x04\xc9\x05\x05\xd3\x08\x05\xc9\x07\x04\xd3\n\x04G\t\x05\x85\x06~\x83\x7f\xcb\x0c\xab~\x13\x99G~\xaf\x1b\xde\xbe\x19\x8es*\xa0\x03\x03\xb7\x9b\x03\xd7\xc1\xc8?b~\x04\xa4(\x91\xbf\xe2E]X,\xf1@q\x89#\xcb\x9f\xd5\xabg~\x81\xe7\xd6\x7f\xe9\xd5\xbd\x9f\x1c\xca\xfaA\x9d\xcbL\x9b\xb2g\xd0\xa3\xa70?\xfb\x13^\xfd(o\x1f\x9aW\xfb\xb1\xfe\xf8\x19\x1a\xfe\'\xf8H\xe0\x1b\x03\xf6\x03\x04\xe2/M\xe8\xff\x97\xd4_\x1a\xc4\xff/\x84\xbf%)p\xd8\xf7\x9e;_\xc08l\xbf\xf3E7\xfe\xa1\xea\x1e\xc3 V\x0e*\xd2\xb2\x1f\x88\xa5\x008 
mc\xad\x9e\x0b\xf3b\xae\x1eS\x06\xe3uu\x11\t\xfeG{\xaa\xccn\xea\xdfs\xed\xbf\x9b\x87\xb5|\x86\xfe\xcb\xf0\\\\\xf1j17\x10\x16\xaeN\xadY\xd5\xf0x\xe4\x81\x7f\x0c\x94\xb3\xd9o\x12\xd6\xbaks)/|\xa4w\xc4H\xebo\xe1\x7fH-I\x90$Y\x193\x04pR\xfa\xc6\xb9H\xa1H\x14\x05\x81\xe6\xfb\x0f\x0cp\x14Wn\xff^\x04\xb9\xdb\xaf\xcf\xbdP\xf4\xf7IP\xfaK\x86\xffE\x12\xf9U\xad/\x8cO\xe7Lx\xa5;8\xcer\\\xca\x80\xe1x\xa2\x8e\xcd\\\xbc-\xea\xe4\x8e\x1e#E\xde\x11\xb6K"\xaeR\xbe\xb2J\x0f\x9d\xa8\xb7\xb8\xb8\xc1\x88u5?\xed\xd2\xd0\xec\xa1\xb8\xbbb\x87\xaap}%6\xb2F\xf1\x1f\x1dlB\x1c\xef@\xab\xe8\x90+\xe8@k\xe8\x18\xde\xf7N\x9b[%l\x95\xff\xb8\xd6\xb6\x08GZj}]\xce\xb7)\xc4,PI\xe4|\x10\x05\xb7y\xc1\x98K\xd5x\xf3\xf5\x96\xce=\t\x91A\x8d\x15c\xe2\xddR(\xcec{\xe4H\xcc\xe3\x1bC\xb5\xf9t$\xd0\tNcx\xff\xdc\x1c\xd5\xf8\xd6\x19\xfet\x8dW\xb0\x9a\xd9\xf4\xbe\xecV\xbb\x16\x11\xb0\x06\xf8\xc3\xf2\x176\x0b\x05\xca\x122\xae$\xfc\xca\x06$\xb0\x1f\xdc\x98\xe5\xe3\x1b\x1a\xc8\xfb\xd8\x0b\x17\xb4*\xb8:\xea\x13\xa4u|\xe5zkB\xaae\x0b\r\xfe@ \xdf\xfc\xae_\xff[\x16\xb2x?\x9b\xe1\x07s\x8f\xb9\x9c\xf5kjc\xb6\x07\x97\xb1\x0f\x11\xbf\xce\xc9\x7f\xde\x88Qb\xc82:\xc3\xe3\xcfev\xa4!\xb6tu&\x15\x1c\xb8Q\x08\xd4c\xfec\xdf\xa4B\xae_\x9b\x1f\xcd\x00\x18\xaf\xe1\xdc^\x1d\xf0m\xe4\x03V\xdd\r8\xcc\x1b\xff\xbb\xbb\xbb!\x18\xf9\x8eTH\x95P\xf3\xcc.;X2\xeb\x17\\T^#h\xc6>\xe6\x94\x88xl\x9a\xc1;e\xbc\x80\xcb\x05y\x17\xb6_:??\xd3\x9a\xcf\xad,\x96K\x82\x0c\xd7\xb9\x8d\xad\xaa\x80\x988\xd3r\xe91\x85\xe5I\xf1\xb4k\x84\x1dU\xbc:\x16~\x0e\xfa\xf0\xee\xf3\x93\xdaGy\x0cR\xbeUA\xdb>\xc8\x85\xd8\x89(s\x05\xde\x19He\x0e\xdaR\x0bR\xf7\x91\xbc\xa7\x1a\xc0e<\x05i}F>\x83j\xeeWJ;\xbe\x87<~H\xdd\xc5k\xd5\x12\x9d\x86\xb7\xf0\x89\x9bq\xbc7Y\t\x8d/~6\x1co\xf3\x8ao |\xa2\xf5\xf96[\xf2\xb7\xe3\xdbL\x85{\x04\xf4A%\xc5\xa2\x17\x0f)\x0b%\xbf\xaa\xc8\x9c\xfe\xeb}G\xb6i\xb8\xc3:\xf7_, 
\x0e\xdd\xbb\xf9\x15\xdb\xdb=Q\x8c?\xd7\xe0I\xba\x87gW\x1e\xb6#hi\xcak\x0fT$\x01\xd2\xa3\xfaU\x0c\x0b\xa2k\x12\x1c\x1cjUI\xed\xf6\xb4`\x15\x99kU\xb6\xee\xc7\xa8*\xfc\xc3\x92\x82\xa7\x7f\xb6\xa7\xd6n=6,\xe3*t\xe5\xb3\xa5\x17\xd1?\r\xb7\xb5\xa4\x81!\xd6JQZ\x00\x06\xc5\xd1/\xa6 S\xf3\x81{\x80bq\xc1WR\x0eM\xda\xdcV\xd2\x11\x92p\xcfUo)\x1f\xb9t\x93c,q\x0f\xff\xad.\x0e\x9eM\x95\x06\x03e\x17\x16*n.qnp\x03h\xe7\x04/W\xea\xf3;r\xcc\x8fv\x14\x8c\xc8\x90Bw\xfc\\\'G\xb0Xd\xa7\x83\xc2rC\xb7\xfe\x02p\x1f\xf7*w&\xc4\x83}\xd2\x1c\x9a\xe3o\xaf\xad\xd5\xc5\xc0\xf5\x95\xc3\x7f\x8c\t,\xd0\xe0\xad\xfd\xc1\xca\x83\xc1I\xd4\x00\xb2\xd5\xc1\xcaO\xc0\xc1\xad\x83\xe0k\xbc>\xcd\x1f\x82U\x9b\x07\xc6\xef\xdb\xd9\xdb\xb8\x8c\xe7\xf7 K\xaf\xae\\\x9dW\x16s\xb9\x9c/\x14\xf7\x1a{\xa5\x16t\xcd\xd6\xd8\xac&\xfd\xc8B\x0e\xb4Y\x15\xaa1X\xdf\x85^\x01\xf9\x97v\xec(\xe6\xc6#\xa2\xaeGH\xa5\xfe\x07\xfc>\x08\x06t\xbe\x1d\xb0\x80\xa8\xcaE\xd4\x11\xc0\xe9\n\xc3\xbf\xba!Z?\xcc\xa2\n#\x1d\xd3w\xf8\xd1R\xa1C#\xf1Sj\xae\xd2\x9a\xb9\xb4\xc6 
T\xe2,R\x16\xde\xdf~\xfd@q\xb1\x1c+\xf9\x16\xb9\x81\xd9=z/\x12\xa0@f\x97\xe4\x81\x90\xbe\xa3\x85s\x86\x05Y*\x1cJ\x16\x91V=\xd1}l\x9c\x15\x17\n-\xad\x1a.\'\xae#\xa9\x96"\x1fY\x1c\x06k\xb9IU\xebd\x17\xbb\xd546\xbfY0\xf2\x03\xa6H8\xa0\xe4\x10\xaf\xed\x00\x8e\\)\xaeBH\xbe!f~\x831\xdeq\xdf\x8b\xc9P\xa8\xa0\xfc\x9b0\xa6\x89{\x16\x1e\x85{{\xd68)`\x81\xfa\xdbr\x02C\x9fj#~\xd1\xe2(ae3\xf0\x8c\x96t\x89<-\x81\xfc\xd1\x8fq\x12~|\xc5\xbf\xc0\xb7\x07\x8d\xb932\xa8\xc6\xad\xf7,ze^\xe3\xd4\x91\x10S0\xa3.z\x85\x9bc\x14\xf3\x83\\\x89\xc5\x93;g>\xdb.\x84\xd0\xc9@d\x1a(s\x12\xdeve7>\xa0=7Zb\x16~\x82\xc1\xfa\xfa\xd9l\xa7M9KDF\xe3\xfdc|\x17\x9b\xef\xaf\x14k\xc1\x00\xe1\x9d9\xe6\xecbU\xf4\x0bI\x1a_\x1dH\xcfohL0\xc5e/\x89\x87\xdc\xe9n\x81*\x0e\xabj\x00\xa3Y+\x05\x8d8X\xb4Z\xaf\xae7\xbc\xbfG&T()\xe18\xe22=s\'1\xa4\xad\x1a\xf3\xaa.+\x93\x0e\x8a\xdb\xc71y\xb2m\xb2>&\xde?x\xac\xdc\x8b\xf7\xd7\x99u\xf8\x15e;~7\xf1\x1d=\xcd\xe8\x8d\xb0\x17\xa6{O\xb2\x90\xaf&]+\xa0!\xb9\xbe\xea~\x98\xb8AbU\xd4\x97\x98\x9b\xce\xa1\xa0\x94AA\xfb\xfc\xcc\xa6\x94\xbf\x7f\xf7J\n{\xdbc\x96\xc6z\xfbn\xa4\xec\xe7Jh\xc3/\x9d%\x8d\x93\x8d\x1dn\x18s\xdb\xf9\x81Y\x8f\x99\xc5\xca\xeb\xc4\x0e9;\xb8O\xeb\x90Q\r"\x06\xad\xa7j5\xe7\x87\x8b\xe6\xea\xca+\x1f!\x80\xbfR\xa4Q>X\x02W?\xe7\xdfCF\x19\x079\xd3\xf7\xbf+R\xc3(\xc1\xa7=m\x0c<\xa0"\x01!7\x01b\xf2\xc0\x16\xde\xb5\xc5\\j\x89\xddW\xf0\x0c\xbc\xe1\xce3\xf4\xcc\x80\xa9\xcbf\xdf\xcb{\xb8\xc0\xe8p\x08\xe8\xe4\xae\xf4\x84$\xe6\xf9\xb2\xebO\xf8&_W1\xca\xc5]\x13=\xa4\x18\xa0\'\x92|Q\x816\xdb;\xb4\xcex3\xb9\x9d\xa6~\x87\xfa\xe8\x84\xe0/H\'\xd3b\x01y\xda\t\n\xf3\x11p\x1d\xe8\xe4\xc5\xde\x92\x01s\x00{Rh\x84]\xffJ,\x05\xa1\xf1\x7f\x8b\x08Y\xfb}\xe8\x03\xfa\x06NV@9xt\xfa\xbc\xca\x8a,_=:\x97\x82\x9b\xbe\xd2\x94\x88\xb8\x9b\xecY\xd2\xf1\x00\xb2a=\x86\xc9\xd6W\xc0?\x15n\xfb\x1c\xab\xd0\xc9\x9f\xfd5\xaef\x94\xcc\x9cG\x9f\x0b\x8cRD\xc8\xd3s\x1b\x1d\xcaY\x1f\xa4[-\xc4\xd9Bh<>\x19\x81\xf4V\x98\xd9\t\xe7^>\x07\x9b\x8a.P\x82\x8f\x12\xe6\xb7\x92R\xec:\'\xd07\x10\x0f32\x84\x9b\x97\xfc\x9eE\xf3\xca\x82
\xbd\xf9\xa8\n>\xe6)qS@M\xae\x80J\xca\x99\x12\r-\xe1\xdd\xe5\xb5\xe5Jg\xbb\x19\xf2\x05u\xfek\x17\x82\xee\x10%,\xf6\x06u\xeb/\xcc}(\xb6\x8d\xaa\x88\xb4l\xf6\xc6\xcb\xe3&h\xf6M\xf4*\x9f~\x8a\x11R\x03\xc0M\xbd\xd3\xce$p\xcb\xb5\x88\xb9\xcf*\xc1fzU\xbe\xa5&N,\x10D\x7f\xc98_\x00\xfb\xc0d\xcab\x8c\\\xbe\x8eI\x8d\x04\xfc\xcf\xa1\xb7*h\xac\xb9q\xba\xa1\x19\xcf\xa9\xcd\x92\x94\xbaS_G\x7f\xb7\xb9\xbc\x18\xb8mO\x14\xd8\xdehL+\xe4\xf7\xa0O\xe4\xbeUX\xd1\xc6\x0e\x1ee\xa0a\x8b\xc9\xdf\xfa\xafm\x92C\xabJ]\xc2\x13E\xd8\xb7P\xe6\xb4\x1eZ?\r1O\x996<\xdfZi\x0eM8\x95\xb1g\x91P\xf2\x15B)\xdam\x87\xfcN;F\xac@8r\x8b\xc7\xd5\xc3k\xdf\xc8\xa2\x80*J\x83\xb5\xd9\x83\xe6 =\x8b\x9b\x0f\xbf\xc4,\xecb\r\xea7\xb8\xdd\xc7\xfd.\x9eX\xf6%JD\xaf\\\xa6\'\x9a\xd5\xa9q\\;\x11\xca\xeeb\x85t"v"\\\xad\x08\xc6\x9f\xbf!\x8d\xa5\xf9\xba4\xaaRh}\xdb\xcda7\xa6\xe4\xcc\xfc\x8c/\xee\xe9\xae\x1d(X\xc6\xba\xc9@\x9f\xd0a\xc0\xedD?T}\t!y\x8c\x1a\xd9\x87\x9a\xf4\x1e\xf0\xf5\xb4\x8e\xbc\x15\x85!\xda\xb4\xbc\xae\xb6\x06S}m3p$\x01^\x92\x14\x9eu\x91\xb3\x83\xb4\'\xc9\x15\xf1\xe3\x16rD\x8d\x1e\x93\xb1\xaa\xa1^W\x1e\x15y\x9b\xd5~\x97^\x03\x16;jFt\xe9\x95l\x91\x14R\xba\xc0\xc8\xe4\x02Y\x93\xe5\x8c\xb8\xd1Fw\xd0jB\x8bQ\x98\xa71\x85\xa8\xa6\xafq\xc8\xd0\xf5e\xb9Hw@\xde\xc9Wn\xaf\x17b}\xb5(\x85\xaf\x1d\x8e\x9f6\xbe\xcc\xa3ezM\xbd;%bo\xa4>k\xb9\xc2\xb4\x11\x15\xa6G\xa9 
\x9d"]\xd1[\xd6\x05n\x04<\x9e\x07\xe9\xe5\xb4\xd3\xc2|R_\xb7\xa9#\xbc\x0f>\xb2?mA\xf1\x18\x84\x1di\xb0q\xee\x0b\xad\xf2V\xaa\xc0\x0b\xa9V\xa2i\xc4\x0b1^\x92\x8f~\xc2\xf4\xa2\xc2e\xc2\x82\xf9\x83\xb3js\xb1*\x1a\xb3B\x82<4TM\xb5\x86\xde8\xa4A:\xd1\xf3@r#\x80A\x06\xa5\xd5\n\x94\x93\xb9\xbaBc\xde\x8aO\xf90\xd4l\xc8\xed\xfa\xfaYN\xcd\xa1\x81l\x857\x9d\xba\xa9\xf0\\\xf6\xac,Ni\x8b\x9b:vO\x8e\xfb\xc1N"\x02R\xfe\x8b\xd2\xc0=5\xe1A5\xdb\x81\xd6\x98f\x05\x81R\xd6\xba\x85\x1d\xd7\x8b\x13[\xfa}\x93R\xd7B\x9c\xb4~M\xc1\xaeAH\xaf\x94\xd1#)\xb9@\x87b\xdb\xd1_bw\x8d;\x92!\xd8,\x14\xc1<\xda\xe9`\xab\xd2\xbd\xba~\xfe[\xe3\rD\xf0\xdd\xc0\xea\x8e\x06\xfc^\x11v:\x1c3kP\xf6\x9cZ\xdc\xca \x83\xfa\x11g\x84\xeb\x18\xb0\xb9\x87\x16\xfe;\xa6V\x00\xb3\xdd\xc1\xb6\xb4`&r\x847\xbf\xa4D\x1a\x1a8\xa8g\xc6Z\xedA\x15@\xd6|\x99\x04v\xe7\xd8\xd7\xc9<\x9a\xa4U\xaa\x86}\x11;\x0c\xde\xa72G\x03\xf9Ng\x8b\xe6\xa2\x1d\xc0@}\x12[\xed\xda\x8c\xe1\xb1\x16\xc4\xe4Y\xf1!Gx-\xf9\xd0dJ\x91o\xaf9\x8e\xb9\xfaB\xbf\x08X:\x9e\xd2\x93]\xeaQ\x8bS\xd7\xa1|t\xa81b\';\xec\xf5\xd5\xca\xa4\x86\xa3\x1d\x06\x01M\xb6\xdc\xf1\x80L8\x80\xec\xdd\x12f\xcb\x82\xaa\xe8\xf25\x85\x9e\x05G\xe9J\xc1 
\xa8\xce\xd3\xf7\xef\xd3m\xa3W\xa5\xdf\xef6\x0c\x15\x91\x9f_\x14:\xf2\xdc97\x9d\xd4\xa4\xef\xdd\x0c\xe1\x19\x05[\x15\x8282\xbf\xd6\xdf}\x101i\x9dVo\xd2\x969\xca&d\x9cv)\xb5*\xf7\tH\x9a\x15\xdeG\xc6\xa1\x14\xe50\xbd\x82\xa4\xe6\x9e\x0c\x98\xc3\x02i\xf0\x1cw\x90\x8b\x11\x18@?\xfe\xbc\x88\x12\xf6\xd3\x05\xb7\xc5E\x0eE5\xfc\x0e\xa6\xa6\xeay\x91p\x0099/\xfd\xa6hV<\xf9\x98\x839\xb3\xd5x\xd7F\xc5\xb5Q\xae.\xde}\xb12\x82{\x12\xb4&\xd9\xb9\x8d\x9e\xae\x92\x8d\x86=\x1f\xd6\xb7=Ux\x15\xb1\x98v\xe4\x91\x99Q\xbf\xecEV\xb2)a\xfd\xd2\x88\xb5\xabrm\xbbc\xf2\x00\x87;\xf9\xbe\xb9\xc7\xe3!Q]\xd7\x86\xf3)=>\xf7b\x1e\xbd\'\x12\xe7\xa9\x82L/\x05y50\xad\x84=\x85\xce\xe8N\xb2\xef\xc4SCe\xd0+\xfe\xc4\x17p\xae\x96,\x8b\xa5_\x97@t\x08s\x89\xa2\xa1\x1b\xc6\x96B\xaex\x9a\x1e*\xf2\x0b\xf7E\xa7\xb2)\xd9g\x81i+\xc5[\xc1{T+\xadN/\xd7\xfd\xa5R\xa9\x9f[\x11:\xfb\x94\x8d\x9a\xa3O\xa95\xf5\xd6h\xb2A\xc7K\xd0\xd8s\xd0I[24\x1at}c\x83A\x1a\xf2\xedpw\xfd\x9au\xc5kY\xf7%:he\n\xd9\x19A\x8c\x0c\xbe)\x8d\xa3\x80Fv\xb4\xefHF\x03\xbe8^\x05}\x16kY\x8a\x1cu\x80O\xed\xab\x9a\xa8$\x90#\xd3\x8dSt%\xeat\x84\xb1\x86pg&\xb7\xb7\xf4\xac\xcb\xe3\xfes\xeb\xcb\xe4\xbd8\xc8\xd8\xfc\tPHl\xfc\xaaW\xd6\x02\xe8\xdf\x13\xae*\xba\xe9=F\xe1\xe3\xe9\x9d\x00+\xf3Kq\x13\xc64\x9fX\x88\xa1rQ\xa0\xb0\x88\x91\x0e\x8f\xe9|D\x04\x04\xd72~\xe3I\xe0\xe5\rw\xd8\xe3\xe4\x89\x92kC\xe1\xa9\xee\xadO7Y\x18\x87\xa1B\x00\xfe\xe0T\xf3\\\xd3\x8d\x9a\x84_\xbbD\x19\x81J7\xde\x9a];\xac\xc1Q\xfe\r\xad\r\xda\x0b\x07\n\xdfe3\x13\x9e\xae\x16w\xf4|\xa5\xe3\xc3\xfa0\x0c\nTe\xdb^\x88\x1f\x17\x91[\x1f4\xebC~j 
8A\xe6\x08\x07\xc8\x0cc\xc8\x977\x87hF\xd0\x80\x0cE\xd4\x1f\n\x13]\xf2w<nc5F\x9b\xc0\xd5.z\xa4\xc3a\xe2\x1dL\xfdy\x18z\xee\xb127{\xa1\xff\xcb\xbe\xa1\xa9\xf7\x8fL9\xd1\x9b"$\x98GL\nC\x00D\xbb\xd2\xe7\x9d9~$\x05\xdc\xe6\x18\xf7\xc3\xb3D9O\x97\xa1\'\xbfMP\xa3\xe6A\x12\x90\xf4\xd3\r\x95/j\x9d^\'0/\x1a\x8e\xeb9\x03\xc2\x1c\xa8R\x1d\xe1\r\xea\x0e\x87\xce\xf3\'w\xff\x8a\xe8\x15\xf9\xe6t\xf3=kn\xcbS\xc7\x8c\xdab\xd1F\n\xab*a\xf7\x86\xf1\x8b\xb1\xabP\x80\xf2\xc8\xca\x1d\x19\xc1@\x1e\xa4\x95\xb0\x05Y_Gu\xc19\xfa\x1d?\xba\xb5\xf0\xb0\xc3\xcc\xc3\xd1)\xb1\xf9:\xb4\x96l\xb4\xcen5tO\xdf+^n|\x1f},\xc7\x1d\xce\xc6+>,u\xe0q0\x17\x07@\xa2!>\xb5|(!\xad#\xebm^\x0fy\xbf\t\xd7\xd9\x02;J\xcf|v\x93F\xd7.}s\xe4(\xcf\x01\xe4\xf4\xab~f\xaaXc\x18\xdf\x97Iw\xb9\x98\x85Y\xb5\x932\xbfD=\x0c\xad\x02\xc8.\xbb\xb4\x14\x988\xc3L(\x11\xaa\x06\x08\xa4V{\xc4\xae\x12 i\x19\xba\xc4\xbe \xbc\xe3\xccoH\xee\xb1\xfdS\xc2\xea\xd0R\x80\x80\x88\xd2\xea\x9cqw\xec\xf9#\xc4]\xb0\xcd\xae\x96U\xbaHw\xbf\xae\xa8O\xed\x12\xb4\xc9\xd6\xe059F\x12\x12\xe70$<`\xb1\xd5in\xc37\\\xe1\x92CR,\x07\x97Mi\xcc%\xa4\x18F\x84\xbc\x08A\xfc\xd5\xa01\xc2m<\xcc&5\xa1\xfd\xb8g&I\x91*F\xdev\x95^\xae\xe8\xcfD\xff\xcf^\xb8,\xbb\x89]\nf\xc3\xae;C\xbf\x87\x90\x1b+\x97\xbb\x1a\x83\x03#(d\x9c\xa5Ew;\x85\x84e\xdcF\xaf\xf4\x15\x94\xac\xb68M\x02|\xb5\xdd>;\xdc\xe4\xf2\xf1\xfc\xc3\\\x0f\xc9\x8e\x00\xb6g!\xb5\xee\x11\r\xc5\x8d\x17<X\x0f\xfc\xba\xcdI\x08/\xa2\xbe\xf9\xf3\x8e\xafI-\x84\x08\x10%rQ\x19\x8f\x19\xd2\x0e\x80)\xd0? 
F"\xbbm\xeb\x17]\xf4\x8d\x17\x1b\xde\x8ei\x1f\x8e\x948\xba\x05\x0b\x10C\xb1o\x0b-;\xe9O\xacK^QN\xecuF\xa0\x08\xde\x17D\x06f1\xaa\x02+\xd8\xd9F\x07Q1?\xde!\x85\x8b\x00\x8d\xd7(\xd5\xc0\x82\x93\xa0\xab\x91M\x1e\xb0T>\x11\xc3\x7f^T\x97N\x82\xe2\xab\xaf(\x87%[a\xe9\xd3+\xba\xb4X}\xb1\xdc\xc6\xb1\\M\n/4\xb7\xc9&FM\xca\xa8w\xe2\x1cnq\xd3z\x02\xd3\xbd\xd1{\xd7M\xc07\xae\xc3:s\x9a\xba\xd5\xa5\x0c}*o\xb9Q\x992\xe9\x11>\xb8p51,\xfaX^\xc0v!]M\x1b\xb3\xea\xa2\xc0aN)\x11\x8aMVKH\x1f\xe6\x0e\xb9\x15RO\xe3\x15\xef%\xb2\x1d\x9a\x945\x9d\x14\xeag\x87\'\x86\x89\xab$\xe2:l\x8d\x83\xcbzW\xa7\x10\x90ERG\xbe\x08\x8fJ\x8bx\x07\xad\xc8\x89\x84\'\r,5\xf4\x97\xe3-\x0ee\xc6a2\x99\x8f\x9e\xd9F\xf4f a\xea\x14\xd1\x83T\xafkh6&\x12\x1d\xcf\xfad\xd6$=\x11i\xc6<\xd6\r\xeb\x88\xa5\xbe\xe7z\xff\xd7;\xce \xbe\x1d\x9b+\x03\x96mKv\xcen\x98\xafe\xcd\xce\xdd\x95r\xd73\x92\x19\xe8^\xa8\xa7\xaa\x89\xbd\x89\x06^\xe4\xec\x02u\x82\xfdiq\xc1k\x994\xf5\xb8D\xe2\x17\x82\xd4t\xe9G\'N\xe3\x16i\xb7\xed2\xc6\x98\x99[$\x91Q\x12\xf5S\xf9(\x16\xee\xfb\xe5\xc2\x14FY{Q\x9a\xd2\xf2\xce\xe2\xce\xb1S\xb6*4\xe4\x95Fh\xc6\x96\xf1Q\xfe\xcd\xee\xa4?\xce\x0b\t\x06\xe8"\x11k\xb1\xcbN\xaa66\xf0\xef\x9a\xa2\xca\x06\xd5c\x83Q*\xc4\xf2\xbf\xeb\xba\xca\xbf\xc8\xd5\xff\x15^<\x8eA$A8|\xf6+9F\xbf[\xef\xc2\x1a\xf8\xeb\x0c\xf5\x1caK\xac\x125\xab\x00\xefe\xb1V\x9e\x87\x91\xeb\xcb\xb2\xff\xb1C{y\x862\x7f\xde\xff\x8e\xbe\xfd"i\x98\xa7\xdc\xb4$\x1c!\xe1\xa6\xfbb\xa8\xb5\xe7yi/H\xde1D\x05>&]\xfb\xc5\xa3\xca\x9b\x9eG2\x9a|\xa7kO+\xcd \xda*\xf7\xc7\x02q\x83\xfdr\xf9Z\xd10\xb6\xe4\x0b\xb5\xe0\n\xee#\xa3\xcb\n\xb2\xaf\xd2\x82\xc8U\xff\xb6\xff<\xe3\xf0\xa8\xb45qx|C&\xb5\x03Bf2\xb7\xa7Il\x9af6\x8ez\xaf\xea\r*4\x05\x1e3\xbaT\xfb$\xdf\xef^ex=L\xc7B\xcb\xac\xd2\xec\xd2\xe0ew\x83*C\x0fK\x84\x18*\xb6\xd8\xb9\x90x\xda;\xf3\xc4p\xfd\'\x08^\xa9\xfd\x93\xdc\x89B\x90<\x81\x0c)\x16/\xc5\xdb\x1fcD\xc5e\n\xca.\x85Z\xf8c\x03wP\xd8\xd2\xf6\xd1\xd7F^\xb7N\xe9\x9aN\xec\xbfF\xfa\x9e+\x8c\xef?\xc0\xe4$\x013\xd9\xf2c\xfc\xa6\xe3\xf6)\x85\xc4\xab\xdcl\xbfZ\xec\xe2 
\xa5\xbb\xd0\xef\x0cF\x18\x140\x81aY\xd7\x16\rjI\xef\xd8\xee\xa5\xc0+\x16\xee\xab\x01\xd0\xd01\xfc\x1eF\xf7\xb1&*\xdb1\x0b\xf4\x14d|\x94\xf4\xae\x16\xef\xc0\xae\x04\xa8\ro8\xda\xc3X\xbep\x91\xd4\x1eF\xdc{\x89\xe3\xb1}X\x01\x7f\x14v\xe5@2p\xda\x88z\xad]\xcf\xc4{\xc2\xf8\x81\xe12\xa3f\x12\x1c\xe4\x1fvv|\xd6Nh\x9d\xc1Hy\xe3\x9ee\xdc%\xf3\x12\x18\x8e\xd9\xe7\xeb\x99MA~\xf6\xf9\x82\xbb\x0b\x055C-\xa2\x8e\xe6\xb1\\\xe1e\x86\xe8\xfb6R\x9d\xe8u\xc7\xbeEW\xff\xf2D\xfa\xa5\r`\xa5\xe68\xaa\x06\xb1iW\xbc\xc0\xda\x11\x1e\xaf9i\x1dA\xafW\xf7>d\x9f)m\xc6\xe0WM\xc1\x8be.S\xb6\xf5\x08BL \x9c\xaf\xe39\xe9\xeaf6\xe1b\xd4\xb0\xdc\xa9\x8c\x92b\x944\xd3Qzj5\xa1F\xe1v\x10_Ms\xdf\xd7M\x0f\x1fyUPy\xbd\xe3\x07\xf5\x82L\xb6\xf0\x83\xd4\xa8\xd1"\xf1\x9a?\xe2\xd6\xa8Gt\x05\xb6Lq\xfa\xce\xe7\xc9\x04]\xe7X\\d\x8b\xf2#\x8f\x9a\xe7i\x91\xef\xd1\xc2\xc7\xf1\xecA\xb2\xae\x84\xdd\xbbm\xf6\xb6\xa6\xd9\xdf\xd3^\x9be\xd2A\xc7\xf8\xb0\xd6$\x13cYj\xbe\xc1\xbc\xfa\x86\xdb\xac\xaf\x13\x08\xe7m\x0b\xcaz&_\xa0h\'\x80\xd7\x8f\xec\xdf\x99\xf8\xc4\x98#\xe6\x97\xc1\xd8\xc2\x18Y>\xda3\xa2\xda\xd4\xc3_\xb1P\x15O\xd32\xa9\x8c\xa1\xb3\x85 \xa8\x02\x05\xb4\xaf\x94-Mw\x18\xf4\xc7Y\xa3{\xad\x13\xb9D2ul\x1ba\x82&\xec3\x82\x1a\xe9\xc5"p\xd6\xfb\xe3\xf6T\x0f\x9e\xcb\x90Y\x8f\x05q\xc5\x02\xd8\xadN\x97\xf5\x1f7\xb58h\xaao\xcd\x9c\xe7\xb8\x1c\xd9\xda\xa9\xc9Uj\xf3\xaf\xcc\xfe\x19`\xaasP\xee\xdeYF\x82n\xb3a\xb0\xef\xe9\x99\xdb\x05\x8dJ\xa5\x0fG\x16P\x89\xfe8:cm\x1fv\xbd\xdd\xd4A\x8b_ \xfa\x10\x19(g\x97\x0e\x17E\xbd88\x01\xf1P\xc1\xece)h\xab\x15\x90\xf9\x9a4\x1c\xb3\x8c\x9e\xcc\xbe\xc4\xc2JlP\xc4E\xcc\xc6F\nY\xd7r\x08\x98\xab<i\xc5\x98cw_\xf1\x8e/\xdd\x90\xe6\x90\xf5\x11.\x87\xdb\xd1w\xe6\x83r\xaaJ\xfb\xa4G0\xda\xad\xff!\xcd\xdb\xb4e_\xd6\x88d\xa8,(\x16\'\xb8\xde\xee\x82\xc9f+\x19J\xc2Y\xa2<_\xecb\x1c\x95\xd4\xf8\xf8\x14\xcb\xeeZ\xbem6\x06\x06\x9d\x1f\x8e;tA|\xf2\xfc\x94\xea\xbf%aO\r\xaa\xad\xc9\xdfzH\x1c\xb6\xb4\x9a\xd2\xcc\x1e\xea=di\x00/\x17`\x1c\x1a\x86\x07\xd4e\xe1\x0eK}5 
\xec\xc0\xe0\xa3\xf1\xc0\x85V\x7f\xfc\x15\xa6\x1f\xcd(\xda4\xcc\x19\x07\xb6;$s 1\xf9\\\xc5W\xcd\x87(o5\xa9ot\x8a\xd0=\x05c\xb5\xcb\x1c\xb8\xd4\xe5\x9eT^\xba\xc3+\xab\xcas)>\xe4\xa8i\xe1N\xaa\x84E\xf2\xb5Y\xe8\xb1F`t3\xfc^\x88\xc2,\xa3\tLz\xc7\xcb@\xa9O=^\x08T\xf2[\xcf\xb6A\xc2_\xecU\x1e\xab\xeb\xcd\xe0<\xde:4\xe63\x9b\x16\xe68,\xbb\xb0\xdf\xa5\x9a\x19\x8f\x02\xd1\x07wA\x95\x0e*\xcajR\x02\xb4\xcb\x04\xd3\xfd8\xf9\x83)Vj\x83\xbf\xe9\x16G\xa8\xe9\xa2\x10w\xc3w\xb9\xb1\xcd\x1b\x1b\x04i\x19Q\x95\xd2\xa2\x9d\xa8(\x84\x97\xec\xa7GP\rW@`a\x88\xa1\x98$I\xbb\xfc+nis\xcf}\x8d\x08qaC\xe8\xc4iUX \xbd[\x92\xfe4\x10\xdf\xf4z\x0f\xb3\x80\xff\xc7\x01\xfa>\xba\xfc\xe3%\x1c\x18\xbd~,\xff\xccQR\x8eam=\xc8z\t\x85\x91pO\xb5\x90\xa9\xce\\\x1a\x87X\x9aV4\xb2J$?i.\xc8\xd2\xdd^p\xe7ao\x82\xccX\xe9\xe8\xe3\xf7c*@\x86vg\xcc\x91h6~\xcaT`\xb8\xe9\xec\x92N\xb7\x87C\xa5K}\xbd)\xdc\xe8\x17\xc2\ts)\xfaG\xf3]\x00u\r\x05\xe0\xff\x8cp\xb8\xb3\xb2X\xe2\x85\xb0>\x84[\x8fbT\x7f\xb6\xaaQ\xa3\xf1P\x88/\xbf\xfb\xb7R\xa1\x1f\x89q\xfc\x83\x15Y~L\xd8\xd1\t}\xea\x16\xccA\xc0\xef&\xf5k\x1b\xce:\xa1=C@O\xc6+\x06\xf8R\x99\xdb\x81\xfd\xf5\xd7\xbb\xe4\x19\xe8\xe8\x06%\x9cA\xe6>\xa2\x19(\xe2\xa9\x02\xe1#\x97\xa8\xefV,\xd7PF\xf9\xba\x94^\xcd\x11l\x80(\xb2\x10\xe3T$\xcf\xc6\xf2\xc5iG\x9a\xa1\xe0\x11\x89\x11\xed\xdcuw\xadT\xaf\xcd!y\xbbq7\x93\x01Z\x9c\xa1\x02k\xe0\xa6\xe2\xc3[\xa4rN\xb3\xb9\xf6\x04H\xf0\x9f\xee\x00\x03x\xd5\xc4\xae\x94\xbd\xe7= >\x81\xc9B\x8dO\xa9S`q\x88\x87\x81\xa49%(\xce\xd0\xa4\x9e\xc5\xb0\xd8\x9aa\xdc?\xa2*\t\n\x04Q,\x86\x9f\x9e*\xf7\x90y3<&\xad\xd9\x84\xa5ul\xf6a\x97\x87\xc4\xa5WQ\x975\x91\xce\xfe<7\x94\xa1V\xb0+\x90t\xf4\xa1oL\x7f\xb4\x81r\xc7!g\x04K\x138\xc1\x1c\x86[\xd8\xedNI\x8e\x98\x85\x88T\x89A*T\xf8\xa7\'\x91\xe7\xcd\xf8\xcf 
\x08Qepj\x1e\xaa\x07\x8f\xb5\xf2h\x92\xbct\x8az\xcc3\x14\x8f/\x15Vmo\xfbL\xbe\xd5\x84\x11\x8d\x89K\x97\x86\xc4k\xb6\xae\x10\x87Oh<O\xe8\xe9\xc5\xd8\'\x969\xe1\x086\xa5g\x94\xd4\x94\xec\'Q\x9bF\x97\x98A\xd4\x95s\x01\xcbA\xde\x05\xad\x82\xd9\xc9\x89\x13%Fx\xd0\xad\x12-\x8f\x01\xf2\x1fY\xa6\xb6-n\xa5@\x98+\xd5\x14R\xd9"S\xea\xb3\xe7$\xe2\xbc\xc2sIC\xfe[\xf3\x07\x11\xa5\x82\xd4\xda\xccJ\x19[j{\xab\xf8s\xf6\x0bt\xfcn\x03\x97m\xec\x90u\xfb\x16\xfaA\x1deH~\x1bD\x9f\x86T&\xad\x1b\x8c0WVa\xe7\x07\x17\xbaq0P,r\x90\xce/\xf9z\'\xef\x14?\xe1H\x89\xbe|\x1c\x11T\xbc(\xa2Q"\xa1\xac\xb1\x8f\xc6\xfcnl\x8a\x88E\x10\xea~\x85JX\xd1!\xfeep\xb4/\xf7\x8c\x01\x17Ka>\xdd\x10\xb6\xd3Xnt`\xba\xf2},\xbd**:\xf8\xfb\xa7W\xda{\x94\xe5\x89\xf4\x94e\xcbp\xca6\x13W&\x9cD\x1a\x99\xc1c\x12\xaa\x7f\x03\xde\x05N\xcd\xce\x01\xbb\x1e\x1d\xc2\xc1Cd\x9f\xe3\xdc\xb4\xf1^\x0f\x13R\x7fLX~\xa2\xacE.[W\x88E\xbd[\x9b\xd0\xd2\x9c\x14\x98\x86\x94\xa1\xce1\xebO\x02\x8dW\x16oB\x81e\xf2\xf9\xf8\x83P\x87A,Nw\xd1\x07G\xf7\xea\xf5\x8eS\xd8\x1e\x95\x8a\x0e\xcef^Y\x1f\x8d\xa6\xb6:\x10\r0\xad03\x9aP\xf7,\x1f\xdf\xa0\xe1\x81\xe4\xf7\x02\xfdGp\xb87l\xdf\x9c\xbdFW}\xc18\xf3\xe3\x97\xb4\xf5\xc5RxiI>b\xe7\x9e\xc2q\xaa\x84Tm\xd5\x86{\xb8U\x85\xdb\x88 Rz\x0f\xf6F\xcd\x8e\r{\x9auP/\xa3\xa7w}>\xdd\x10\xbdi\xdc\xa3\x8a\xbet\xf1J"\xcbP\xff\xb0\xaf\xee\xa2\xda)i$37\x9d\tW;\x80f\xa8\x86\x10J?]\x86\xc7\xf1\x8b!p\xd3<\xe2\x7f\xeaEwlzw( \xb4\x18\xbf\xc8|\xef!\x08\xb0\xee\xf2\xdf\xba%\xc0\x11g\xcep\n\x8c\xe6\xc2\xf6\x05\xbb\xba\xcbP\x1e\xad\xc6\xa9U3>\xbaK\x97\xc8\x1c\xb9&m\t\xfbu\x19\xe8Vj\xe7\xe3*\xed\xc8\xf9C\xdfF7&\xd6\xc2\xa9\xfc\x0ea\')U\xd0\x1bK\x8b\xfb<a\x99l\'f\\Q\x0f \x87\x8d\x87\x11A\xb60+\xc5\x7f\xcf\x89\xd5\xac}\x16S\x95\x840\xa4\x19\x12\x8b\xd88\x19\x19\xb5(\xc4\xb4\xe1\x7f\xa0-\x02 \xf6k$\xf7\x1e\x80\xb2\x1b\xa44"\x18D\xe2\xb6\xe7\xf1V\xc9\x81u\x9aT\xd1\'\x1e\x117w\xe4/\xb5_\x11\xb5 \xe1\x83A\xa9\\ 
8)\xc6\xe7&\xcf|\x1c\xf9\xda\xb2\xa5\x85\xa7!\xed[\x06\xbf+"\xd8\x1df\xb0\xdd\xb5\xfcw\xa6\xc6W!\x1fd\xd1\xa2=\xdf\x00\xf7M.X\xf3#\xc75\xb6\xb7\x86\xc0\xbd\xeb\x98\x8e\x91\xc0\xbc\x99Q\'gP`L\x97\xe6\xa3\x85\xaa\xd9\x8b\xc2\x889\x8e\xcatl\xb0e\xbba\x8cy\xbd/\x18\xb3\xe5\x95\rK\xb8\x00\xc3\x94\xddQ\x8a?\x0c\x91\xe4=6N\xe71\xf0J\xac\xaba\x91\x12\xe2I\xd2\xaa\xb0\xb5\xb5\xed\x89\x8c\x05\x18^\x03\xe3\xf7\x93\x11\xafGb\xf9\xf2W\xa124-\x8d6\xd5w\xcd\x1c\xe1\xb1\x9a:}Ju\xc5\x98\xa5=w\xae\xe9\xfb\xa7\x18\xf6\x99\xeb\xe1\xdf\xe8\x88\xdef\xbc\xf8\x9bU\xff\xae\xdb\x7f\x80\xff\x1e\xc6w\xaa\x1cYS\x16%\x1c\xe8ja\x80\x0e\xd9\xb8E\xf1\xc6\xb0\x99\xb87[C\x11\xa60\x1f\xdd\xdb0r\x16{h\xdd*\x87\xb7jP\xf4\x12\xc8q\xbe\xf1K\x92\xab\x03\x03\x98\x9e\xebPPyU\xd7_38q \x8d&F^\xaf\xdf!^U.\xd8\x07H4\xfdF\xd1\x92\xad\xc6\x07f\xf5\x8b\x83L\xf4\xb4\x8f\x99\x1f\xdb?\xd8\xe0\xfb\\\xa4\xbc\xd3L[\xe1\x15\x80\xc5\x99e\xc8b\x13\\\xdd\xe5.6\x99\xf5\xee\xd4\xcb\xa9\xf8\x87f\x0b#\x83.\t\xdc\x83`\xa5\xe1\xd1\t\xa6\xd8\xbd\x95kQ\x94<r}\x96\xda\xb4\xd1\xea?\x8a\xe9\xd58i~\xe2\xd0\x84\xcc\xc7\x86)\xd5\x1d\xb7\xb4i\xc0_\\\xab\xda\x9f!\xd3\xd7\xba$\x1c\xff\x19\xd12\xbc\x1br\xc5\x92^\t\xc1\xd5DB\xf1*\xf7\xaf1\x1a\xe6 \xda\xeb\rq\x91\xa3\xfe\xf4\x18|F\x86oDQ{\xc6\xb3\x90\xb5\xaf\xf2v\xb1\xe3\xf1\x07\x140\x8c\xa8\xca\x1d[\xcb\x83\xd7\xa1\x1f\t\xcf\x07\xdf\t\xbe\xd5\xe4i1\xa6B\x8e\x82X\x15\x92M\x11?\xbf\xec\xae\xb7\xc7rZ0\x852\xfe\x8d\x85\x81`..\x12O\x07v6 
\xd2s\xfeL\x0e\x16\xb0\xf5\xae\xd4\x08\xa5a\x01\x84t&g\x06\xb5\xd0p\xda\xb1\x0bN\xf3\xed\xb4\xc6\xef\xeeUz\xc4\x8d\x08\xdeYk\xde3\xf5\xe1\x0e\xa3\\\xd4yG\xf1\xbe\xe8\xa1AyM\xaa\xd4\x00ZXw\xf3O\xb7\xb0\xf7\x85\xfb8C\xf5\x00z(1\x18\xa3\x0f\xf5>\xb8xW\xdex\x9eO.\x1f[)\xf3r\xc3\x99simt\\\xd5\xf2\x05\x05u\xef\x0cS\x8c\x13\xf6\xc8\x85W\xe5\x04\x1b/\xb3\x12\xfd/VX\x9c\xe0\x1b-\xfc\x8e\xaar{\xb3O\x16\x03I\x13\xac><\xc8\x80\x06\x10\xf9\xe8\x08#\xe20\xb1\x91L\xd3\xe7VL^j#\xdf\x988\xd5\xcf\xa7\x88(\x86t3\x12\xaf\xde\'\xce\t\xe1\r\x9b\x0e\xc9\x95\x97\xca\x08\xe5m\x032\x89\x91"\xf4\x17\xf3G\xaf\xf7\xd5\x8d\xa9\xf9dv\xf1\xab\x01\xf8\xf2\xc3/\xc1D%\xbcnP\xa5\xca2\x15\xa7\xe9\x03q\x1d 4\x98\xfeoh-\xc4\xbd\xd0\x15\x11:NY\xad\xad\xf7\xf1\xa3u\x11:C\xaf\xd3[oM\xb1\xe2\xdfX\x19T\x93#$<\x8c\xca\xbf\xear\x8a\xfbv\xfc\xbd\x0cD\x04\xd8\xd0\xfbI\x06\xb2\x9d+\x97\xbc_\xb7\xb9\x8a2\xe4\xb7\xf6\xc5\x93\x9a\xbf\xee\x89D\xfeD\x05-\xea\xb8\xd5\x92oo\xb4\x99\xfd\x83\xd9\x10\xef\xdb\x86\x9d\xe8\xa5o\x86\xf2\xbfR\x82\xf4\x0bs\xe2\x1f\\\xe0\xf5Mk\xf4\ro7E\x01\x92\xcaK?4J\xe9\xba\xc97\xc2f\xa9\t]n\xebg\x15\x85\xfc\xe8\xc4TA\x00\xd6\x92D\xbf{i)j}\x8d3q\xb9\xa8@\xb1\xa9\xe5_\xdf\xf3\x90\xf6\xd3\xe5f\x9foT\xf0o\xa1\xb1\xa3\x0b1\xf7\xb7&\xd0*P`\xabl\x05\xec\xc0\x17\xf9\'xP72\x96\x84v\x89\xb6\x89Z\x81\x02[v1\n\xff\xef\x82\x90=\x84\xfdf\x93\x9e\xb3\n\xb4\xcd\xcd\x83/?\x07>-\x8e\x14\xd9xz\xf0\x05\xf5\x0e\xd9\x9d\x9cI\xd3<yq\xbc\xe3\x08\xd5u\xbd\xc7g\xa4\xa2\xf7\xf85I1\x9a\xb9\x1b 
\x1f\xc8\x94O\n\x8e\x7f)\xa4~u\xf7\x84>\xbe\x96\xb9\\\\\x12^)6\x84-t9\x11?\x0c\xed\'\x95\x8eN\x95\x16\x92\xd8\x1a\xd8\xa3\xf7\xc3y\x17\x88&\x1a\x81\x9c\xd6r\xb1\xc3T\xfbX\xd2f-`N\xcd\x04\x910\x10O\x95\xa9\xbch\x91\x8d\xd1|\xc9\xd6s\x8dv}.\xa6p\xd6\xd7\xba\xb4\x97"\xae\xf6\x7f\x0fo8\x995\xde\xdd@[6\xf1\x8c\xf9\x11P\x94s\xff\x8e\xd2{\x85($\x92}\x01\xb0\xbdq\xd6\x916\x12\x8a\x9e\x1b<7\xce}-#\x85\xfbx\x99|\xba\x1cZ\xe5v\xd0\xff:h\x91%\xaf5T9`|\x1e\x86>\xf8DL+\xf6\x87oN\xdf_\xc5\xcc\t\x82\xa6\xaf0\xcd+v:\xe4\xf7\xc2\xac\xfa\x18\x81`XA\xa0\x1d\x81\x00\x02\x08\x8e\xe6;Z\x03\x05%,M\xccz\xed\xdd/(\x8b\x8a\x036\xb3\xb03\xb98\xd4Mm\x1c\x0b\x93S;Z8\xd0\x93\x08\xc6\xd3\x91\x17\x84\xa7\x92\x80\xcf]h\xf7 \xa7\xd5\xc6\xdf\x89\xf9>\xd1\x0b\x85\x14\xc6\xdal\x7f}\xa8k\x91\xa2|9\xcen\xad[#\x065y,6\x84~\x84\xc4\xcb\xd1\x02$\x93J\xb1\xeb\xbe\x03H\xdf\x89\x041RVS\xc2\xce$\x95\x94\xe8?o\xe9\xaf\x80\xde\xd5\xdb\x01\xc6\xfb\xcf[L\xec\xdb\xc5<\x17\x9b\xa9\xd9FE\x83\xc6\x9d\xa7_\x94,\x11\xc1\xa7R\xfe\xbep\xd3\xd0\x1f\xa5Kn8\x0b\x84\xe9\x860L{k\xc7\x8b\xc7\xfc)\xd6\xb5i?\xe9}\x19\n\xc6H\xbb\xeb\xf9q\xd5?\xba\x81\xe0\xbe\xfa^\x8a\xc3\x82ol\xa6\x05\x02\x87\xab\xf9\xab\xf6J\x9ck\x1fZ\xfe\xfa\xb6\n\x1c\xf0e\xc9R\xc5L`I\x05\x05\xb5\xce\xe9\xb0.\x9f\xfa\x96\xe5\xbcM~\x8a\x93,\xb9l\xf1_`5b\x9eDC"\xbd7PWT\xa7wm\r\xa0\x1a\x92r_\xe8\xc4\xfe$\xc0(\xe9\xbd\xe5\x83K\x88w\x9f\t\xc0\tM_W\x91?|!\xa8\x9e\x12\xb3\xe4\xe7-\x05O\ny!(i,I\x9c"\xaa\xd4W\x17Vd.\x10\xea\xffr0\xaf\xfe\x1c\xfe\xf1\x1en\x8b\x9c\xf3&\xa9Q&\xbc?\xf5\x82\xe5\xe1n\xeb\xd8,\x06\x91\x8e*\xd8\x1c\x9dZ\xf1Hd()PF\x8bF\x0fGi\x88\xbe\xc1\tb\xdc\x03\x01**gC\x0c\xbeI\xe8]\xd0\x12g\xf3\x97\xb6\xb0c\x12y\x10\xaa\xdf\xe4\xb8\x94\x1b\x0b\x0fM\xfe\x16(!\xbbr)\x88\xf9\x8b\xacc#\xc0n\x8e\xe9\x83\xd8;\xf5E\xdc\xfal)\x8b\xd5YgY\x86\xfb\xe4\xff\xef\x0e\xe0\xcd\x1f\x9f\x0c=\xe5\xc6:Qc\x10z\x8f.\xbe\xdfl\x96\x93\x9bf\x9cW\x08\xae\x8e\x88\x00\xbfl4\xbd$!\x0cs9\x99\xe1\xf0\xb0pS\xda\xaf\xe4\xe2\xb0\xa9z\x8a?|"\x96\xf3\xb8\xed\x8f/\xb8\x88\xb9\xa9\xb2\x19\xe1m\xccD\
xabi\xb0S$\x05\x9d\xf1f\x1b\x9b\x98\xe7\xb7C\xec\xa4j\x89\x06%\xc9\xfcs\x18\xf2\xadv\'C\xe1\xc5Qp?\xcf\xb6@\xc5\x92w\x9doB\xb98\x00\x7f\x9bJ>\x81\xd8\xac\x10v\xd2KS\x8d\x96\xfdW\xf8\x84\xd0\xd2\xc4T-\xa3\xcd\xfe\xc8\x11\x87\t\xb0\xb7\xe4\x1f\x98)\x13\x11\x11w\r\xbb\xb7\xa9\xf2\x11U\x9ddE\xba\xf8\x07\xa3,\xafXa=7\xf2\xa3\xd9\xc1d\x82\xefQS\xd2\xf2\x95\xe8\x0cL\xc3WYr\x16\xf7\x8aT\ru\xd6\xa6g-\x86\xc6\xab\x86\x9c9$k.{\xa5p\'1\t\x13\xce>[\xac&qq\xbcb$\x8cbct\xb6\xdal\x0b?\x1f\x8c8\x1e\xd3\x96\xd5\x12\xaa\xae:\xa9\xedU~\xf1\xd1\xdcA\xba\x96\x17}v\xda\x1b\x81G\x86XG\xfe\xd1\xfa\xee\x16g\xc8\x9b\xe5|\x0f\xe3\xcbW\x9f\x01\xd5\xd4\x91\xd9\xcb\x86oK\x1d\x80\xd3\x84Ib\xeb\xd5\x12\xa5i\x13/\x08\xf3\x92\xc8\x19k\xb8\xbc\x1a\x86W\xe9\xf4\xf0\xd1\xbdU\x7f\xd1\xc0\xe6\x1f\xc1-\xa2\x88UUZ\xbb#Ok\xa7\xaf\xee,\xc9\x19g\x89\x1e\xa0bb\xe9\xbe\xeaI\x08Q\xfa\xc3\xefU\xe5s.\x9e-N\x85\xc8\xfe*2aw\xde\xf9kg\xf5\xbd3h\x82\xbc\x0e\xb6P\xa5\rv\xd6k\xc8\xaf0f\x0c/\xda\x9b\xd9\xacje\xc9y\xc08\'\xef\x01w\x13/\xb1\x02\x9am\xc0\xba\xbe9\xd9\x8f\x94\x1b&L!P`\xa1\x01\xc1\xca\xdbI0\x89\x08#U\xcd;\x14\xadAe\xb0\xee\x88\xcd\x91o\xf7\x80*\xac\xe19\x90\xdeE\xde\x8b\xf1\xf7R\x9d\xcc\x15\xdc \xd3\xf7\xb0}\xb9\x95\xbb\xcc\x9c\x005\xa2\xa1)\x0e\xcc\x02J5\xed\xb7Q\xcf{\xf5\x84\x08\x89\xc0\xaa"S\xab\x0f\'\x17B\x97\x96a\xd9mI\xf6\x13\x0b\n\x98\x9b\x8a\xe2\xfe\'_\x96u\x03\x9c\xf2\xbd\xc1\xecuh\x83\xc7U\xe0XHWi\xb6\xf7\x9b\xcd8"\xeb\nl7Z1%\xa0Q\xecl\xbf\x05\xdf\xdb\x0c|\x14\xbaf\xd4\xe5\xc2\xb7\xed\x13\x8a*\xb22\xf5\x0f\xdc\x9812:\xaa0\xd8z\x976\x1bp\x0fW`\xfe\x0fS&\xc8P\x8a\xcd}\xdb\xfcQ\x0c 
\x89c\x925^ji\xdd\xa6\x1by\x9f7\xe9\xd2\x16\xdf\x86\x87\xfe\xcb\xeb\x13d\x9bK\xac`\x8e\xa1U\x17:\xc5O?\x03\x9e\x11\x14\x8b\xc7\xd9\xab^\xfaJ\xa2\x8cM\xfb\x8b\xc1\xf4>\xaa\xa6\xe0b\x9bi\x02|\x14\xe7\x99\xb9\x9fY,\x9a\xdd\x82\x0bf\xbc87\xcb\xfc\x02\xe4\xcf.+\x93\xc0S\xa29\x83#S?\xf1\x07q\xf6\xe0\xf9\x13\xf8\t\xc6\x1d\'\x9d>Q\xd9.B\x12\xea\x03\x920x\xd3\x99q\xa5k\x95\xdc\x9b$\xa7p,\x1f2Sp\x08\xba\x1b\xfe\xc2\rT\xf0\x82\xd3l\xbecV\xaa:\xe5#H\xca\x8e\x8bbv\xda\xa2\x89-\xb5\x89T\xdd\xd4\xf6O\x87l8\xe8Op\xfc&\xd8\xba\x95J\xa3\xc9_ie\xa9 \xdct\x8dF\xfcg\xa0\x99h\xfd}\x0c\xa2\x15\xb0I<aw\xd9\xf7\xd3\x80\xa0\xd4\x96|m(}l\x15\xff\xbf\x98\xc7\x1b\x9a+U\x1f\x14A\x0b\x1a\xac\xde\xa1-\x10\x95\x90\xb1\x8b\xb9C\xed\xa0\xb8\xc5t\xbd\xdf%\xff\xa2<\xc8\xc2[\xb3dJ<\x18\xc6\xcf\x9f\xfa\xbf\r3\xcbK\xed23\xd7\xeb\xf4\xf7+\x97\x05\xa3\x08\xc0WT\xa8\xf40\xaf\xaa\x1da.\xa3A\x8a\xe1\xefs\xda\xd08\xb6\xcd\x8b\x9d\xc6\xc1\xa7\x8e\xa3\x82\xa6!;l2\x19\xbe3M\xce \xff\xc1\x8f\xdd*3cJw5\xae\xf3#\x8eF\xca\x86=\x93\x8f\x89\xec\xbd\x18hZC\x93\x9b.Z\xd6b\xe1^\x08\xea\xb5lmr\xf8\xed\xed\xfd0N(72\xe0\xd3\xd0t=N\x81.\xaf\xbeE.\xf8\xf7\xbe\xb7\xbf]\xbbx\x13\xd8\xcf\x9a;\x0cQU\xbc\xc1~\x11\x14\x11E\x14\xebA\x17\xee\xa2"\x8c\x91T\xdc(x.\xf2E\x9d\xe7\xe2?\x8c\x07\x19\xa3\xa1>o\xb0\xab\xef\xb2\x14>x\x87\x17\xe5\xcf4>\x13\x92\xe6\x97<\xad\x02\xd8)\xfe*\xac\x91\xdb\x8fiW\x1e\x02\xbaXm"\x9e=\xf8\x92\x81\x9f\xaa\xe2\x1b\x8b\xd8\xd5D\xc6|\xd9m\x98\x94\xfb\xab1k\n\xf21V\xe53\xd1;\xeb\x9e\xeaW9\\\xc1\x9b\x94\x86\x9e@\xf5\xdd\x04\xa5\x85\x95\xdfO~\x8c\xf8\x19S\x1a\xcb\x11\x0b\xa3\x1e\x10\xba)\xa5\xfb\xacE2\xe7y\xae\xfc#~."\xf6`\x92r\xf9\x996n\xf2\xaf" 
\x9fSv\xaeM:\xd1\xb2\xe5\x15l\xb7P\x91*\r\xf1\xbf\xb7\x9a\xa8Yz\x06\xe1n\x96\xba3\xab\x91J\x0c\xc10`\xb4\x80\x9f\xc6O\x80/\xef\x14\xed\xf4\x14~\x02y\xf5l\xbd\xef\xf4\x005\x1f\xef\xe03\x84\xb4:\t;\xe2\xa1\x01\xf6\xefJwa\x92\x8e\x0b\xfd\xcf\xaa\xber3\x18\x15\x02\xb1\t\xe4\x98}\xa9\'\x96\xd6\xf5#\x8d\xb0~\xdcr\xedxZ\xd1\xd7\x80WK\xa4@\xa6\xea\xd7\x02\xe8gZy?\\XE]W\x1a\x04\xc2\x82\xef\x9c\xaf\x92\x8e\xd2\t\xe5Wb/\xc4\xaf\xe6\xc0\xc3p\x8e\x081I\x80x\xad\x92\xc8I\xafs\x10\xe3\xba\xbd_$\xa69\xde\xeb\xb3a\x07%C\xfb\xe7\x96^\x05Rj\xca\x16\xf2^.]|\xe7\x9ep"?\x0fy\xb9\xdc\xb6\xb8$\x05\xb5\x05\xb3\xe9\x86g\xb8\x1c\x90\xb7`\x19\x15U\xd0\xbeT\xc6i\xe5BS\xb8S\xb2F\xe0\xfeTH\xf7+\x83W\xa5\xee\xa6\xf9$\x985\x1cKi\xb0es\x06:m\x9b\xcf\xee\x9f\xae\xde\xc3\xac-cy|\xc6\xbdI\xbfT$\x1e\x12\xc5\x00\xa1\xb3\xab\xfe\x92<\xf8n\x91D/\xbf\x8e\xe1\xda\xf1\xe0W\xe8M\x13\x03\x94\x02G\xf7\xcc\xbc\x19\xe5\xd5\x86\xa454\x9f\xb4\xdd\xf3\xd0\x95d\xe5L\xaeZ%\x90\xaf\xa5\x83{\xd8l\xed\xd5\xecLG\xea\xd2\x88\x07g\xb3\x1a\xc8\xca:\xc7\xd6\x8e\x9a@f\xa6\xc8pt-\x9d\xb4j\x86\xeb]*1\x83@\x89;R\x9e\t\xae\xe8r8K\x7f\x92\xbbyR\xb5\xf33\xdd\tX\xc2\xcb7\xe7\t\x9b\x006\xa1a\xc6,1)L\x07\xda aMx\xc7\x19f\xef\x0b\x85\x8c\xfcVe\x00\x06\xf9x2f\x88\xb11\x83\xbb\xdb\xa9\x86\xa8~?\x90\x0b\xd5\x01\xa2?\xdehM\xf7\x13\xabfCC\xb1\x97\x94I\xea\x7f\x8f\xca\n\x1b\x9cc\xfb2N+ 
m\xbb\xff\x07G/\xd6\xd0\xe0\xbd\x94\xb9q\x9b<\xc2#\xef\xe0b\xf9M\xcd\xc5H\xb1\x94\x91\x0c\xbd`@%n\xcc*\xac\x9c\xe4W\xdb\x9c\x1ae\xca\xeb\x11\x14\xe5Z}[v\xd3\xee+)l\xda\xe6/\x12\x8bw\xcckW\xc1W\xe1\x9fq\xa6\rb\xc3\x0b~\xd5\xfc\x87\xa9eS\x81\xedd\x06y\xe7\xd3\xa5\xb2\xfeeQ\x14>\xf0\x16\xcaz\x9c\x11\xadH-\xb1\x04"w\xb5}F\xcc\xb8\x91\x05r\xe3N\xd3\xdd\xcd\xa4\xbbpz\xb9\x8eQ\xeb\x0f\x0f\xf5\xb7\x1fnZ\xb9\x06\xa8J8\xe0\xbe\xb46\xd5ST\\\xa5\x90\xc2\x0e\xbe\xf6\x1fX\x90\xe8\x10;\x85Z\x1e\x1e\xb7mL\x82\x08\xa2\x93\x83o\xceNM\xbc\xce\x97JG\xf7g\x02w\x12\x07\xb2\xd8\x00\xc0\xe1\x1d\xc4a\x18?\xcb\x9d;\xa3*\x962"Q"88\x06\x1f(\xabC\xbf\xce\xd8\xde\x17t?\xfc\xaa,\x17-\xdd\xaf\x83[\xe2\xdb\x8e\xbb\xfcY\x0f\xcc\x99\x8b\x0b\x12\xf4\xc1\x86\xd5i86\xf0"\xac\x14\xefJ7r\xb1\xf5\xa7\xb6\xcf\xb2`\xfa\x86\xb4TDC:\x06\xa5\xec\xef\xec\xe8\x1b*\xc7{\xce)\x8e\r\x06\x8f\xe37\xe5Wwk\x90\x10\xc9)\x17\xe8\xb4C\x04qq\xa2\xb2\x9d\xb4\x06\x0b!CKNz\x9cYn\xd0\x9a\x05q\x07\xb7\xc9L\xc7\x02\xaa\xb4\xb5Y\xa9\\Or\xcfPj\xd5\xa8\xc5\x84\x95\x0bL\x0e\xff\xa9H\xea\xa26\xe2\x15\xfff\xc1w_\xef\x01\xd2B\x80~\xaaw`\xef8\x90\xb7\xb7\x1df\xb3?\xafl~\x18\xc8o\x94\xe5\xae\x06?\x0e\x02\xd2o&\xf5\x1e\\l\xb9\xaf:\xf6\x02\x02y\x88\x08\xc8\x9d\x17\x0f\xf1\x8c\xd0\xf4\nS\xdb\xfc\xde\x89\x18z\x91R\xa9\xe9\x92_\xd7\xcdT\\\xcc"\x9b\xd6\xe4b\xd7\x1e\x965\xfd\xec~\xe8\x16\x0ej\xb4\xb9\xf4\xf0|\x9a\x0c\xfd\xa0\xaf\x0cu\x92\x7f\xdd\x1f;y\x95\xe9\xe8t,\x9d\xf2\xa8\r\xef\x00\xa5\x9d\r\x17\xeb\x05K\x84\xfb\xa3f<\xde\xcc\x99\xe3\xb2\xd2\x8b=\xd23\x06\r\xa9(.\xbac\x1f\xe9\x07\xbf\xb0\xa2\x11\xd0\xac\x89\x04\x87\xd6*\xcb\xe9\x8d\x07\x11\x7f\xc5\x0b"V\xcd\xdem\xffJ\x05a\x1f?\x9d\r@[\x85\x08\xac\xfbd6lS\xb2\x1bz+\xb7\xdd8;o\x17\x02\xd1i\xa0\xa0\x9cv\xb8\xc3\x07\xef\xa5\xa6\x8a\x9f\xfe8-\x0f%C\xf4\xd2\xc2K\xe9\x92\x17\x07{ \xeb\xcb\x80\xd8\xa7W_\x1a\x15\x021V\x821?g\n\xdfIw*\xa1\xbb\x1ay\xaf\xd7f\xc6VT\x89\xc9F\x90\xaf 
\x83=\xe7=\xbd\xb5M\xf2\x1d\xb8\x07\x87\x1ec\xf8b\xc7=\xf9\xa5\xee;c%\x88i6\x0e\xe7"\x13\x9a\x01\xa5\x86\x94\xcd\n\xa9\x1d\xbb(;0\x17\x06\xf9\x05)\xf34\xad\xdc\xfeN\x176#f\xd8\xb1\xfb`B\x16\xa8\xc8\xe1?\xc4\xf9Lx\xd8\x00L\xaeK\xb9\xe5\xb7(^\xdc\xdc~\xe5\x83x\xc8]\x041\xed\x13\x8d\xe7\x00\xdc\xed\xf8\xe7\x8b\x8d\xee\xaeW\x16\x0c\xf2rr\\\x89\xad\xfd\x14t\x81\xb4\r\xdc\xb9\xb4@FQ4\xd9\x14\x0f9,\x97\xf7a\xfe\xb5\x0f\xc2\x94@KLh\x83R\x18>\xa9\x92\x06C1\x86\x1cb\xablg\x9e\xf0\xa3\x01\xe7\xeb\xc36\x11/x\xaee\x9b\x05\xa79\x19\xca\x85\x1e\xfe\xf5~.\xe9\x01A\x0f\x1cp0\x97\x93\xb4C\xef\xcd\xd4\xa2\xebs\xaa\x9b\xe2\xb6Q\x8f\xad:\xdeA\x03\xbe\x95\xb3\xe9\xc8\x97\x0c\t\xa4O\xf2\xc3\x93e\x8a\x98\xdb\xf1\xe5\x06\xdf\xa6\xde|8+\xb84\x8e\x88\x9e\xd9o\x87\xe0\xe7\x9f\x0c\xd97\x04K\xd2\xc8w!\x9b\x1f\x83\xb4F\x9c%\xe4\xd6>$\xc9U&U\x8d\xbeb\x0c\xbdA\x0e@\xcbq\x05\xb6\x0f\xbb\xcc\'5{\x80~\x12J\xdb\x11\xb0\x87\n\'\xcb!\x7f,\xc9\xf7\xe0\x83@)\xba\xe5~\x9c\xf6{f\x96\x9e\'lv4(\x81\x91.\xfe[\xd0\xe2\xda3\x8e>\n\x99\x03\x82\xd5\x1c>\xcc\x15\x8f\xd7\xb1\x11\xa9k\x11\xfc!T\x8e{g\xe0\x9e\x90Q\x02\x04[\xc5\x06@\x17\xa5\x90\xca\xabOWgW\xb4\x1e\x16\xbd\xc2\xf9)3%N\xee\xf4c\x0cg\x19D\x95ynh\xa5,Z\x85 W\xff\x00\x07iA\xae\xdb\xf4\xe1\xa1R\xacG\xa6\xf6\xc8\xc1\x84\x11\x9c?\x15}\x03\x0c[\x81\xba$I\x92\xbb\x02\x98\xb0\xf4\xd9w0\xb2\xf42\x19\xf2\xa0\x010\xb9"\xaa\xc0q\x1d\xa3\xc0\x1c\x9f\xfb\xc7\xeb\xb0\x95\xb9\xab\x07\xd3\n\x14j\x1c\x05\x102\xc3\x98\xc9\xc0}\x93\x19I\xc3v\xe8\xfb1f\xa7\x83p\xc5^\xe3\x86\xa8\x95J\x1f\x99\xd6\xa0\xa4\xcc\xccK,\xab\x92e:E\xfd\xec\xb8n\xf8\xe8\xb5z\xbc\xb2\xa3\xe3J\xed8C\xa1\xa9\xd8z\xcd\xa0o+\x1e\xa6\xaa\xd2\xcd\x84=vn\xfa\xa0\xed\xb0\x14\xec\xdfz\x88>]\xb6\xe7\xd9Q-"\xd6\xdc\xe1\xc0`^\x1f?O\xff\xc0\xa5)\xbf\x98&\xb1\xe5o=\x90\x9c\xd5\xe3"h\xa83])\xab\xa8\xb7\xae\x9b\xc4\xb5S\xf6$\xd3\xe7\xe8\xdc 
\x82\xf6\x97.\xe9\x07}f\r\xc1\xbe\xb1\x80\xef\xd5\x92\xea\xdc\x96j\xde\xfeC\xb8#so\xc1\xc5\x0c\x9f\x88\xec\xe1\xddh\xba\x87\xee\xde0\x83\xfd\xf4G\x88=\xca\xb1\xd5\xe9\xd0\x89ldy3`\xc9\x1da\xfb\xa8\xa3\x9cN\xb4A0\xb6\x87\xf9x\x02\x0b\xab\xb74\x0f\xc2\x9e:\xa6\x8c\xce\x02^2\xa8\x81\xac\x8f"K\xb2\xbd\xcc-\xbb\xd4\x07)\xd70\x9aa0vn\x15J?\x98_\x91\x8e\x9b\xc7{B\xe5;\x08@\xb5\xdeZ\xb8y\xadk\x0e3\x07yh\xd9p\xdc\xe23\xa1\xb8\t\x90\xac\x01E\xf1\xe0H\x07\x07\n\xabc\x8d \xc4\xb4{U\'\xc8R[^yB\xda\x14\xc5\xe9*Wtm\xca+\xb9-\x91\x8bc\xe0\\\x0f\xd4\xf8x\x88\xa2\xbf\xb7K\x8a\xa9, \r\xc2\xd4vi\x95\xf1]\xfc@=\xe0\xa5\x94W\xda\xcd\xbf\xd3S\xa9,\xcc\xcbq\tb(\x16\x15\xd7\x91\x80\xdf7\x17\x1f&\x01\x04\xe9`9\xe9\x02s%\xe7\x19"\xee\xddz\x14\xf3\x887\xf0\xa1eZ\xa1\x85\xcd\x9b-\xb6A\x12r\x8bXhrkD\xfdkB\\\xc3\x1a\x96\x10\xc6\xe7-\xb2r\xc0S\xbb\xb8\x1a\xea.\x83\xdepU\xac\xe4k\xca\xc2e\xce\xa3\x0b\xd1\xe6[\xe1\x8c\xdc\xd7p\x14\xde\xfeZ\x96m\x02|4f\x94\x9c\x1cn\'\xc9\xb8\x83\x03QQ\xd7X\x8f\x01\x921\xac\xbd\x1c\xbd,\xd7\x12\xb0C|\xe5B\xa7\xb3}\x14>\xc7\xe3\xd9+t\xf7\xaa@%Z7J)4%\xff\x88\xcf\x0b>\xe25C\r\xab\xd4\xff{\xf9\xbe\x92\x9a"\xbf\xa8\xd8\xaa\x0e\xc2\xe7\xc6\xbf\xd2\xda\x19\x93\x9e\xbc\x88\xce\x1c\xf6^f\xe4\xc1\xe1|\xf7\x98\xee\xbc\xb8$\xaf:\xadPG\x8f\xec\xe4\xcf\xe6\xcc\x0f\x9cE\t\x1eb~ty/\xb3\xe8\xcd\x1f\\\x97s\xc5\xe9\xdbw,#\xf7\x8cD\x14\x80\x96\xb4\xe2c\xfa\xf8y\x1d\x9a\xaf\xbe)\xf12bg^4\xa6\xd7\xb4\xf6\xaa\x9f\x02\xee\x9f\x99/p\x80\x10\x17%\x18%^\xef\x92\xfe\x03M\r\x17;8\x8b\x84d\xd1\\3\xeb\xabS\xab\x16\xaf\x95.\x15\x8f\xbc\x94\xd2#CT\x01\rie\x95Fd+\xd3%\x8br\x16\xf7\x10\x06\xaf\xc4X\x11\xf8\xf35\xae\xb0\xb7/\\d\xb3\xac\x81+X\xbb\xdb\xb1\x0b\xb8G\x9b\x9e\xcdP\xda\xee7\x1e\xe3\x82K4\x9d\x00\xf7O\xd6\x12WYo*(i\xaa\xfd\xb1\xe1X\xb54\r~e\xdeu`=\x87_\xff\xb2\xbb\xb8.\xc3r\xa2\xd4"8Hu\xdb\xa09\x8ez\xb1\x83\x87\xdbX\x0c\xd1\x08\xd3So\xf6I\xf3}\xbf/\xa5\xe1\x07\x13\xad\xffv\xb2\xc8\x10\xbdfC\xd5\x96q\x8e9\xdb=\xba\x9aap\x11!\xb1\xbd\xcc\x1a\xf6\xcb\x1a\x95\x9f\xab\xfc\xabn\xec&\x01\xfbq\x9f\xa7\
xd8\x81\xb9\xf6)\x00\xed\xf5\xd0\xb3b\xb8&[\x8b\x0e\xcb\x0f3\xfe\xad\x1b\x0bk\x1e\xe6#\xaf_\xba\xf2/0a\xac\x98\xa6\xa2\xb4ld\x9a\xa02Q\xb80\xdc\x92\xa0\xe7\xb2\xa7P\x0c\x1e3?z\xdb\x9a\xf9\xf2\xc2\xb2Q]\x88\xf4\xeb\xd1\xb4\x96%\x8a\x89\xb0\xf4P\x92\x16\xd7\xd6a\xb2%\x12lS\x19\x8a\x85\xeea\xf3\x8c\x06\xcc\xa5\x1d\x99\xe4dg>\xd9\x06\xb5^\xc5\x1d\x8f\x88\xf79\x82\x1d\xb4"\xe5T\xf3\xac\rk.B\xf4^\xdc\xe0[\xe5 \x04T\x01D\n\xad\x81\xc6\xc5k\xeb\x0f\x93\xb4Q\xa51\xcf\x92>\xff\x87q\x94P\xbdP)\xa5\xad\xff+\xea\xf6\x89\xd3d\x04S\x1d<O\x16u\x07g\xd0\xa2Kv\xebs\xab\x1ac\xf8\x1f\x14\xca\xb6\xd6\xe2U\xea\xeb{\x1e\xf4"n/a\xd9\x04\t)r\xea\xe3\'\xef\xce\x7f\x86jr\xa7\xcb\xdd5\xbc\x8d2\x8ePbx\xda\xcf\xc5\xb32G\xc1\x87&\xc9,\x86Gm\xe8\x8d\xd8\xf5\xe7\x9b\x14\x8a]\x9cX\x8e\xe2\xb1\xe4S,;\xa9\xf8\xcc\xb8;+b\xb1\xea\xf2\rH\x96N\xaf\xed\x0c9\xdfFR\'@\xc8K\x07\x13L\x98:\xb9\xae\xef\x0e<7\xea\r\x04\xcf!\x1e\x1e\x18\x1bXZ\xf0\xa9\xd6\xab\xce4D\x99\x15\x05\xa9u<}\xba!-\x96\x96?\xbfX\xe2\x90\xb6$!L\x18\xda7p\x9b\xbc\x86z\xa6>Qcjs\xfd\xd5\xb1\xedw\xcc\xcd\xcb\xac\xb5~\x1b."\xbd.Vug\x91\r\xf0\x14$\xd5\x91\xe4 
\x8c`\xbbP\xf4V\'\x07\xd0\xe3\xb6n\x8dK\x07\x8cD(\xa6\xf2_?4\xca\xce\x9d\x9a\xf7\xb7\xf6\x03\xae\xf7\x06\x86\xd5|\xa4.\xbb\xe1cy\xb54E\x08G\xb1\x8d\x0b\x94\xed\xd1\xc1\xd1\xee\xce\x0cf\x10\xf5\xa1`\x90(\x1b\xea\xd6\x99u\xda\xd0\tV\r\x13\xe5xbx\xef7\x9c\x87\xfb\x91\xec\xb0<\xad\xe9\xf75gA\x06$\xed\x9c\xc8\x98\xe0\x89]\xd4\xb8\xb6_\xe3J\xdb]\x9ai\x8c\xd2\x8d!\x83h/\x00\x11\x81g;:\xc7H\xdc\xa5\xea\x91\xd0\xfa\xd8,\xabl\x7f\x93\x82\x1c\xa5\x9a\xec\x8a\x83i\x9e\x14i\xe3wK\xaa\xbf1=\x03\x9b8\xf8\x13*\x88\x8b\xff]]\x98e\xde\'\x1ew\x99:\xb9y\x02\xf4\x17to\xac)\xb6\xf4/i\x93\xecF\xb4\xec\xf30x|5\xd8\xf6\xc1\x8f\xb1\xd0[De\x16\xcd\xb6Pd\x03\x04\xeb\xc2[\xfd\xec\xa8\xab\xe8\xc33\x8c\x82\x8c\x9c\x96\x14!\x9b\xfc\xc3\xe9s$\x86\xa4\x12-\x9f\x8e\xf2\xe7ik\xf3\\\xa3\xab)\xbb\x0b35\xe0\xef\x1e\xdc\xf2}\xb36\x84g\xe9\xb9\x10o\xb5DO\x00\xc5\x1c[\xb1\xfc\xdc==BKk\xa3\xe7\xeeb\x90^\xcb\x97;\xb4\x17\xb0\x7f\x91;\xad"\xd5bc\xe3s\x93d0Z\x9f.\xe1|\xe2xrJ\xa1[\xa1k\x93\x0b\xe5.\xacP\x88\x15s:(\xb5\x04]\x15 \x81\x19\x11\xb1\x87T\xf2;\x19\x91\xd7\x85\xae\x83-C\x19\xfd\x81o|\xeeX\x8cx\x97e\xb5\x80G\x1bP[\x8f\xad\xa3\x02\x95\x92\x06\x13\xc4;\x95X\x9e\x87_\xd2\xa7k\nO\xd4\xb4\x98?\x7f\x93\x08\x06\x99\xa2"K^L\xb3E\xb8os\xaa\xfe\x98\x03\xa4P\xbe\xfd0\x93\xca\x8a<\x15\x1fN\xac\x15\'\xa09\x96XI\x80\x80!@H\x1f\x0e|\xac\x06\xe2*\nk\xd7\xd9\x90\x14\x93\xdf_\x05x\xd4\x15\xf7\x0bA\x17b0\x95\xe9y\x92\xc6\xb2Q{&e\xea\x06\xd0\xff\xe8Ei\x00\x03&\x0c\x17\xa5\x05\xa7\x84\x90\x9d\xeb2\xe8\xf8\x02l\x05TOA\xd0\x1b\xa9\x95z*Q\x05\xc8r=}~\x0b\x1e\xed\xa3i\xad\x8bC\xe2)\xe0\xe5\x9a\xe6\xfe^\x127\xe0\xd8\x05\xca\xfd\xa9\\^H\x18\xac\x9d\xe6\xf2\x1a\x89%.\'wy\x8f\xe7\x91`\xb5>-\x0b1\x8e=\x83\x87{O%(;/\xe1lI\x18\x93\xd4\xc3\xba\xbbS\x97\x99l\xc3\xcb1f:r?MH_{\xbf\xcc\x896\x81\xfd\xc3\xb3Ep\xc9X\xbf\x17\x8c\x19\x15\x83\x84\xef@\x95\xfdg\x13>\x13\xb4S\x92\xb6\x9aB\xfc4\xeb\xce\xf03\xa2)\xcb\xed\nN6\x01`^\x02\x96\x96\xd8Qj\xa80\x1e\x90$\xa7\xc8E\xa9NZ\xd5>\xf9\xf9\xb6M(\xd8\xa67\x9c\xd7^\xbcZ\xef!^<E\xc2\xa4N7\xb6\xda\x06\xd
9_\xdc\xf3\xed\x16\xd5+\x13\x15\xf0\xa8\xd1\xe2aH\xf4)Qn\xf75g\xff\x88K\xedfw4\x0b\x81Kf\xbb\x0c\xae\x039\xd7\x8a\xfa}\x04\xb2avv:\xa18\xfco0\xdb\xd8\xcc\x06|R\xfa@\xa37F\xe8\n\xdfI<Q/W\xb0D{\xf5U\xf4\xa8\x8f\xc85\xb3\x880\xa9w7\xac,\xde\x0c\xf0\xed|\xa19{\x9d\x9c\n\xaf\x14\xfc\xf9\x963\xfa\x91\xe2x \x90(k\x10#b\x18\xa9\x0b\xb1\x00c\x06\x14\xb3\x9d\xf0\x97\x08OD\xdbSb\x9e\x8a\xde\xa5i\xc9g\xec\xc7\x8b?4\x10\x1c\xeb1+\x8f>\xfe\x9c\xfa\x9b\xf7\x81\xeat\xe0\x85:;]&\xe95\xdf\x9a\xbc\xa8\x1b\x7f\x0bw\x13\xd9\xa5Q\xba\x8b-\x82\xef\x04\xfa\xcc\xbd\xc0p\x8a\x0b\x9c\xcf\xc7V[b\xb1A~\xa6\x90=\xaeZc\xe6\xe8\xe7\xe3\xad\x1e;\xb24w\x9b1\x0c#w\xba\xca\x0f\xb9H\xb6\x196\xc9\xcf\'\xa1G\x94\x9f\xd3P\xf4\xf7\x1c\xc9\xcc\x10\t\xed\xaf\xad\xd5\xa6\x11\x0c\xf3xGF\x92\xe9\xc8:\trR\x0b\xc2-G\xf8\xcb5\\\xc3~B\x87\xd2\xe2\xa8\xd1\xdeS\x115n\x01\xde\x1e\xed\x1d\xb5\xb0\x8bH\x83\xf1:\x92\xddYQ\xca\x0e\x10Q\x17,\xed\xb1\x83\x8c\xffn\x16\x07\x01\xdcG\x84d\xa8\x91Fg??\xf9\xa5I\x82W\xa7z\xbf\x1bS \x04\xceQ\xa5z\xb5\xbb\xf6*%\xf59\x900At\xaa\x17\xe5oG\xcd\xe5\x92\x7f\xc8\xd7\x8e\x1e\x1f\xdd\x87\x12\x19\xb8\xbb\xd2KX`\x13c\xc6l[?\x18\xe2\xbb[\xa6\xd3\xcbr2\xf0"\xbd\xf3\x9f\x9a\xd3\xc0Uq\x9f\x92I\x19\x1b\x9c\x12\x8a\xcf\x16{9\xe6\xaeX\xd6\x12\x0f\x80C\xd3\x84\xb2\xa6\xfd\xca\xbcF[\xd0E\xfbAx\n\xa6\xcei\xb1*\x8cQ_\xb0\xf3\xcdr\xa4\x99\x1c+,\x8c\xb2\xe6\xf6Y\xdd\xc7o\xc1\xbc\x1e\x19\xb3\x80$\xffr\xbag\x10{3\xa4.&W\xa1]\xf9\x97v\xcf\xf4\x8ai\xf4u\x8d\x1c\x96\xdb> Q\x9ev0\xec\x03\xab\xf3\xdf\x08I\xcd\x13\x92kP\x8e\x11\xe2\xce\x8f\xaf4|\x7fx\x0c\x97\x8e\xb2\x1f\xff\xbe\xde\xac\xce\x0c\x05z{\xcbE7\x17\xc7\xb8\xe4\x8a-!\x15S&\r\x1b\xaf`\x14O\xf6\xb7\x1a\xba\xbc\xe9\x18B\xf8{\x81\xe4"CN8\x920b\xf2A*\x04S\xdc\x1c7\xffd\x0e\xce]\xde m\x01IEU.\xc9 
\xc4\x01P!\x9b\x85\xb0ra,\xc2\xba\xc3\xaf\xb8\x90L3d\xcc\x80\xb8h\x1cm\xd7A;\x9bo\xf18XuM\x1fV\xc9\x15\n\xf5FC\xc9\xd3\xc3]\xa1\x02d\x8f\xeb\x08M\xbf\xd05Lc\x11j\xadx\xb2\x85)\x08\xe7p\xe6\x04RO\xf9&\x9dS1\x9a<w\x82+\xfe\t\xbc\xa1\xa2\xe0\xae\xbd\x81BX\xa5\xed\xd0\x11Y\x1aF9R\xaek\xe3\xbe\x08\x92\xda\xde\x80\x13\xeb\x9e\xbb\xccZ4#\xdb\xfa\xc2b\x7fC\xd1\x047\xc1?\xa2\xe2\\\xdf=\x80@\xd7\x08\xfcg\x07>r\x0c\x0b\xe6O\x8aN\x84*\xed\xe7\\\xb2Gi\xdc\xf7\x03\xbb\xa7\xf9*\xd7\xc8\xca\xfcB\x8cc\x1ad(\x01\x9f\xe6\xb8\x85\xd6\xc7\xb8;$\xf1\xebW\x9bN\xc2{\xbf\xd6\x05f\xc1\xcb\xa7\xa3\x05\xea\x0b\xf4\xc2\nXq\xf0\xf8$\xa6c\n\x94\x06B\x0c\xd7\x07w=\x9d\x9e5\x99\x995\xcc4\x05uc7\xcf\x96?S\x1flV\x85F\x0c\x97\xe7\xe4|\x8b\xed\xe9\x95i\xd8\x13\xf3\x02q\xb8LUDr\xe2p2r_\x95\x08\xe2\xe2\x03\x8e\xa00\x8e\xfeyg\xf9\xe33\xf8ctx\x18`\xfbT\xe3\xd9z\x02\xde`2\xbb\xceZ\xc0WR\x17S2%\xb3\x12Jc\xb8=:\xc7\t.G\x05\xf7\xa3\xc4\xc8+\xdf\xc5\xa5\x89\xb4\x91\x16\xd4A;\xa52i\x00\xdc\xf4\x0e:\xfe\x19\x97h\x15\x8f!\xba\xc6\xe7\x10=5\xc6\xdc\xd5/\xf1\xb3\xda\x13\xac\xad\x06\n\r\x06\xa7\xd4\xd1\x98\xb0\xdd\xc6\xd1\xc4\x80\xd1h\xa9 \xf0\xc5\xbchX\xa0\xf2\x98\x16[.\x8an(\x10\x1e\xf4\xf0\xd3\xb7XT\x88\xee\xaeNiwZ\xb6H^\r\x93\xe0\xc9\xac\x04\xeewy]0\'-\xea\xa1U\xe3\xc7\xb9\x00\xdd\x80\x1c-8\x158\x0c\xe5\x05DK\xfb\x94\x15NH\xfe\xd7r_\xe2\xf7\xa2SL\xce\x97\xfc|\xee\x8a\xd0\x9b\xd2\x87\xb5=q\x1e\xb6Z\xfe5G\x98V\'\xb3\xe4=\xdd\xb3\xd3A\x1f\xe3\x88\xa5H\xff\xc6\xbezU\x8f0\x8bt\x82\x95N\xbc(\xa2\xaeBq\xb1\xbc\xf6\xe3\x95,\x9c^\x15\xa7f$\x9c\xfa\x1b\x85#\x8e\xf0\xcfi\x10\x8dM\xb5\xa1`\x12\x03\xden|\x02S1\x95VD\xd0\xd8\xed\xa2\xdc\xd7\xc1\xe9\xde\xb4\xe3\x80\xe3\x89|\xd7\xa6\x14\xf6\xbf\xd5\x10y\x9d\xd9\xef\xed\xb3\xa9H\xb2\xdb\xbf:\xc8\x90Vj\x14?\xf6\xcc;\xc2n 
\xf6um"\xc8\xc5\t\xbei\xe4\xc1\x08\xf9L<\xec\xdf\x06\x07C\x0b0\xd4\xe6B\xc1u1\xfc^p\xd0\x04_\xc3WE\xd85\xc8oH_\xef\xc2\xe3\xfe\xdb\x98D8+Cg\xa9\xe58+_\x17\x0b\x9a\x9aEb2\xf2ax\xa4\x87\x16\x90\x8bR\xfe42\xf5\xe6\x03\xd5:\xf7\x06k3n\xb3\x0b\x1bC4\x15Y\xf9S\xeb\x99\x92m\xeed\x90\xf3\x12\xd4b\xca\xa7Y9\xeb\xbbN\xdf\xab\xa4\xf97#\x92[m?\x8e\xdd\xb9\xf9&\xa7\xb1M\x1d\xf6\xfd\x0e\xa5fK\xe45/\x94W2\xaf\xe7\xd069yQ\x95D\x06#I\x9b\xc5Z{\x15k\xf3`\x88\\\x15J\xc7\xd4\xb8z\x15od\xff\xbeh\xc6`\x05C\x90\x89\xf7Z\xffT\xb6d\xa54 /\xaf\xb7h\xa7%A9\xed\xbbP\x97\x99\xf8\xc3\xe7\xa5\r\xdczUL#E\r(\xf2@\xe2\xc1S\x91\x9e \x0c}y\xe7 :\xdfW\xc6\x0c\xc7\xfc\xde\xf2\x9d\x0cHA@g\x0e\x85\xc2\xd7[C\xb5\xcb\xc6/\x91\x0c\x8cg\xde\xac\t\xfc=\x91\xde\n\xe7A/\x07b\xfb|\xee\xb3\x84f\xf4~eV\xd3\xab\x80\xbfq4*%\x9e\xce\x9d\xeaa\xd3\x980.\xaa\xd4Z(\xaf\x8d\xd8\xe1\x9a07[\xf9p\x1c\xd4\xfa55>=\x84tL5s\xdeH3\x8c\xab\xf4\xe2\xeb\x06\x9a)YI\xc8P\xce3\x8e]\x8d\xfd\x80\xf0R-\xf3h6\xba:\x0eM\xc0\xa3\xfa\xf79\'\x80\xbe]\xe9\\\x02,\\\xea\x88\x88\xc3\x9c4\xf1;\x01/\x8c\xe9b\xb6\xb4\xce4\x89\xe0\xe8.\'\x1c\xa6\xae\xd8\x8e\x8c\x9c\xb5\x0b@\xa3\x95\xd2\xb1\xac\x8c6\x86\xb3^IP\x1b\xd9[\xc4\xba\x1b\x9aJ\x8d\xf1%AsH\x97y\xb4\xb1L@\xbcs\xce\xe3\\\x1c\xe9~,&*\x98\xcf\x11\xb4\xf2:\x1ex;\xf1\x9c\xa76a(Ai\xd04-O\x0bX[\r\x93\xcaj\x88\xc5\xd7\x02\x92 \xc0\xf5\xa2j?<\x12}\xd7\x12\x83"\xdd\x10\x0fiGj\xd5QGF\x8f8A\xd5%}\xe5A\x19\n\xfdN 
#\xd6[\xe93\x88NP\x891\x06\x05\x07\xfc\xb2\xcdH\x82Y\x10\xda\xb92n\xd6\xe2P\xe1\x89\x8a8\xdd\xfc\x1a\xe1\'(\xb0X\xbcZC;HO5\x9d\xdc\x1c\xd4$\xd5{N0q\xc3\x94\xf6\x9c\xac\x15\xcc]\xf8\'\x1f\xc9\xce>\x80Z\xe4\xdfh\xe9\xf0\xc53\x152\x97\xf9.5\x96\xd5\xfc\xc5\xe1\xa7\xc5\xcb\xe6\xbd\xba\xe5\x91*\xbf4\x95\xd5\x97\x8c\xe2\x05\x99k\x8cy\x10\xb7\xaf\x1b\n\xdeU\xe9}\xf1\xd6\x83\xb5W\x05\x86\xf2\xef\x16\xbc\x96\xc1\xf8\xf8\xc2i\xc1o\xee\x812\x00\xec!m6\xd8\x17\xab\xe0\x0f\xa2\xa2_\xf4y\x1a\xf0\'\xc5?\xe6\xecL)\x878)?*5\r.\x06U\xa6\xa2\xe4H\xde\x15\xc5h2\xe45\x94\x18\x97\x9e\x0f\x19\xb5o\xa2Y\x06f\n\xe5\xf1\xd9\xe6\x1dr\xa8\xd8i\xc5\x0c!\xc4D\xa0L\x9b\x98\xfc\x06\xdc\xb9\x80;r\x18i\xea\x124:\xcc\xea;\xd8\x12\xe07T\xeb\xa8\xc0\x00\xa1\x8e{\x18\xd3y\xc8\x078 \xa8\xcf\xfbov\xba\x93\xaf\xc3V\xec\x8f\xa9\xe1Hm\xb9\x9b\xb9E\'\xaaxoX\xf0P\xe6\xc6\x0coK8A\xab\x04\xba\x89 *\xa7\x93\x01,6\\\x1c)\x8e\x8b6\x93\xa9\xc4\x07\xa1~8U\x02\xb715;\x1ev\xb9\xe8\xb8\xc2\xca\xdf-\xff\xe8\xaf\xfe\xa8V\xf9x\rL7^\xdf\xbc%\x07\xd2\xf5\xa3^\xe9t\xa0\xf1\x7f\x9ac\xa4Ul\x9d\xbe5\xc9M\x9e\xd5K2\x19|\x85\x95\xf1i/\x02\xc3\xc2\x8d\x11\xd1\xd7\xea\x89\xef\xa7V\xa6D\x7f\xa2\xf8\x0e\x89nF\xe9\xfa8\xe0\xad9j$h\xe2\x1baES\xfd\xaa\x15z\xa8|\x9f\xde9~\xd6\x08\n\x13\xd2j\x11\xce\xfa\x1a\x8b#\xa7\xadd\x87Me\xf1;\xe1UQ\xdb\x07K\x15<\xf2\x0eH\x8e\xda\x81\x19Gu\xb8X\x92\xfe`h\xff\x89\x94\xdd\xe7\xb9\xd52\xc3g\xbf\xc1\xe0\x90\xa3I\x82k\xb1q\xe8\x147\xe5)\xae\x8b\x884}\x0b\xff\x92%IH\x84Y%\xd4\r\xef{K\xc4\xdf\xcck[ 
\x1ew\xa2M\xc6\xf8\xef\xa1\xfa\x9a\xc1\x133E\xab\x17\xf0\x86>\x99\xdb\xd8W\rG\xb8\xa7\xbd\x06|\x91Z\xc9ij\x0b\xb2U\xab\xff\xba\x95>\x95m\x18\x14\x9e\xfd|:\x04&\xff\x98\xdc\xb7\x86\xbc\xd4u\x93\xb1\xb7z\x15oU\xd5\xa1P\x95\xdb\xbb\x19\xf1\xee5&/\x9au\xac\x95\xd3m\x14\x00\x91\xca\xce\x80\x04eS\x1d\xdcE\x89\xa0#\xe4\xbb\xf0o\xac\xb8\'4\xd1\xc7\x84\xdf\x9f5\xb4\xb6\x04\xf6\xfe7\x97\xac\xbe\x97\xf7g\xc1\xbc\xbd$\xd8(\xde\x06\xd0\xe9/\x14N=K<\x14\xf0(\x84@\xa4\x97D#m\x9a\xe9UV\x9a\xa8l\xc9\xbbbXW\xfb2\xf1\xa7\xd3\xdb<\t`\xbd9\xa0\xadJX\xbe\x86\x04pe~\xafSO\xab\xdd!\xb3\x91\xc6\xef\x86\x19\xafe;tw\xb7\x94\x02\xeaR\xd8\xe6\xed\x13#w>s\xf3\xa2\xbc\xb9{\x8b\xec\xb0\x87\xf4\r\x1fk^\xd9 \x94Gy\xe6Q\xe6>\xcam\x88\xe4M\xd1\xc4o\xee\xac\xac\xbdP\xad\x8d\xde\x9b\xedf\xcfM\xa9hY,\xca\xbcK\xb1\xc1\x87x\x1f\x80Tv\xbb\x87\x9b\xb9\xaf\xb5\xb3\xb6\xcd\xee\xcf\xc0\xb0^\x14)\x16\x17\xc2\x1b\xe4\x83\xcc00!T\xc1\xc8\x19\'\x17\x8c\r\xcb]\xc1I\x07\x0e\'\xdc\xbdc\xaf-8n\xa61[\x0c\x8e\xbd\x17\x98\x19Y\x83\x83\xfe\xcd\x86\x90\xe6\x0c\rnHz\x82i\x921\xc4\xa5\xdcI\xfd=>*\xd02\xef\xfa\xc0\x82e\x93\xcf}\x82\xaeT\t\xcd\xcb\xaa#`\xff\xc6g\xb6\xa3EE#\xe2\xc0\xe1[\xbb\x80W\xbb\xff+\xf2\xd2\x12\xc8m9\x88saj\x8a\xa4z\x95M\xae\xe2\x9dH\xeb-?\x86ZM\x0e\xff\xa7\x86\x99 
\xb4\x1b\xc3\x17\xb0\xe0\xc5\x96s\x8d-z>e\xaaGo\x10\xa4\xa4/\xa6\x82i3\x00^,\xcd\xc4\x80\x96s@\xc9\x1f\x81\xa8\x80\xb8\xf4\x91k0(\x10\xda[~\x19\x91\xa5(\x9a\xbf\xca\x91\xf3R\x83\xc2a\xb9w\x84\x96\xd3^\xf0&\xf7\xc0p\x99\xd5\xf5\x1b\xc5\xd4lNDr\xf1+n\xb9\xfc\xb68>\xf5}v\xd0v\x14;\xbb\x17S\xe3\x82\xd4eGBZobq\xbc\xf8\xb8?\xe3\xddV\xdd9}b,,n\xb4\x88\x9e\xc1c\xafcc\xbc2\xe3-\xe7\xc1\xb38\t\xc1\x99\x18T\x01\xad\x81\xdf>j\xc6y\x9c;\x13\x89\xdc\x8f\x959\xa3GD\xc9\xd5\x18A{!\xd1\xd1d\x18\xdc\x0c\x82\xc3\x9c\x1d\x7f\xdbv\xadx\x01\x1e\x1cYX\x17b\x9b\x89\xdfB\xd6\x8c\x8f\xeb\x80J\xd2\x01H\xa1\xfa\xe9\xe2\xaa5\x94<\r\x10\'\'$\xc2&\xbe\x1a5\xb3\xd1\x8d\xa4\xf6`-_\x7f\x16i<Q;"5CM\xf0!-\x03\x00sR\xdaR\x8c`\xfdC\x87\x8d\x00\xa0{2\xde\xe2\x0f|OP$3\x0e\xaf\x05\x11\x02\xbb\xc8\x03\xc1\xbe\x0c\xa9\xba\xf7$F\xab{+\xf9\x16c\xd0\x99\xbb\xc4\x96\x0f\x07\xbd\x8c\xf3\x19\xda\xee\x0e&\x9dWB\xec/\xc3\xac\x02\xb0D!\x8a\xb2\xbe\x9f\\\xd9\xb4\x89\x12P\xd2\xa25f\xcb\xbc\x86,\xf5f\xae|\x0b\x8b\xe7\xab;\xf9\xd5\xcb7\xde;\xe3~\x11X\xe6\x90\xd2\x97\xaf\x01l\xd5\x0fQZj\x01@\xfa\x9e3~\xc2E\x06-\x93\xf6\xa5\xd2\xf73\xa1\x8cIq\x8bWb\xbaf\xd9\x16\x85\x0c\x9d\'9\x85\x18t\x07\xf9\xadD\xf5\x99o\xd5\xa9v/\xaa\xa0\xc1\xbe\xc9\xfd)\xf4\x16\x92 
\xffVq\xb4\x976\xd0\x07\xb9\xc4\x95N*<5\'|\xbd[\x10\x16\xf8\xf6\xdf\x9d\xad\xd5\x0c\xd7;7\x0e\xb4m\x05\xb25\x89\x15\xbc\xcf\xefn\xa8\xed\xeb\x07\xc9\xa7K\xf0\xa0\xaf:7\xe5\x98\xc4\'\xef\xb3\xfe\xe8\xcd\x83U\xc6{\x98&F\x92W\xb6\x03/\x98I\x8b\x81G\xae\xd9\xe58h\xf5\xa5\xda\x05`\xfe\rb#\xc7\xf1d!x4\xfa\xf0\xf4#\xa2\xcc4\xf5f\xa3\xafwK\xa5\x98a\x94\x0c\xa5\xed\xeet\xbb>\x9cB\xaemM\xdaH\xcf#\xe9\xb5\x1du\xfa!\xf4\x1f\x96\x8c\x0e\x03rn\xcc\xde[\xe1C\xcc\x80\xd6F\x1b)\xed\xb6\xff69\xbf\x06D\xd5yf/\x91\xb5"\xc7\xd9bA\x98\x12U8\x89\x1a\nmoY\x17\xb4\xc3\t0\xcco&>\x06\x8d\xa7W\xd3\xca\x03\xf3\x1f\x94gu\xad\xcf\xd7qv\xfcF\xc5\xa469\xce\xd8*S\x86F"]\xbc\x18&\xabg\xa8\xa8\xae\xd0\xdf#[\xc4\x0f\x06e\xce\xe6\xdc\xa9\xc9\xbbc8n$\xe8\x93I\x14~:\xf2\xd0\x0f\xda.\xab\x8b\x1c\xcc\x10z\x84\xc4\xc1\xf9&\xe00Mxs\x15WV\x7f\xd4\x1eA\x97{>R\xf1\xec-_\xb0q\xc7\xd5e\xb9\x13XQw0`\xeb\xe0k\xaejE\\0\xb46\x1a\x02e9\xac\xb1lgGl .\x88\x92~\xcbp\xa7+\x84\xa7x\xa5\x80\xba\x9d\xb0\xbfR\x0f\x97\'\xde\xa1\x83\x1a\x89\xfb\xbcK\xc4\xfeZN\x8e\x12q\xd2h/"\x11\xe9Y+8\n\xd8?\x0fy\x8cR\xc7\xbc\x06\xf9\xb4\xa3\xba\x99\xdf\x8e\x96x\x1d\x89\xad\xc5\xd5\x1eu\x8c\x8c\x0e+\x82\x04/\xad\xf9\xecj~j/H\xc2G\n7\xfe\xaa\xbd;_5\xf8\t\xb5\x08\xdd\x0b6\xfe:\x08\xf4\xaaH\xbe>fn\x84Q\xe5\x10\x01&\xa1\xbcW"\x7f*\xee\xf9I\x8b`m\x8a\xc8a\xe7\xb0\x08\xc4]\xdd?\x9dAw\xfe\x07\x1f@\xe0\xbf\x06\xdbr\xd3\xaf\t|\xdfcq\x00\x1f@\xe0\xbf\x97\x14\x89\xaa\xad^\xa8\xb8\xbe4\xd9\x1a\x91\x07U\xd1\x87\x8c\x94\xdd{Y8\xb760\xe4\xe4c\xed"\x80\xa1\xd6\x8f\xf3\xb7 \x94Kh>D\xf8o\xf8)/\x04|I\x99;\xc5\x9d<"\xe8\x93\x95X\xab\xb1IJ\xca\x0e\xce\xa8\x84 
!7\x00\xa3\xce{\xcd~\xd4i\xbe$t\x89\xfc\x82\x90\x81:\x9c>\x06\x80\x1c\xb5\x8d\x9b\xe1\xed\xb2\'\xbaHLO.\x80\xc7\xfe5\xd8[")\xf9\xbf\x00\x1b@\xe4\xbf\xea\xe1\x8d\xa3@\xb6\xe1\xd9L\x91\x00\x1b@\xe4\xbf\xf6\xe2<\x15\x04\xbe\x0c\x8b\x087\x99\xf2\xaa]\x17!i\xcb\xd4\xdc\xaeO\xfc9\x92\xfb)o\x03\xd9\xf6\xcd\x12\x83\x1e\x08\xa23\xcd\x9f\x84\xe0\x8a\xc9\x92\xcbD\x91\xd9\xfcr\xebE\x8bW\x0cF\x9f\x0f\x16\x7f\x80\tm\xa9\r\xbc\xea:-(\xf9\x9a\xcd\x16\xec\x1c\xe2;\x81\x88\xc1\xb0\x05!\xe2\xb4(\x1bn\x1b\xac\x00\x9b|\x89\xbf\xea`\xc0(\xab%\t\xc6\x9f\x1d\xea\xfc\x1f\x00\x17@\xe8\xbf]\xeb\xd9\x13q}\x88`#_\x00\x17@\xe8\xbf,\xff`z\x9eE)\xcf\xe3\x00a\xac\x9ag\xd5\t\xa4\xb4\xd4\x9dX\xfb\x81\x84\xf1P!\x01a\xcc\xba\x8e\x87\xa0;\xf4\xbb\xccX\xce\x14\xe5B\xa0\xdf\xfb\xe7\r\xb7\x98\x1d\xca,\x8aN\xf03\xc6\xcc\xc6\xffa\x1f\x9e\xfc\xb0\r\xf4\x91\xb41\x8d\xb1m}\xbb\xee\x06\x13\x9a-Q\x05\x1e\xc1\x7f\xe4\x95\xb3\x9a\xe9\xe7\xf1\n\xb2^\xb4\xc0\xd3\x98\x82\xd0G\xecM\xb9\xbd\x13}\xa7ZQ\x8f\xf3\xf0\x7f\x01\x14@\xeb\xbfQ\xfe\xdab{r\xad4R\x00\x14@\xeb\xbfp\xa6\x14\x9d\x07=\xc6!\x12\xa4\xf2H\x9eaJ\x08\x00\xc1\xf2\xbe\x90\xf2A\xc85\x89\x87\xbd\xfcb\xc0\xf5\xd0\x13[\xcc\x086\x1336\xff\xaa\xcb\x13\xab\x17\xab\x81;\xebQ\xfe\xb0\xbb\x80p\x86\xe14XV\xf0\xdd\x05\x04\xfb\xa1\n2\x19\x94\xa0D\xf2\xaf\xc4{\x83UsJ\xe7\xb4\x11E^\xd5\x81q2\xfb\x11\xd9J\x03\xa7\xf4\x1ff\x86j\x96Qi\xf9\x1f\x00\x16@\xe9\xbf<w2\xc8\xec\xa4\xd6\xe6\x04\x99\x00\x12@\xed\xbf\x06h\x16\x1f\'\x7f\xc3Vil9\xe9\x185{3\xa5\x94"F\n>\xaf\x90\t%u\xe1\xdb@Sp\xd3@\xb0\xdc\xbe|\xe3 
w\x9b\xb3~\xe00R\x92\x92nj\xc1\xed\xc4lC6"\xb9\xc13\xd3\x8ao;\xaf>\xed#[\x83\xb5\xe7V\xa7\x16\x1e\x1cg\xdb`\x94?\xd8\x82>\x802\xdax\x18\x92\xb1\xe6\xaa\x0c\x1e\nf\x1cy\xec\x06\xec\xf5\xb7\x9e\x04\xde\xbbc\xcfY$\xb0\xff\x03\x12@\xed\xbf\xf5\x9b\xbe\xc9\xf7\xa68\x85\xe5}\x00\x12@\xed\xbf\x01\xbc\xed\xadk\xec{\xce,Q\x85\x16\x19\xb5\xbet\xa0\x03\x03>\xab\x1f\xea\x87Wm\xd1\xc5\xa6\xf9V9\x8b\x1d\xa0\xa8\x97[\x81\xa6~\x1cD7\x1d\x16g<\xb3%\xd0\xdc\xf9\xc9\x7f\xe2\x88\xe6\xe4\x15h\xbc\xda\n/\x00\x9f\xd3\x16\xbcX:\xd4\x12\x14&\x86\x06L\xb9C\x1cOP\x1d\xe5\xfe\xbd\xc2/\xfb\xd0P\x93\xc7\xfbsv\x81\xa3`\xc7-)/\xe8\xb7\x9c\x85\xb3]\xdd\x8a\x07\xc1\xcc\xf1 "B\x8a\xfc\x0f\x00\x0b@\xf4\xbf\x8a\xe7D\x16\x94\xdc\x00g\x9e\xe6\x00\x0b@\xf4\xbfr\xf5\xdd\xb2\xaae\x8eZ\xeb\x99`X$\x8d\xf8\xba\x02\x1e\x95T\xae\x93FxK\xe4\xf0R\x9a\xd3\x8a\x05\xc1b\xdd.\xa6\xf0Z^k\xd2J\x86\xaa$4\x0c\x90Y\xfc\x85\x90BL\xb3i\x13"`\xf6\xab:\xf6\xa6\x08^\xb6\x84\x12\x86\xcd\xa1\x8e\xecoaW-,\xa5\x06\xa95I\x118n\xf4\xe4\x80\x0f\xf8D#^\x1a\x954\xf6Y\x93\\\xb0\xaf\x95\x16\xb6\x88\xce\xff\x01\x08@\xf7\xbf-\x9b\xdf\xb4\xc2E\x84M\x14\x16\x00\x08@\xf7\xbf\xbb\x84g\xb4*\x84>P\x98\xb0\xd18`=\xe2F\xeb\xd2\xb4\x19\x91\x8b\x8d\x0e\xb6\xb3\xef\x1511\xf9\xbe2\xb6\x02\n\x8e\xdb/\xa6i\xb60\xc8\x1a\x95_a\xfa\x07:\x93f\x08\xf0\x0e\xfc\xafB\xd9\xee\xde\xbd\x95\xbdg!\xc1\x0b\xee\xc8Df\xc7\x0bQ\x14\xb0h;\xb6\xee\xe2^B\xe1\x84VNu\x03\xd6]\xfe9T$\t\x11\x88\x89\xe5\x8e&\xde\x96T$\x84\x86}\xfe\xca\xff\x01\x03@\xfc\xbf\x8d\xabp\xf1\x06c\x9a\x82\xed\xa1\x00\x03@\xfc\xbf\xc60\xba]\t\xa5\xad\xcf\x01\xaci\xb6\xc3]\xda\xd2\x03\xca\x83\n?\x95\x87\xdeG\xcfR\xd1\xa0d\x83|3d1!\xfd\xdb\x04\x1c\x1e\xe0\xbb\x15f\x00\xb2M\xdf(\xc8/\x84u\xfe\x13\xcb\x0ehJ\xe7k\xfe{y,4\x1d\x0e\xaf\xaaI\x80\xaaK*\xf8\xcebO\xf7\x86#\x01n\x05o\xda>\xc9\x82\xb2\xe6\xc9_Zms"M\xee\xc3C\x1bU\xd5Z\xeb\x8dK|\xfd\xffE\xc4=\x84W\xa20\xdd\x02\xde\xb1m\xdb\xb6\xb9\xbbc\xdb\xb6m\x9b\x1d;\xe9\xd8\xb6mtl\xdb\xb6s\xff\xf3M\xee\xa0\x06\xef\xa8\xeaY\xb35\xa9\x9eT\xbe\xb4H\xb2S\xeak\x80\x98p{/
\x1c(\x85\xfd\xf9WX\x15\xd1\x15U;\xbe^\x80R1t\xad\x86\x08\x06\xcd\x95\xb66G\xa7\x92\x13\x18\x0f\x81\xb1\xfe\x85\xb4n2\xf2\xf9C\x97_\x9b\xcd\xc1\x8dd\xe3\x92\x1c\xeeu\xd1\xf3\xaa\x88\xb6\x1a9p+\xb7\x98\x9c]\x8d\x02\xf4\x02Z`n;f\x96\xabp\xc1\xab\xe4\xa3\x88Jd\xd3\x9a\xe8\xa7\xd3[\x91)\x8b\t\xa9\xf2"\xb5\x87\xc76\x84\xd8\xfc\xccs\xcc\x17\xd3|\xed\x07DX\xb8\xb7w\x11+\x1d\x15\xf1#8\x8bg\x13\xf0\x1f!\xc7\x13\xbd*XD\xdf\x81\xee[_\xc4\xec\xd1\xe5\x1e{?\xf2\x01\xf9\xea\xeaY~H\x92\xf1|;\xc6\x18)\xcawQ\xb8m\x0c\x1e\x9d"\xca\x8d\xbf4\xe2_\xe1\x0cW\x00>\x83\x01b\xeerH\x89>\x9f\xd95\xad\xcc\xcf$\xbf\xfbS\xf6\xff\xaa\xd4\x94\xcaD\xb1\xaf&h\xaa\x0f\x0c\xc8\xca!ReD03\x1a\x97_\\\xc0\xeb\xe0\xbe\xc4\xc2mdi\xe9?\xde\xcf\x9c\x86\xac\x81\xbfM\xc4\x95\xd1\xf7\xcc\x12<\xa3<\x9d\xff\x00~\x0bw\xf4^H\xd7\xd0\xd1\xd0g\xdf\xdb\xc8\x02\x04\x84\x07z\xd3\xb1\xd7\xa7z\x91\xa2W\xe18\x9f\xdc\x04\xdb\xb4\x17\xd0>\xbe\x94?\n\xe1\x05\x84\xb9\xa8uq6\xd4_\xcd\x80\xa6\x91\xeeK\x00:\x9f5\xdbx\xbe0\xd4h9r\xde\xe5\xdf\xf1g\x01\xcc\xc6\x8b\xc7\xd5\xbf5h\xebMJ+6\xba\x05\x87\x8e6\x9cm=gM~*\x11\x06(M\xfd\xecu\xa0?a\x1d\xd7\xaa\xbb\x0fx\x82\xff\xae\xf5]\xa9\xc16/*\x81\x8ay\xcf"3\xce\x18\x9a\xe1\xd3\'\xbd\n\xfdo{s\x0b,\xa4\xe0q\xba;\xe5\xd5\xff\x98\x98,\xbb\xe6\xc1\xbc\x846\x94\xe37\xff\xe9C\x8cf\r\x06j\xb4\x9ej`\xd8\x00-\x06\x16\xbb\xb4z\xff\x81H\xd9\x971\x1a\xaa\xc7\xe2R\xf8\x1a8;D=\xb3\xd5\x96_\x06d\x07y\xfa\xfb\xa1\x91\xfe3|\xe3=B\xabl\xb8\xfa\xfaJ\xc6\x04-\xa9\xf8I?\x16\x10\x80\x1d\xf2\xbb\xff\x03\xe7/T\xcd\xf6\x15zS\xb2sq\r\x9d\xf6\xb6\xe9\xb5?\x08\x9f\xf0P/\xba\x91\x17t\x99\xd5!\xce\xab6\x80Kx\xacw@XJ.\xeb\x9b\xd1\x94\xb6\x84\xad;n\x0b\xe3^\x0c\xc3v\xe7\x938\xcf\xab\x0fI\xc5Z\te\\\xe4_\xc6\xa5[\xc6\x96K\xd6\x10/I`[\xf4\xabq\xe6\x1en\x18to\xb76\xdeq\xe6\xa7\xba\xc7\x08#_+9\xc9\x93\x8b\xda\xd66a\xa3\xcc0\xbd\xbe)\xab\xb3/!\x82\x1a\x0c\xe3\xf8\xad\xa1\xf6\x05\x14\x86\xe57U\x01\x15\x89,\x19\x97\xc9\xe6\x15\xd5\x18\xc2\xe7\xdc),\xe0`\xf9\x07 
$\xdc\xd7{\xf3\x0b\x11=*o\x9b\xb7\xc5\x12\xc0/<\xd8\xbb\xa7Y\x93bmA\x8b\xd1\rF\x08\xa8\x17\xf2\xe9\xceSr\xba\xd9\x83\xdfuH\xe7\x9bd6\xf4\xb04\xef\x05/l2+d\xbah\xbc\xe9\xcd\x8d\xdbHFoc\xc6R\x85\xeb\x0b\xf2\x90*\xa6\xef!\x17\x10*\x86I\x01Y\xce\xdfO\xf6*\xc6W\x99\'\x0c.\x18\x92\x0c\xff5\x93:l\xd1i\xe6\x01\xc7j\xe2\x9a2K\xb5\x01\xae6\xba\'B\xc1\x89\xf4\x03\xc6)<\xde\xfb5s\xa8, \xfa\xcc7\x08`\x13\x9e\xea\xcd[Qt\x9c\x06\x8e8i\xd3\xb2a_=\x0fT7\x13\xa8\x16\t\x01\x94\x0eN\xa8o\xc1_\x1a\xe6$\xd6\xd9\xe9\xa51t=\x19\xa6\xbd\xa4\xca\xe4\xff\xe6\x82\xde\xc4&\x14\x7f\xa7C~\xd8M\ng\xde\xac\xc5:nR\xcf\x95\x87\xc0\x92<\xb5F \x04\xc1\xc0R\x7f\x06\x8b\xdd\xfdQCP\xbe\xac\x8d\xf8]\xb7\xd8\xdf4\x06Q\x9d\x1a\xbeXV\x06!V\x0eih\xe4\xec\xf4x\xc1\xf1\xbbx4"\x93\x99\x90##\xf0\x07\xe4\xbf{z&\xa9:\x15{\xd6\xc3\xba\xad\xff\xf7\xefcxrj;L9\xb2\xb2\xc0\x1e{\xd3\xf1X_\xcf\rS)R*B\xc8%\xc9/\xdbk[\xef\x92\xee\xe0;\xf1\xd2\xc3no\x1f\x9b\x8c:<\xf0\xd1Ou%x\xd1c \xdc}h\xccp\xa1K^\x8aS\x8eC\xdf#~A<%\xd3\xbf\x16\xda\xe2W\xe4\xb7b\xaa\xa7\r:\xe7)\xfa\xb1\xcbS\xaa\xac\xde`\xb6\xad\xae\xf0\xf7g\xed0UP\x10\xe2\x11\xd1\xc0\x0f\xc8\x7f\x19L\xe6\x0e\x8ciH~\xddS\x83\x02\x98\x84g{sG\xb7"-\xcd\x80\x1c\x98\xb4\x06>\x0eL\xe2\xf5I\xb1\xe1p\xee\xe8Zvx\xd98\xf8X\x9b6\x9b(m\x9d\xcf\xdaj\xeeG\x00\xa5t}\xdb\xe3\x14{\x89\x0e\x97\xa1&dVo\xd5\x04\xc6m\x1b\xbf\'\xa4\xa7q\xe7\xdc\xb8\xd3\xd8\xa79\xd66fB\x1b\xb1\xf7\xa3\xc9\xe8a8&W\xc4\xa2\xfb\x96\x9f\xf6\xa1\xfc5\x14\xc7\xf3\xd9\xfb\xae\xa8\xfe\x123y\xf5\x1e\x04K\xf9\x93\xb5\x02\xd0\xf4\xd3\x1f\x10V\xe1\x7f\xbd\x88\x93\xf9o(\xdd\x1b\x12\xc9\xee\xff\x0b\xe3\xb93\xc3G\x99f\x83\\o\xefRq\x85%/\xbe0&\xe1f\x02\r\x16\x1e\xff\x9d\x9a\x18U\xd9\x0e4\xc6&\x83T\x8f\xfcUS\xf9O\xb5j\x9b\xca\x8b\xf3\x1c\\\x1d\xd0\xb9\x7f\xacD\x8a\xb2\xd5\xcc\xd9\xf1\xa47\xa7\x85~\xb8\xaa\xefn\\\xec\xc2:\xd1i\x96\x9f\xcd4-\xa8v\xda\xd8\x0c\xec\xf8\xdb\xfcL\xf4C\x04\xf5\xd0+\xa1"\x9e\x80\xda&\x9b\xae\x05\x9e\x81\x86\xf6D\xd0\xdc\xca\xb16\xd4QM\x11aX\xe8\x8f\xfex$\xd7V\x80q\xe3w/\x80Fx\xb9w\x02p\xf0+P\x8e\x9f\x84\xc6
\xed\x7f\x8c\xeab\xf5\x8c|4\x15\xa7\x98F\xe8\x94rA~\x8d\xb9W\xe9\x9f7\x0cE\x08h\x01\xeb\xa7~I\xbd\x80Z1\x05\x93\xfa(j\xb5\x86\xd9\xcd~\xac\xe0\xb0\x1b\xfe\xe6\tj\xe8N\xcf?\x04v9\xab\xe0\xff\xb8\xa3\xac\x8e\xf9\xdb\xe1\x0e\xea\xbaB\x0e\x8eW\x1a\xef\x91\xafy\'\x8a\x1f\x00,\x13\x9a5nW\xcaC\xd4\xad\x15\xf8M\x03$u\xf0\xec\xb9\x89\xee{\x01\x94\xc2k\xbdsp\x82\xf3\xc6\x04\x9d&\x17\xffS\x19~\xda\x07\x9b*>\xc5\x96$\xca|d\xe9AL\x0c?\xd0\x85\xd9\xcd\xfb\xe2yF[\xf0T\xbe\xe6\xfdRq\t[\x11\xb2m\xd8\xc6$\x07\xa7\xbc_[\xd56eh\xff\xa9 \xfbw\xf6\xd5\xd27I\xf9+;\xd2\x04\xf1"\xc9O\x95\xc6\xbc\x94\x10g\xf1y\xbb:\x13\xbc\x97\xf7e\xde\xb8uJ!\xc4R\x91\xb4\xc0\xe0\x82\xb2\xb0}q9\xbb\x82\xd0$\xe3\x8c\xa2\xd4\xf6\x0f\x08\xb9\xf0Fo\xe4O\xcbP\xec\xd2+\xb3\xbe\x18\xe0?\x0ea\xfa\xd85\xbb\xff\x939(\x9c\xa6\xe0\xa6\x81\x0e}B6\x84\xc4\x8bzG\x92\xb7\xa8\x92\x86\xd1\xe4M\x15D<(*\xd7W\x00[\xa1\xee\xbe\xfa\xa8\xe8\xeeGdd\'\x80V 1\x1d \x81\x0eD\xe9\xbe\x81\xd0C\xa1\x8bF\t\xaa\xc8\xe2\xcd\x88\xa1\xea\x14~\x82\xc6$ZwH\xd8\xc2k\xea\xf5\x84E\x12\xfepb\xe0Y\xe20\xf7d\x04\x07\xb29zx\x10X\xee<\xed\tTfYc\xed\xfb\x03\x88\x85wz\xd3\x10&B\xb9f\x16\xd1x\x9f\xfe\xc7\xd5\xc0\xc9\xac\xfa\xe4n\xf9wFW\x86\xba\xa5\xc4\x9e(jZ\x00\xe1T\xf3h\x1c\x94\xe3\xb5\xde\xd1\xa7\xac\xf8g\x87U[T\x12"\x88\xbd\xf8\xc7\x88\x14\xb7h\x11\x07\xdcX\x94\xfa\xd9\xa3z5H\xbdQ\xb4X\xb6\x92\xbd\x8b){d\x7fM\x986"1Z&\xdda\x92=\xa5\xff\x18\xf1\xfbf\xffi\xe9\xa3\xbb\xe35\xdb\xb3\xaf\x8fu\xb2n\x87\x87G(\x8c\x9bf\xa6\xc0\x9f\x1f0|\xe1\x83\xde\xb7\xda\xdf\x85\xc6\xef\xb3\x8a\xd4\xe3\x80\xff(5\x07v\xc70\xec+\x17r\xa2$\xa3\x8e\xa5\x15\x87\xb6Q;\xef\xa1uR)\xdb\\\x80\xc3\x98\xfe\xdd\xec\x96\xb0\xaa\xec\xcb3&\xa3@\xf6"\xe7\xd1f~\x03\xde\xbe\xee\n8\xee|\x9e\xaf\xd0\xf9\xbfz\'\x80]\xabp\xc4\xfe\x8e\x16\xaeJN\xd7\xc3\xb3\xca\xe7\x82\x13\xbe\x07\xa9\xfc\xb6 
\xa09W0qs\xd5\xd8\x81\x04u2+\xf3\xd7\xee\xa7\xe46\x83\xc0\xf6\x11*[\x85.\xec\xe0\x07\x04W\xf8\xa8\x97\xb6\x11\xfc\x1c\x99\xab$C\xe5\x15\xf0\x1f\xd1\xac;\xe0*\xc0\xd0\xdd\x87\xcbE\x1eD\xf6(x\xb4\x0f\xe2\xc8\xe9\xd4\xed\xad\xab\x81\x04k\xf4\xd9+\xe9\x19\xb3\x95\x0c\xf3~4\x92\x99\xac\xd9I~c\x7fZ\xec\xc7\xf3\t\xf0\x96\xd0\x86\xbd\xbb\xce]\x05\x95\x0c\x05\xe0\x1a\xc9\x89\x10g\xe1\xe19gk\xb4\x90\xcc\xb5\x882g\xd9\x16\x8f\xc0\xd0\xa7\x18\xa9\x16\xfd\n\xa1\xfa\xd9\x06\x80%X\x81NPn3\xa9\xfe"e6\xec\xb0\xe4\tH\x7f\x8a\xaf\xfb\xc5\xc3\xc2\x18\x06\xa7\x82\xa9\xc3\xe1\xe3\xa7\xdb\xd7\x7f@\xf1\x84\x0f{o\xf1\x84\xac\x96$\xc0\xd9\xf5\x01\xffiv\x02\xd5AIM\x98\xdee\xd5\x10\xfe\xc0k\x8d\xcab\xda2\xba\xd1[Ah/\xaa`\x8eK\xff\xdc\xdd~>\xc5/$\xb2\xcd\xa1y\xc3K_\x90k+Y\x19S\x814\xf8\xea=\x01\xee\xbc~\x94\x10\xfbb\x90S\xe6\xfa\xdd\xc4{\xe4\xa8b\xdd\xb2\xee6\xa1>\xf7\x88\x89.$\xb1\x81\xdc\x8a}\xae\xa3s\xc96\xa1\x9a\xb4H\x91\x87\x04\xd9[\xb6\xa2\xff\x8b\t\x80"|\xdd\x8b\r\xba\xa8\x07\xe9\xeb\xee\x98\xfe?)_C~\xc4\x82%*ji\\\xafZi\xca\xb1\x1e}%\xe2nj\x97W\x0b \xe8Gd\x12\xf1\x18\xc01\xf0Zr\xf1h\x81\xcb-gFE\xb6C\x80nZ9\xae9\xb2\xd1t\xf1/p+\xe7i\x8e\xab\x9e\xbbR+b\t\'\x10\xb1\xfc\xb85G\xdf\x00\x1f#\x91\x1d\xe8%^\xb0\x1b\xacg\x8a\x92\xc5\x8as\xcb\xeb\xe0\xc8\x95[\x15\xa8\x93\xeb\xcd@\xd3MV\xce\xa8\xbe\xa1P\x85\xafz\xfb\xd2$%\x86\xc3\xc1\nl\xad\x00H\xc2\xb7\xbd\xcd\x08T \xd5\xb4N\xf8R\xfb\xf3\xfb\xbc\x85w\xa0\x04\xad5:\xdd)\xa0C\x19,\xba8o\xd7.\xa0\x95\x85-\xf7\xf0\x80X\xdbi\xeb\x82\x07\xce)K\x96_lC\x02\xac\x07\x91@\x1f\x8b\x91:o\xee\x08\xedLY\xc8\xd9\xeeFZ\x1b\x96\xbf3c\xe3\x08\t!\r\x10up\xaax\x94\xd4\xe5\xc1\xfc\x89\x0c\xc3D\x1f\xffF\xe5J\xdb;\xbd\x16\xc9k\xfco\xacV\xab\xad\xcb\x8f#\x9c\xbc\xbe\xa1\xfe[\x1d\x84\xaeN\xf6Z.K\xc6\xb0\xce\xbf\tL\xa6\x94id\x9cm\xb5\xf9\xff\x8f\x9ft\xffo\xce\x1b\xd6\x86?r\xfc\x1d\xc2\xbd\xd1\xdbQ\xe5k\xd2\x87T,\x14s\xa0\x00\xd6\xe9I\xc3\xf8(\xf6Q?H\xf4"\x1b\x11\xaf\x91 
\xfa\xc8\x99IXT+\x13\xdf\x90\x81\x1d\x8b\x1f\xe9\xef+.\xa5\x06+A\xc8\xf9\xfe\xdfl\xa5\xff8*\xc9\xae\xf9G\xdc?\'\x81\xe9\x00w\x93\xd2\xb5\x9d\xa2\xe8\xb1\x1e\\\xd0\x93\xa5%j\xbd\xfe\xabnCM^\xf9=6\xd58)<"d\x91m\xed\x9d\xd3\xe2\x92k\x1c\x86E\xee\xe0l\xcd*&\xd0\x80\x02\x03\xe0O\xcby\xa8\xa3\x86\x11\x011C\xe9\x89\xff\x1fT\xa0\x1ade\xf9\x81iH\xfb\xda\x0f\xa3y\xdf\x00\x8adM#\xb2C\xca\x04\xe2\x83\x03\x89N\x10\xfbt"yw\xe7\x12\x83\x83\xc7\xb8\xd5\x1a\xe1{d\xb0h|\xceMN\x1d\x8b\x8b\x1d\xbd\xf0]D\x8a\xe5\x84n\xbe\xb7P\xb1\xe9\xc7\x924Z.$\x9c6_\xe4\xa9\xe4\xacat\x9e\xaao[_4\xde{\x16\xdbP\x91\xb7\xa9\xbd\x14\xee\xf3\xe8\xca\xf3x\x94,\x01\xe5]\xe6\x90\xff-\xd6/\xb1\xba\xe2\xdc9\xfd\xd7\xab\xe4q[}T\xf4[\xff\x18q\x96\xbf\x03_\xacp\x11\xba7\xa7\xd1&d\xca,kHq7\r\xa3z\x81\xdazyd5\x96xd[\x17T\x8a\x1c\xad%yB\x8f\x9f\xd1\xc5\xc8\x0b\x1aV>\x80\xf23\t^\'\x02>\xaf\xdbx\xb7#\x0f\xadM\t5\xa6\x01\x05\x1b\x9b"\xe2\xfa\xee\xbc\x9b\xcc\xd8\xc0i\xef\xda\xb0!:\x95\x84a\xe6J\xc7\x16}\xab\xc0j\x14\xb6\x7f\xf7\r\x9f\x97\x96\xe7e`\xc7o\xd2\x80xT\x9b\x0b$P!;\x9a;\x9d3\xf1\xf6\x0f"G\x1b\xdc\xfb\x87\xf8hz\xf3\xe0\t\x0bHi\x02I\xbf\xc0>\x07\xd7\xe1_R=\x9f\x84\x9c:\xd1)\x88+.\x95\xdd\x82F\xdf\x17F\xee\xf7\r\x9b}9\xfc\xc1\xca\xc6\xc7\xf6\t\x96E\xb4au\xac\x10`\xf5\xe8\xc89\xd1\xe1y\xf3\x9f\\\xe94G\x02G\x1aX\xdd^$\x98\x90=\x87\x07:\xe1\xfcE*\x9b|\xc1\xd4\x93\xd3\x06\x1f\xef\xc5b_\tth/\xbc\x8a\x81J\xbd(\x00\xd2 \x81/\xee\xc2e\xac\xc3\x8a\xf6ps\xfb\x9f\x83I\xfeK\x9f\x89w\xcc\x1d\x9d\xc8\xdf\x19 \xf7\x8cJq\'$\xaf\x96\xad\x10\x89\xa3\x05\xa7\xdc\xc8\xf1+\xb5\x7fV\xcb|\xfb\xb4B\xffQO\xba\xee\x82\xca*\x12\xd586\x96\xb3\x1f(\xe3s*\xd2\x97\xfa\x0ey\xc9S\xcb\xef\xb2\x935Y\xa5\x9b\xc9{y:$5\x91\xc1\x0eo\xc8\x05y\x93\x03\xe5\x1c\x99L\x0b\x16\xc7(\xa9\xa3S\xcb\x1d\xc2\x17\xb1\xb1\x81\xb0\xa7I\x1b/;\x00b\xc5\xfek?\x1ckl\x11\xaf*\x8fX\x03\xeb\xbe\x9f\x84\xc2\x14{\xb3\xb6_ 
{\xa9\xfc\x1b\xcf^\xc4v\xe2\xb9\x05\xd7q\x97@\xa3\x12\xceSX\x0c\x9cl\xad&\x1a\xfb\x87\xd7\xd4\xdfer\xc5;I&.s\x83\xf0S\xd8\x06\xbc\x9c\xd5\xffz\xe28&5V\x95\x99?\xc3\xbd\x0e-\xf3SE\xf8G;\x8f\xd2\xdfP\x84c\x90\xdd~\xea\xa3szk\\\xb0\xae&\x90\x06\xa4\xe7\x9e:4< \x12\xd3\x83\x81G\x99\x86.!\xb8\x18\xcb\xe2\\8\xad\xfe/pd\xd4\xab;D\x7f\xaf]\x8a\xd3\x1c\xe1\x1c\xe1^\x85\x10\xdc\xe9\xc2\xdd\xc8\xafn\xcbh\xa4,\x815V\tn\xb5\x04hJ\x15\xda\xaa\x9b=[\xf4\x86\xa8\xd9\xf7/jV\x01\xf7+\x9aY\x0b\x95+^\xbd4I\xc6\x8d\xe6\xdbLC7\x7f\xf9\xee\x7f?f\xd6\x16\xa8b\x0e%\xbdRP\x8b\xc5\x1a\xa45\xd4\xd8\xf3\x8f\xe4F\xcd"O\xc9\xfc\x03\x11\xc8v\xa7\xaeV\xab5\xae\xb3W_\xb6l\x1cu\x9f\xbfC&/e\xf9v\'\xfd\x0c\n\xfe2\xcd\xaf*\xa3\x10?#\xd8\xd6\xf8)\xc4\xd0\xc5)\x15\xe6\xcc\xe27&\x8a^\xc4\x96k\xa9\xb3\x0b\x80\x12Q\xb1L\xee\xb8\x9a\xba=\x0b\xb7d\xc4\xc1\x19w\r\xebO\xda\xf4\xca4\xaf\xb3\x8d\xfe\xb5\xe9\xed\xaf\xff0fN\xbf\xdc\xa5b\xfdc\x8fgs\xf2\xc8\x19\xa5\xd5\xbc\t\xee\x9fvJ\xf1\x1b\x84\xc4+\xc1\xa5c;\xfbG\x1698\x1c\xa3\xa9\x01\xbb\xd3\xf5\xc1Hz\x05e\xb9\x1a\xfc3\x94"\x04\xd3\xe7W\xf4\xb1\xbe\xf2F[\xb1k\xe9\x1a\x114b\x7f\xa4\x1f\xed\xe6KM\x8a\xcf\xb7?\xd7I\xc0\x0f\x99\xa5\xc0kUT\xe7\xa7\x13\xf6\xc4\x9e\x92`\xf2:\xc3Y\x80\xfc\xbf\x92\xb0{7\xce\x8e\x1f\x13\x82\xc4\xf8\x8dD\xbb\xda?\xd1\xb9dE\x17\x1b\xca\x8f,\x13\xce\xdf.\x96\x13\xc6\xf2\xbf\x90\xd2\x90\xfdt\x89\x0bM\xbf(e\xed\xfe\x89\xeau\x8d\xc5\xd0\xb03\xb1\x80\xe30\xf9\x0fi\xa5"1>\x065<s\xb2T 
-k\xcc4D\x1b\xc3\xfbBwj\xeaFM!\x81\x06h;4\xe9O6\xf1\x91e\xe5\xe6X]\x80\xf51r\xaf\xd9[0\x82\xe1\xb4\xd3\x0b\xed*\xe46\xee\xa9\x1ck\xd0H\n}\x12\xae\xb4\xa8\x81\xd19U\xe7/`\xf6\x07\xf2\x8fs\xcdR\x1a\x92\xaffu\x05=\xce\xbc@\x0eV8\xf6z]\xd8\xaa\x1d\xe7i\xdci\x0b~\x19R\xc8\xd5n\x9e\x1f\xbae\xe7\xae\x11f\x0c\xdb{$C\xe20\xa2[1\x83\xdc_Q\xa6_\xdd\x1a\xa6\x94\xbb\xf4\x94\xfb~\xecP\x97\xa5@\x18\xe3\xad\xf9\x97\x0f\x93s\xaf\x10\xbd9\xd1l\xd2\x85\x0b\xa5\xc8\x7f\n5\x04\r\x9fH\xc8\x92\xa6\xa1\x18\xe8\xe7\xd3\x90\xe4\xb8\xcb\xa0\xb1\xcf\x9e\x8f\xff#Y\x8c\xd1,\xfbnw8\xb0\x9c(M\xeb\xbb\x10\xdc\x9d\xaa\xb5R\xdb\x99i\xf0\xc7\xa1\xe4865\xbb\xbb\xc0=\xacoUM\x12\xb4\x89\xb9r\x85\xdd*!\xae\x17M/zQ\xdf\xd0<W\x19\\\xb79.\x7f$Y\x17\xb7+\x89n\xe7\xb3\xc0\x16Q\xac\xfb$0\xfd-\x88HZ\xb36\xbc`\xe8\x88C\xaa\xa7#\x9d\xb1?K\x1c-3\x9e\x93;~A\xe8\xf4\x8e\t\x98\x85a|\xfa\xc0\xb6\x82\xb9\xed\xec,\x8a\xea\x02-\x03_y7?\x18\xfb[c\x9d}F+~x\xf1>m[$\x1d\xfeE\x8bj\xd3Q~d\xfe\xf2\x06E\xa1\x04\xd3\xa6\xfc=a\x07\xfbY\x07\xb7\xe1\x83\xed\xccJ-\x8d\x8b\tv\nD\xe3\x1e\xc4GkJ=\xb8\x81\xa5Q\xe1\x16\x7f\xe8\xbe\xb6\xc66\x88\x12\xc3dm1\\8lO\x08\x95\x95R{X@\xef*\xbb\xe9E\xfeVGe\xd9\x99~\xa7\xe9\xa9\xf0\x98\xb9\xf4\xa7\x88,}\xa7\n\xe8\xebFl-\x8e\xfc\x00\x8cT,4\x9c\xaa\xfa\xac\xcc\x18\x1d\xb4?N\xfe\x12\x0e\xa1\xfe\xba)\x88\xb0!\xc4\x8c\xcd\x95N\x8b\x12\x92_Bv\xde\x97\xd5\x1b?\xe0\xc4\xe1\'T\xbeSX\'\xba\xf3\xe3\x03.\'\x82^\xdb:\xb6\x15\x08\xa2\x97\x1e,\xc8\xc5\xa2\x05\x18\xcezV\xd9\x04\xc2ni[\xd3\xda\xf5\x9c\xaa;\xcaY\xba\x80\xe2r\xd4i\x9e\xf1\x0fi\xa0\xfe\x8e\xed\xaf\\\x01\x00\xdaw\xcd\x84\xe8\x99\xde6\xa2\x1a\x88m\x90.\x7f,\x80\x81\x9ft!\x8b\x8c\xaf<+\x153P\xefR1\xf5\xa7JR%\x86a-\x7f\x82q\xe6d\xb8=]\xb8\xcbV\xfbPF\x16-\xec\xfe5\xaaW\xadIl\xbf!\x8c\x94\xf8\xb8\x00\x02\xa7\x0c\xb0=\xd1\xad`\xd6\xcd\xa6\x89\xcb\xf5b\x7f\x9cS&<\xf4z\x17\x94\xeb\xf3\xf4\x07ze%,\x12(Xn\x12\x05\x14\x04\xbe\x04\xdf{\x8edG\xd8\x1etO\x9bO]a\xbbH\x1e\x9cB\xcc\xc2\xce\xefH\x00\xc4h\x9b\xe77\xa1\x0c\xfc\xd6\xfa\x8e\xe0X\x13\xa0\xc8p\xd7\xcf\xda\x1e\x9f
\xf7B\x86\xa1F\x1c\x95\x81$r\x06.\xc9\xfbF8\xad2\xcd\xec\xbf\xb7A\xd2:\xe1\x05\xff2\xa1\x13o\x15y\x1en\x1d\xd7\xf5\xcd\xc4\'%\xd9\xf8p\x00\xb3\x02\xf9E\xda\xdb%E\xde\x12\x04\x13;\xdd\xa7\x82\xdbb!\xf5\xed5b\xd3\xe6\xd3\xda\xd44d\xe4\'\xee\xc7r?\xcb\x1c\xf7\xcc\x84i\x05\x9f\xc6Th\xdf\xaf\x13\xf7\xab>\xbd\x134u\xbf!\xde{\x83\xb6X\r\x96\xcf:\x1eL4p\x01>\xfd\x83~]\xaf\xa1`Q\xe8\xe6\xaaa\xb5\xb1\xb9p:L{y\x13j\xceqI\xa6\x07/\xe1\xf6k(\xf7\xc7\xd1\xca\x92_\xc0\x1cI\x08\xd0\xa9\xa6\xae\x0e;\xd2\xc8\xc4\xab\xa7E\x0b\x9b\xe5\xdd\xcc\xd5\x81!\xde<\xbak%\x83\xf3\x91\xde8Q\tK\xda\xb3\x02C\xf3\x17\\\xd0\xb9\x03\xcb\xf4\x07\x17>\x00Kr\xc0\xfc\xeb\xc4\xd3\xdd\x99]2p?\xf8\x84pv`\xe0f\xfdI@O\xf0^B\xa5\xecG\x0f.uw\xe7\xbb"\xee\x0b\x93\xda\x8e\xe2\xe8\xeb\xc7\x92\x8bn\xe1\x19\xf7\xe2f&yc\x80\x1c\x0b\xf5\x9c\xabe\x08\xcb\xf6o\xbd\x10\x88\xd2\xfc\xb4t%\x19\x15D\xf9R-\x0f_%W\xb4\xdc\xd0\x94\'\xea/P-\x9esH\xbc\xa6}\xc9\xc8\x14\x14i\xd6\xda!\r(.\xd5D\xd3p\xc6 \xd8\x9dk\x90\xa4X\x7f\xaen\x8e\xa0gR\xecbZ\xf3\x01`\xc2U\x14\x9f\x97\x94pH\x80\xea\xf4,\xd3\xb2\x0e\t\xa5;T+\xf7\x8f?\xcc\xbb?^\xc0\xf6W\x9c\x89q[\xe3\n\x16\xd0-\x9c"\x05U\x01I\xe3u\x07@F\x10\xfe#\xd43v\xfd:\xfdm\xca\x1b\xae5\xc5\x93\x912\x19\xba\xd5~\x16p\xeaU@,*b\r!\xd8\x83\x97(\xed\xcf\xa7=\x18\x88\xddG\xa4S\xae\xdeL\xf8\xe6\x92^\xac\xee|v\xa8\x8d\x83\x863f\xa8\xfd{\xd2\xb0~\xd9\x90\xc3(5\x86\x8f(\xc4\x87\x8b\x9a\xbc\xb3b\xd2\xd8\xf8\xb7\xc7\xd7\xf6\xdfw\xb4J\xddN7>\xd9\xb7A\xe5\n\xc3\x98a\xf5\xfc\x16\x9f\x9b>\x07\xb4\x8be\xc2\xf2\x941}\x89\xd1\xdb)\xce=}\x857\xa8C\xac 
\xe2\xd9\xbc:}\x12R\x96\xe8\xc0AI\xb8\xab/\x81]\xf2e\xfc(\xf8\xadP\x06f\x1a\x97<\xd5{\x90[\xadiH\x95\xef\xfe\x8d\xa4\xf3&;\xefB3\xb1\xf8\xaa\xc2t\xb6\x95v|\x02\x18\x89X\x9d\xab^\xbf}\x8e\xf8x\xc5\xdc\xea\x1f\x06\xf9S\xdc\xbd\xd7J4\x13\xf2\x93\x8cb\x94<\x82\x1cO|M\x11\xb1T\xbc\x7f\xea\xd5\x8e\xf3B\x8c_/\xb5Sx\xa2\x1f\xdb\x0f\x0c(\x85f\xa2FRF\xe9%\x98\x95\x02\xbb\xbfE\x95\x16\x9a\xd09So\xa0\xc0\x11\xde>\x0b\xc8Q\xa7\x9d\x00^\x8e\x0f\xd2P\x0e\x01A\x9e\xdf\xc9,\x02m\xdeT\x04T[\xbe\xa5\x08\xe5*P\xd08\xc4~\x7f\x1f\xa9\xfaX\xa5\xa8\x10\xee\xce\x84\xf7x\xb1"\x1c%oel\xc4\x8a\x97\xeb6\x01\x8d\xda\x14\xe1\xd6\xbf\x90\xf9~\xe7\xcdD\xec\x96>\xea\xf7\xbf\x16\xeb\n\xf04\xbf\xe9\xe1!\n\x9c;\xa6\xfc\xe6\x1b\xaf)\xd9\xbcF38\xa5"m\xe1\xe4\x96\xafe;fD\x1fg=8l\xe7\xd8\xa6\xc0\xe4$\xbe\xfd\x01\xf0\x88\x0ct5%\xecA\xba\x05\xbc\xf6<\xff\x82\xe2\x13\x1e\xeaR\xd2D9\xf5\x98\x13\xdf\x00N\x8c\x89\xaa\xf3\xb8\xd3Y\t\xe1"q\xce\xf8r3\x14\xd9\x1a,y\x0cm\x9d\xd1d\x8c\x19\xe5\xcc\x14.\x86\xc6\xb0\xf1\xe7\xfe\x9a\x0f\xdc\nI\x8d\xb6\xd2\xc1e\x9d\xbd\x997\x12\xff~y`\x93\x8a\xc1A\xbcf\x93\xe9)\x07OMF\xfc8\x18AM\xea\xe3w\x8d !g\x895\xd7\xb5Qv\x7fj\xd9\xfe\xd2Kc\xa7)S\x92k\xc5jo\xbfs\x85\xbd\xbb\x0e\xb0\x1b\xbf\xba7\x8c*\x99\xaep\xbf\xe7\xf3-\xba\xe7\x03L{\x17\x98?\xa9\xf8W\x1d\xe5\r\'\x15[\xb7pa\xe4f]h\x8d|\x02V\xaa]\xfcw\x1c{K\xbe[@\n\x06\x92t[\xb5\xcf\xbd\xe3\x96\xe5^\xe6\x19#\xe5,8ZB\xbe\n\x91\xb1\xbc7n\x9d-\rU\xdbh\xc2\xa4\x82\x0e\\\x139\x06:\xe5\xa4\xb0SD\xd5\xfc\x0e\x119\x9b#\x83\xac\xbe\x87Bg\\]+\xcbT\xc2\x9fS\xbeB\xa06\xc6/\xc8\x97E\xbb\xce\xdd"\xe3\xa9\xcd4\xaa\xeb\xba\xea\xc1;V\xe2M/\x82\x1dgkqO\xac\xf5^f\xaf5\x97\xbb}\xeb\xde\x12\xf1\xe6\xf9\xa4\xe13\x14\xdbxi\xcf\x86\xe73\xff\xeaJN\x03=\x80{CL\xb9\xbc\x89 
M\xb8|\x1cv\x94\xd2J\xc0\xff+nK\x0f\x05\xae\xe0B\x07\xfb\xe1\xba\xb8\x87\xd0v\x17\xa3\x93\xfc3\xcb\xc1\x9cS\xfem\x19\x00\xf4\\\xc4\xba\xe1\xb2\xa0\xd4\xfb]\xde\x06\x0e\x17\xa2\x92\xfd\xd3\xf75\xad\xc0r\x94\xae\x10\xfc\xb4\x02.\np\x14$\x9d\x1e\xac\xfe\x8f7\xfc`\xea\xa1\xb1.gG\xca:\xf5\xb3r\xa3=\xb2\x0f\xca\xf8~\x1c1\xdf\x08\x05\xadh\x9e\xd8N\x95\x81\xad\x96\x80\x85C\xaciC\n\x9a\x17\x0c#S\xba|\xc5\x98\xe8\x18\xf6S4G4\x11\x06F\xe7\xfd\tur\t\xf9h\xa6\x05y!\x02\xd5m\xec.\x85l\x8b\xb1>\xb5\x8f\x7fp7\xd7\r\xdf=F\xb4\x7f\xfbVgm\xfc\x9f\xf3D\xd4}\x1d\xb9p\xcc`\x9d\x9d\x8b]\x12\x0c \xcaD\x8b2\xd2\x83zY\xafM\xba\xbf\xf0\x85\xd1I-\xf5J4\xc2\x02\x0f\x85\x91P\xa9\xfd0N\xf1\xc4j\x99\xfc\x9f8\x1e\xfb\xeb\xb1\x98\xd2,IQ9\x10E]\x84h\x7f\xc5\xaf66\x9c\x84v\xdbCL\xdePd\xeb\x9f,\x88\xb3\xe4\xfaIP\xb1\x9c\xa0\xa3\xd9\xb2\xee\xa6\xa4\xb9[~\xff\x81\xc5\xd9O\x9fh\xfbX\x18LyZkE\xf2\x99\\Q\xd0\x17b\xc5\xa9\x1b\t\xcaoBK2\xf6P\x80\xf1\xef\xb3\x0e\x0cvDNH\x8e\x19\x18\xb9\xb6\xe1eg\xd3\xa75\xb8\xc82})\xc5\x0c\xfb\x84-\xee[\xecY\x12\t\x90j\xf7\x1b\x86\x1a\x0f\xf6\xb1D.\xfd\x05;%@s\xc5\xa6\x9c\xae.~\x86\xbe/\x80\x18\x06A_L2\x19\x1c\xbe\xfa\x173I\xf6\xb6-yT\xdc\x1bs\x11Y\x99\x81#\xbfU\x1db\xd5\xe5\x8e8\x13\x92\xaf\x0b\xcc\xd5\xef\xf2\xa9\xd5y\xe0\xac\x1ef\xc0\x1d\x11M\x01\x19/\x87\x8c@\xa7$\x9e\x0bw\xa7\xcd\x18fI\x03\xd5.\xb6(wm\xfa\x04\xa4L\xf9/\x1c\xa98/t ,2(\xb8r\xa0\xf5\xa7\x8a\xb2\x1b"5\xd8\xc4:\xc0&`\x87\xd5\xf6\xf8\xcb\xe7x\xd59H\xeb\x94\xa5\xd3\x01?,/\xef\xc3\x07t\xdf\\@<\xb9\x19\x83\x80\x03\xb5\x9a\xb8\x15\xc4\xa8\xf35o \x08\x91\xd0\x15z\xfa\x17Sz\xc4\xfd\xa5w}\xf5\xb2}4\x8d\xa8\xb1\xc2\x8a\xc8\xae\xa1\xdfSR\x90<@\xffa\x91\x131*_\xfb\xce\xe0b\xde\xfa\xcc\x9e\xadNa=\x80\r\xcd\x1c\xed\xc3bB\xd7M\x0b5\x10\xc9\x1dK\x90S\xf2\xf5x\xe9\xce\x10\\\x90\xd8R\xd8\xcf\xdf\x1c\xb7_\xcd\x16\xc5>\xc4\xfc\xc8\xddHqs\xabE\x94]b\xbf\xffn 
\rK\x81\xa5\xee0\xf1\x13\xd3hK#\xc4\x05\xa8\xb4zE\xad\xb0\x9cu\xa9e&\xa6L\xc5.N.4\x1a\x10\x15h\x129+`\xcd\xdbC8\xd4%\xd5\xf9q\xfe\xad\x06\xeb\x07\xa4\xd1"\xc6Yp\xd9\\\xa3A\xfd\x97\xdf\xe3"8h\xe4\xb47\xa3j\xf3\xa9\x88r(\xd6\xb7\xf6\x8e\xf3\xe8\xe9\xb0\x01\xfc\x186%\xe4RC\xcax\x07\x90\xc8\xbf\x9a\xfb~\x9d{\xacK\\\xd3\xff\xb5@\xba\xdc\x06\x10.\xc3K\xde\xc7\xb7\x14m2SCg#\xed\xc7\xea8\xac\xfdI\xa3M\xe6\xf3\xde^\xfc\xc3H\xfcF4\xb94\xea5\xe7\x88\xb3\xc8\xab\xf8\x9e\xa3\xd7\xa1\xd4\xbf\xcc\x1b\xc1[-\xc8\xc6\xab\xc3\xe3\xc9\xe7\x03IfN\xb2\xc7&\xe4\xbd\x0f\xbcy\xc2\xcf\xae\x82A\x1chrKcWb/J\xb0\x99\xc1\xa8\xady\x01A\xec\xb1\xcb\x8e~\xd8\xb5\x03!\xd3\xbe\x95g\x19\x03a\xbf\xa6\xd3\xcbK#\x12\xec\xba\x95h\xbd\x97\x0f\xca\xf2\x8cNc\x9c{M\xd9\x04\xe3w\xc3`\x0f\x96k\xf6b\xf5\xc4\xb4\x8bx\xbaC\xf58\xa3\xea\x99\x9a\xeag\xc5\xdcD\x88\x9d\xd4\x92\xe3\xc3\xd8^5K\xe8G\x1eq\xf9\xc8H\r\xcb\xce\xef\x83\xcb\xff\x8b\x16\x1f\x08e\xd7\x8b\xbc\xe1\x8a\x80dA\xfc\xd7\x99\x1e\x97\xef\xf9uu\xfc OQ\xad\xa4\xa2\x99Y\x15\xbf\xd3~\xf6\xdd\xf1\x82\x0b\xc5\xf7_\xe8P\x13\x0f\x1a\xc6\x8eq\xe5#\xbe\x99\x93\xb3\xb5w\x1f\x80\xaa.\x84\xe8\xd8c\x03"op\x13K\xf0\xa9wc\x16\xcf\x8a\xb63i\xc2\x17\xba\xf4yn\xb7\x99\xd9\xe3\x04K\xfb\xeb\xea\x96\xa9:&&D\xa0\xa4.\xed\x8f\xf7\x95\xab\x02\xbbgs\x06}2\xf0\xd11\x84\xd1\x8f\xc4j\xe4\xb9\xd2<\x8e\xee/\xac\x89\xa6\xb4\x7fA\x9fs\x02\x80w\xc6R\xc7\x05]\xea\x12\xb7\n\xd1\xd3\x7f\xe0\x00\xfds\xf4\xdf\x08\r)\x1f\x8d\x17\xab4\xb9:\xca\x8e\xa1{\xf5\x9a\xa2\x89b#1\xe0\x1d\x17iJ\xe5-\xc7\xab\xec\xac\xef!#\x80\xd9[\xd8f\xc7PA\xe6\xfc\xc6\x8b\x8b\x0eb\\\xb8\xaf\xb7\xbb=\xf7\xe52\xe9\xd1+\x85bl[Bc|\xf9\x8b\xaaK\xbe2\x12dw$\xcb\x91\xe5u\x1b\x8fR6\xf6\xaa\xdf\xf3\xde\xe0\x96H\x90\x94[\xf2\xf7K\x14\xad\x08D\xc1\xbd\x89T\xa5\xaf\xce\x13\xcf\xc1_5D\x1a~\xb1PqHf!\xea\xae\xdf\xc4\x0f\xdeD:S\'\xdd\x96\xb3\xb9q{`d\xa2[\xdb\x01^\xc0\xa2\x81\xe4\xe9\xc1\xa8\xe9\xf6+\xdc\'\xe5Zup\x99\xe6`J\x08\xb3\xacA\xf9(\x07\x95\xc9]dg\x82i\xfa\xdd\x01\x97\x97\x9e\xf0\x9d\xef,\xb9\xb3e\xd9\x9b\xed\x1a\xfbt\xaa3\xaf7b
\xaay\xfc\xf1h;9\x91p\xd7Z\xb9\xb6)B\x8d\x13\xa9\xf5`\xc5at\x84\xff+g6>%\xe8\xcb,\x7fo\t\x99\xdd\xa7\xf7u\xe1\x1f\n\x05\xedux\x8b\x0clC;\xee\x89\xf2\xad\x17\xb7\xc0|\x11\x8b\xb3\xa5\x1e\x05PjV0aX\xfba\xf3ig\x91\x92\r\xdf\xdc\xe1\\\xd5a\xa5\xc6OS\xffF\xe9T\xdb]pQU\xd1y=\xa3"\xb1\xd4\xb8em\x1e\xee\x0bwF@\x92\xae\x8e\x9a\xcd\xbe\n\xcb\xff\xd8E!k\xdcM\xdb\r&\xc4\x15\xc1\x97]\x80\xdc\xc25v\xb8\xdb\xbe/&&\x85\xe6J\x12&\x03\x16\xa3\xa9\xe9\x16@yi)7aL\x9e\xb6Y\xf8`\xb1x\xe2\xbdyHH\xb1\xc0\xe1\x19\x19\xd2\xdfMf 4\xea\xf0-i@\xb5\xc2\xa2[8Hs\x80\xfb\x87\xfe\x83R6\r\xc6(\x83\xb6\xab\xd5v\xc9Q>\xf5\xd3\x15F+\xda\x10\x03\xd6\xaf<bX\xc0\xe7\x88\x1cI:$\x8e\xe4\xa9\xa3ty\x0f@]Z-es\x99\xc9`\\\x14\xb3\xdd\xaa\xfa\x97+\xfe4\xbda\x9e\xeeY\x87\xdbs\xdb\xb4<x~\xcb+\xb0\xe4x\xe0\x13\xfd.\xb6\xae\x89`\x0f\xfa\xe4\x89\xc0\xa4x\xc2\xee\xdd\xc0\xc4{\xfb{\xfa\x00\xf7\xaa\xc1\x88\xe2S\'\xc8\x9d\x9e\xe3\xcap\xc4s\xed(\xab\x02\xb3\xbf\xe2+\xc6\xa5u\x99\xf6b\x1e\xff\x0fr\xd4~\x93\xb6c\x91\xaa\xbf+\x1b\x06\xfb\xea\xa9\xd97\xe96\xb3\xcb\xd5\x99\x87\x08:\xd9\x1a\xb6z\x86\xfd\xe77H\xb3\x9fa#\xa5;{{\xc4\x1e\x1c\xb8\xe8lBi\xfa\x97\x81U\rZAy\x85[\xbc7\xff\xec\x05\x1d\x19@\x03i#\x13k\x06\x99\xa3\xfb\xeb\xa1\xbf\xca`g\xc2\xa1\xb7\x91.?\x96:\x9d!\xe3 
\x17\x99F]\x9e\xa6E!\xf0\x9a\xeb5k\x99\xe0\x8c\xd5n\'\xf1\xe2\xc9\xc4\x07\x07\xdae\x14\xa6T\x8f\xb9\xf1;\xb3E\xfa\x80\x9b\xad\x16\xcc\x12+\xfaw\x02.W>\x1bK\xa2J\xa7\'.n\xd5S\x9fJ@{|V+zw\x8b\xf4\xa0\xfc\x18\x8a\x9d,t\xf6\xca\xc1?\x1f\x90\xa83gY\x97\x98\xd1\x981\xa1\x1b\x0e\x94!o\x84\x9b\xe3\x8d5\xad\x0f5\x06\x06\xad\x8a0\x90E\x9c\xceI\xd1\x04\x05\xd6S\x92\x92\x7f\x92\x00\xec<knSX\x18L5R\xdd\xba\xf0;\x92`\xd2\x19\x1a\x1fA\x0f\xd2u\xf0.5V\x95\xf5b\x195\xf7\xb68\x83Ws\x1c\x05\xd0\xfe;\xcc\xb6\xb3Ct\xeb\xc8<\xd8^\xcdm\xe1\xe9\xfd\x8a\xf6\xfd\xaf\xe6\xd9\xb9_B\xdc\x950(\xa0\x98\x18eB\n\xd3\xe9\xef\xa3\xe9`2\x8a*\x80\x8a\x0e\xc6\x1d@\x9fG\xdd\xb7\x9b\xadg^}\xce?\xddJ0\x18\x03nn\rwc\xe4n\x92\xa6\x97M2K\x99\xcb\xb2\x9b&5\x8a?\xdb\xbe\r\xccc\xe1\xee\xefh\x18\xfe8\x91"*f\xf6\xbdce\xe4Y@!\xcfz;\x01\xa9:L)\xa3\x1a\x9a\x06\x9f \xab\x08A?\xe2=\xa5LA\x87Z%\xd0\xb0\x9fs"\xb7Y\x06\xdd^T\x1f\xaaEb\x90\xa7_\x96\xf5\x17HP^\xa8\xa2\xca\xcaXW\xb7jnZ\xbc\xb8(\x0e\x1bm\xf9\x95h\xbe\xdf\x06/\x04V\xe9\x1dq\xf2\xee\x0e\xc9(\xe8\x92\xdd\xd0\x91B\xc3\xad0\x14\x12|\x0eP\x86\x1bx\xab`\x96\xa1?H%\xc5p\x83\xacw\x9a\xda\x9fb\x8d\xf1\xae\xd87\xcb\x04~\xa6\n9\xf2L7m\xa1\xed\xf7\xf2\xf5PoD\xc9s\xaf\xaf\xfff\x8c~\xc5\x84\xfe\xee\xbd\xb4\r\xfel\xfe\xd3\xc5\xd9w"8_j\\0\xa7S\x885u\x9a\x85\xb9R\xe7\x84\x97hTIZ\xbdY\x15\x88\x00=7\x97Sn&\xbeF\xe9\t9a\xf9\xec\x04\xce\xa77\x9dG|F:q\x9c\x03\x86\xe8\x88,A\xc4\x86\'\xf9\x02\x14B\xec\xab\xa9k\xdb\x85^\x17G[\x9d\x05\xa3\xfa\x1d\xae\xe6\x0c:\x85$\xb6/\xb3\x94\x82\x9d|y\x8c\xe6\xf8\xfe\x96\xb0\xa2\xc7`-\x8c"\x0e\xd0\xef\xbfa,K\xaa1\x87\x8f\xc3\xcb\xbf=\xbf+uw\xde\x8c\xca\x88\x87\x00j\x8b\xaeT\xda\xbdNa\xa8\x062\xf6\x17\xf2W2c\xf6\xe7\xbd\xfc\x92C\xa7;\xbcj\x11\x93\xa9Dk\xaa!{:\xb7\xec\xf3\xaeY\x88\xba=\xbc)1;\xcfJS\xee%R3\xf4\x14\x99Y\xc9\t\xb3\x9f\x01p3+\x1a\x085\xd84\xe3\xdb4Q:\x1a\xedu>\xed3\x84\x9eqA{\xf2|lf~\x0b.\xf0\xd3\x18\x05\xee\x17aP\xd6v\x14\xf1\xb2\xa4\xd1\x90\xf8\x06\xa2\x14\x97\x80w$\xb0\x0bA\x15\x19\xcb!\x91\x9eP\xbd\xe1F\xd2\xb8\x8a1\xb5}\xb4\x87\xd5\x9
0\xa3:M\x97lV-d\x1eY\xf1\x81.(\x17\xa5iD\xf8\x13\xb3\xd6K\xe8\x10Nt\xca\x17*<\x14\xf45\xef\xcd9\x81\xa8:\x01\x8a\xa1H\x02\xce\xb4\xdf\xe9\x9eVT\xc5Rf\xa6!\xff^\x95\x1b\x97\x81\xfc\xe5y\xb7\xec\x00]\xd2\xf4&\xfa#I\xa8y_f|j\x94\xf9\xc5)\xfa\xccn\x7f\x17\x81_0&\xcc\x8f\x1aca\xd4\x80<\xae\nl\xe8 \xddD\xe3UF&\x9fl<W3\x9e\xb0\xe1c\xbe\xb8\xf6\xe5\xe6X#\xec`\x99\x90"\x8c\x1e\xd3\xf2.\xac\xe5O\xf8\xfa\x05\xe65\xb1k\x18\x9d\xa9\xda\xbf\x89\x9e\xf9\xec\x06n\xd7T\xf0\x80\xc7\x0c\x98\xba\xb2\'\xbe\xf47\xf0\x8b7\x01\xe1\xfc\xaeD\x8a\xf6\x19\xb4\x9c\x1cDl\xc8~\x9bl\x8a\x15\xc6s\xb6\xe8\xa0%7\xf7\x10\xec&v\x91\x92\x18\xca\x1b\r\xbc\xa4\x01tv\xe3\xeb\xf9\x93\x00%:\xee\xa7\x1dn\x820d\xcfmF\x88\xb7\xe6\xa7\xe3\xd6\x7f1\x7f\xb5\xbf0&4\xd6\xa4| \xce\xc3\x93\xc32\x9e\x0b\x8c*{\xdd4\xa5\xfb\xf3\x97\x9b\xeaV\xff\x05\xdeJ\x16#\xf5\xe2Ls\x81\x8a\xcd\xbc\xc9i\xa9\xbf\xc2\xd3W\xad8[\xf3~\xa1*x\xbd\xd28\'\xdc\xf5\xe0\xfc\xdd\xb0g\xed~\xac_\xfc\x98\xa3\xf6\xf2\\\xe3\x18\xf6\xa5n\xaa\xcf\x97\x9bz\xb4/W\x90\xb9\x8ayn\xd9-+\xbdo}\xdb\x1a\xf0\x1c/QM\xf5\x14\x95%\xc3Q\xdd7zH\xe4+\x87}\x9f\xd8\xf5Lv\x90\xb6wXU\x015\x8c\xbfU#\x89\rg\xc9\x93M\x97\xd8\x13\xf6f1\xa6\r\xd9r\xecI\xc1\xc5\x96{H\xb6\xe6H}G\xc4\x17\xdf\x15nHF\xbd_\xaaB\x13<\xbc\x14=\xdf\x85\x93\xe2\xec\x9a$B\xe2\xb7u\x95\xa7\xca\xca-"\xd8\x93\x16$\xbe,\xe9\x90\xd5\x01\x07\xaf\xc5\xd2\x1fZ\xe3k\x8d\xf4\xe0H\xdcDx\x057Yo\r\x05\xd6O\xc5-:i\x1c\xe8a\x1c\xc8\xea6\xc2/\x9b\xdc\xd8*\xc2\xe9\xa9\xcf@\xb2\xde\x1dp\xde\x99=E\xde\xe2\x82k\xbd\xa9\xcej\x98\xeb\xb8\x91!\x1c\x14G\x94\x9a\x08\x08cA~\x06\xcb\xfd\x03\xff8\x99;{C\xbf\x90\x87T\xf2\xf3\xcb\xf4\xd5\x10\xe8}\xc3\xcas\xf6\x1c-\xfd$\xeb\\\x1a\x19\xba\xa2\xc5\x0c?B\x00\x05\xb8\xbc\x89\xb8\x8d\x1f\x8f\xa4\xed\xd8)`=\xfd6\xec\x9d!\x87\xba\n\xd7A\xa3\x87X3\xed\x7f\xaa\x0f\xe7\x19\xcfQ\x85\xaa\xf1\xb8uP\x861\xea\xa7\x8d\x9c<\x1c\xab#\x99\xf8\xe3BZ&\xb5\xa5\xb5\xfc\x98h\x90U\x8dL\xbe\x1e\x89\xb2\xcf\xa2j\xa7%9\xe6\x98\xff\xbb\xa1\x93Oi\x0b\x9f\xa2\xfb\xf3B\xc65%\x8di\'Yy\x87\xa5Yi\xedI\xb9\xc2\xcb\x80\xd4
\xa9\xff+TEyv\xdf6\xa4?\x92k\xd1W\xc2\x9e\xe0q\x94}\x04 \xac\xc9\xfdm\xf6H\xa8\x97\xff\x80\xf5\xa6h\x7f\xd1\n"\xc9\xe4>\xf02y\xbd\x87\xb8\xd2\x95\x983u\xe6]\\\x07\xcc\x7f[\xb9\xda\xd8\x08\xc8\xa9\xbd\xcf\xd5\x83\xc5\x9d+\xb3\xdd\x90p\xc4M\xea]\xb1\x1f9\xc4\xc9\xbf\xd6t\xc8\xb0@\x95\xa3\x7f\xb5c\xc1\x84\x073\xac\xb6/\xc1w(f\x0f\xbc^\x9e\xb6\xcc#7F\x1e\xe7\x00\xd3\'\x13\xb1\x85\xcb6~\xdf\x9a\xc2\x19\xaaDUGk\xb3\xf1\x87\x9bw\r\x15;$\x81\xef\tq\xf7\x1f\xdd\xbax\x1d\xefS\xe2\x96\xa6J\xb4\xef\x1d\xe4\xec\xd2\xf8L\xe5\x93\x95\x94\xa39\xcd\x9aB\xc0`dW\x18\x11\\\x1c\x88\xddjTdr%\x07\xbd\n^\x9a)0\x03\xfa\x10\x1ay\x17P\x9a\x16\x7f\x0f\xa5\x8e\xa5\x83\n\\\x90\xc4\xd8]\xc6\xb2\xfa\x16mC\xc0\xa5\xb8O\x1cl\xd9\tm\x1eI&FW"\xb7\xf6v\x06f\x11E\x83\x18w\x93\xcd\xee\xf8N\xbe\xa9\xea\xa1"\x95\x01\t\xc0\xf5i\x98\xa8\xe9\x04+;\x18\x92]\xcclW\xa3\x88\xec\x19\xc5\x9a\xff\xec\xc8\xd7\x10\x06\x16\xd8_\xf0\x8bW\xe9W\xa8KGxg\x02\x1c\x8d/\xb0i\xd4\x89\xf5\xd3\xf2n$b<c\xa1\x87\xadq\xb969\xdb\xe6\xdc\xe6\xc4\xd9\x94U\xf3\xd5\x8fk^\xcd\x98\xc0Sp\x18\xf2:m;\x15c\x9f\xf4b\x89\xcc[\xc8\x01w\x8c\xf6u\xf0\xa3m\xb9b\xd9\x82#\x18Q\n\xb0\t\xf1\xd5~\x06K\xb2MP\xa2\xae\x00\xbd\x01\xa0\xd1\x07\xde@\x10v\xb1,59>[\xbd\xaf\xab\xf2\xc1\x9c)\rr[\x8b\xbe\xb2\x92o\x81\xcf\x9d\xe0<\x91\xdf\xd2\n.\xc5\xea\x9c\xdf\xa4\x87\xc3\xac\xb0\xfeD\x96\xeb40\xd9\x7f\x95\xc7Eb\xe96\xa3G\x1b9P\xa8\xd31R\xea\xaa\x07\xd3\xab\xb7\xa5\x1f\xfa.(\xcc\x88fl\xd2\x02\xcaxM\x9b)\x12\r\xf4\x98\x99\xf7\xf4\x0e\x1f\xe3\xed\xe1L\xb5\x00\xaa\xf0\xcde\xa8\xdc\xb2\xb3M?^\xdd\x1a\x91I\xa2q\xffo\x956m\x85\xac\xea\xccI\x0e1\xe17\x0cO\x1f\xcb\xd8/m)\x19a\x8at\x1f\x93\r\x81\xe6y}\xd2\x04s\xc9/\xdf\x02\x87\xae\xe6\xb2\x056\x9d\xc4\xd7`\xd0\x11&b\xbc6B\xfe_W\xab\xba\xfc\xfd\xcc\x8a\x8f:\x1c\xe3?f\xd8\x82\x92\xd2\xb2n\n1\xc9<\x159+\x9ez\xf7_]&\x1e\xae=\xcb\x93ua?94\xdbl\x97\xa2\xd0f\xfe\xc42,\xaf\xa2\x04\x8f"u\xc7\x9e\xf3\x88id\xd3\xb0\xd5\x11\x9e\xdb\xc7\x93\x8f\x89\x96\xa3v$NGH\xd5\xff\xda;L\xc7\xf3\xc4\xfa\xc3Y\x8d\xffn;7v\x94\xf5_\x9b*\x92e\xe6\xf0\x8
e\x855:\\\xec4U\xbcA"\xbd\x1e\xbc\x11\x9d\x17M&8)\xe0\xbf\xbd\x883\xab\x0c\xac\xac\xc2\xc9\xbb\x87\x8e \x13\x94\xa9y\xb5\x91\x15\xd9\x84a\x14D>\xd8\xe2\xb9My\xdcc*\xfe)f\xa2\t\xd7\xa6\x7f\xcbo\x95\x81\xdd\x96\xcd\x16\xb8\xfa\x93\x8d\x18\xf7P\xe2_\x92\x0c\xbbPp\xaf\x04}\xf0\xa5\xc5IN\xceQ\x10\x0e$\x0bgp"A\x94T\xe0\xc0\xeb\xc0\xae4@\x0c\x1b\\\xf1)\xff\x08\x8f\x8e\xfb X\x9bv\x8b\xc0u\xa2\xa0\x83\xd3\xbe\xd2\xe06\xd2\xf3\xeb\xe8&z\xe0\xf1\xae3\xdc!j\x91\x8b\xb2_\x03UE5Zz Xi\xce\x0c\x97\x99\xf9\r\xfdj7`\xf9i\x1d\xbdQ\x1b\xb1\x8b\x91r#\r\xe9_#-\xcf\xe0\xc5\x90\xce\xb5\xe9\xc6\xc5\xda\xe8\xa7\xf6Q\xa0\x11\xe7\x16(5V<\x19s\x93\xa8+\x83\xc6\x1f\x05\xf7Z)\xbc\x82\xbb\x02\xc8\xea\xa1*\xf2\x8f\x80\xf6\xe4\xcc/\x15i\x08|\x1c\xc8\x02\xf2\xe8\x82p\xd5c\xb3\xc8\xd5\xbe\xbd\xab\x0f\xbeQ\xb23\x02B\xee\x9fE\xbc_\x91\xb0_\xa8-N\xfdn\na\xeb\x11\x94\xe8\xb1p\x87\x0b\x82\x1b\xc9\x04T\x8b\x07\'\xb2\x83+\xb5v\xbe\xfc\xccC_\xda!\xa8\x89@\xd1N\xb9>\x1fgl#\xa5\xc1\xfcf\xf0\xa2\xceM\x95\xf1\x8e{$\x17\x81\xe5\xa4)J\xb99\xa3\xb6\xd4|\xa6\x18\xbf\xee\xe5\xaaU}]\xc4\xf8\r\xc5F)%\xf6\xb4K\xd9\x9a\xc3\xe1\xc3\x9c\xc0d\xb1S\x89\x02\xf9\xd9\xb2\xfcq\xf6\x14\x02-\xe9\xe6\x14&{\x13$\xd7(!\xec\xfa\xe5\x08A\xc3\x9b\xb9k\x94x|\x9fL\xbce\xf3\xe0\xe3\x07\xa62y\xd4\xcfw\x9f5\xd0\xbb\xdcg\xdb\xaa%\xd3\x82%\xc6B\xec\x02\x8d<c\xbc\xa0\x9e;\xa9\x17\x03#[\xbf\xfd\xc2D\xd7JL\x93NM\x80\xd3\xbd\x8av\x0b+.\x8d\x04\xbf\xd4c\x97\xbd\x95p\xf5\xff5!yN9\x93\x1f\xad\x8ce\xf8\x08\xe2\xfa\xab,\x8fS\xae\xa7Y\xb3\xd1\x14\xb4k\x8e.g@\tc\x8f\xac\xb9\xb4\xb3\x0e\xa5\xbc\xceE@:\xd1Qq\x10\xc3O,e\x0c$}\x08m\xfe\xbdE\xfa%U\x1eg\xa6Ezn\x8c\x13\x81\xeen\x83\xea\xe6\x84\xc3D$\x8e\x809\xf2\x02\x03vN\x08\x9e\xfaR\xf57\xe2-\x03eG\xec\x1dHN\xb4\x00I\xd9\x05/\x15%{\xe4\x93\x04\xee\xcbu\x1a(\xad\xd6\xe8fAd\xeb\ni\x1f\xb0\xf3\xf6\x9c\x1c\x9a\x8eer\xd5\xf5P[\xe64\x81<\x8e\xda~\xe1\xf8J\xcd36\xd8\x00\xf1\xa6\x1e\xa85\xf7\x9e\xde$\x80H\xf5\x8c\xed\x82\xdes\xf0\xa6H\xb7\xba\x14r\x04\xe1\xd23;2\xec\xf7lQ\xbeGxR\xda\xeb\x16\x84\xc3\xd4\xc5\x9a-p\xfb\x
fe\x16\x93t\x18qi\xbc\r\x14BO_V\xaf+m\xb4\xbd\xc0\xcfM9\xfe\xb7l\x9fn\xe0\xf9F\x16\xbf\\\xe6\x92\x94\xdcC\x93B\xa4\xad\x9fVJ\xb0\xc9\xc8L\x81\xf9\x9b@\xae\xf5a\x92\xfe\xea.8\rq\x8aE\xfa\xde;"\x10\nw!+K.~I@\x97\xcd%\xba\xa6\x84\x19\x015nW\xc7\xd0?\xdb\xa8U\x81\\{\x9e\xe54\x1aLU\xd2"\xfd\xaa\x08JE.\xb7I\xde\x890\xd8t1\xdbe\xafl\xe3\xc2\xa0\x8c{\xbd\x17\xebXs&o\x12L]hG\x89C\xe4\xb7\xf1\xa7Zg\x19\tZ\xc3(\xef7#\xea\xacO\xca\x8eRT\xa1c\xa9~qA\xdb\xce22\xd1\x87\x83N\x8cSq\xd9\xf7\xd9\x99M\xdc\x10\x00U\x95\xb9b\xac\x7f\x88\xe0\xb0\xef34\x08\x95\xa5\xb8\xe6\xc4\xcc\x96\x8d\x9f\xba\xe1\xd2\x17\xc10\x14\x057L\xcb;\xd9\x14\x7f\xa52\xe1\x0bQ\nO\x17r9\x8f4\x0e\xee_7\xd4\xe4;\xdef\xd1\x08\xb2\x90\xf2\x99yu\xd2\x06\xda\xad\x9cd\xd2\xfdn\xe9\xd7\x96\x15\xa7\x11?#\x07Q\xb8\xfd\xa2\x1a\xf1\xc3\xd1\xa2\xb5R\xf0\x8c\xfd3 \x9b\xcak\x87i\xc8\xa5\x03\xe0\x19C\x14\xeb/\x85\x9f\x81\xfc\xceh<5P7\xc9\xbf\xdaWV\x8c\x9a|\x1d\xc8Vn\xc1h"\xe0V\x8d\xbf#\xc5\x9d\xc2[\xb1x\x02R\xf6KL\x85\x93S\x9ab\x16\xa3;U$\xc0\x0f\xad\x81\\n\xbe\xdf\x01\x9f\x9c\xfc\x81\xc0\xd0\x08\xe1\xe4\x00\xedG\x9a\xe8&\xe3k\x10\n\xc9?w\x04ne\xfa\xd0fo\xa5\xb6p\xc8\x14=\xae\xc0\xfb\xfd\x028\x84B\xfc\x86xL)c\xe5\xa3\xb1\xc4\xc4^7~\x0bn\xcf\xad\x8aW\xf7\xa6LU\xd5&L\x81\x1aup-\xd5x\xee\x94\xd9=\xa1\x01\t\x0bZ\xbc\x86\'\x97\xa8}\xfd\x1d\n\xac\xf2Po\xccr,\xb0\xcfT\xd9H\xeb\xda\xd8\xc2\x8a]\xc7\xa7[\x01\x91 F*\x0c\xc9\xee\xa9\xe8|\x8e\xbbi\x10\xd2\x1a\xa03z\xca$ 
w(W\xe0K\xcd\x8ai\xe3\x07\rt\xef\xfeD`\x94|\xb1\xafK\xf65\xb3I\xb0\x1d\x1dNZ\xf9Qk\xeb\xb0\x0b\x8a\xc3\xe3\x90@x\x0f\xe8\xdd\x16\x9epm\xbeX\xff\xe7\xa30]b\x0f)\xda\xbd\xcd\xd9\xa1\x13\xc5\xef\xc9jM\xf8\xb4\xff\x8e\xce\xea\x92-\xdc~\xb8\xce\x99>\xc0N\x80\x1d\xdc=\x83\xca\xeb\xd4\x92\x14\xd8\xdch\x13\xea\xc5\x05\xe2\xf8\x96c\xed\xc3\xd4\xe1\xc4\xf5\xa8x\xde\xbd\xf2v\x8f.\\0\x01\x95\xc5u\xd3\x88\x0eF\xf4\xd0\xe6\x86.\xbaO\x93"\xe5<\x1dB\xf2\xc4K\xf3`C\x02j\xd5\xd5\x9e!4\x0bU\x16\x9e\x19-d\xf4\xe4\x91\xb0IF\xd1\xd5\xac\xf7\x1e\x0e2\xa7=)L\x08\x95\xfe\xa5W\xe5\xdcp\'\xc1;\x98\x91\x19\xc15\x9f<4|Gcy\xfd\xdd|L\xc0\x04d\x07\x96\r\x14\x06\x18\x0b\x19X\xb0\xac\xed"\xac\xf3\x9aP\x96\x90\x07\xe9\x19\xf5\xe2\xb9\xc02\x0f\x8f:\x00\x8f\'\\ Q\n\xcaDr\x89\x17z\xf3\r\x945\x1e\x11\xe5\xac\x9d\x89\x0f\xa2xSN\x06=q\x9f\n]\x98\xe9r\xf5\x8eDb\x0c\xbb\xa6\xc4\xf4\x96\x11\xd9\x8e\xbcn\xdd\xc3\xf6-\x8a\x1ba}\xb2\xc9\xc7\x98\x9b\x06\xc6_cX\xdd\x85\xff\x84\x1f\x81\xa4}c\xf2\x9d\xbaMcD\x96\xa6&\xc0\x0b\x0bqm\xe9\xd0\xdc\xb5\xda\x98\xc7\xe9\xea_\xdd\xe6\xe2\x99\xea\xf2\xbe\xb5\x18\xa0\xaf\xec)=\x1a\xa1\x04\x83W\xbc\xbb\xed\xaf\x1dv\xd1\x87$&<tW\\\x9d0\xb5\x15\x904@s\xc7*\x98\xc1\x85\xc1\x7fM"\x15\xb9\xaa\x98A\xd2O\xff\r\xe5\xab\x19\x94\x10h\xc6\x9c\x03\xde-\xa4\xa2\x9b;\xed\xff\xa8\x15nv\xc6c\xfe\xde\x96\x13\xd6\xd9\xff\xe8l\x15\x9bp\x11E\xc5\xcd\xf7{s\xd1\xdd\x8f\x7f\xe5u\xb5\xf1\x7fi\xc6$t\xe1\xe9\x96\xf6\xfd\xe3\x99C\xc5\x84\x98C=\xe3\t\xe2\xe7$\xf2\x88\xd0X\xe9\xe5\xd02J_\x9c,N\xc6\xb6\x06\x02\xcc\xdd\xb3\xf9~PP\xb5\xc4\xb1\rWT8*\x7f\xc7\xba\xef=4S\x89\xb4\xd2&a\x03\xe1\\,"q\xa8\xef?\x14\xbe%?\x00\x11E$\xa1v\xd7\x113\xffu_\x99\xae\x06h(#k\xfb\xfe\xe8`\xf6\xbd\x1cD\x14\x80ZI\x98\'\xec\xef\xab\xb611\xa8\x05E\x81\x9cO*u\xfa\xa21B3z\xedj\xee."n\x9dEO\xde\xc8Y\x92\n\xc8\xf4%\xa7Q\x10\xbd\x7f\x19\xc53\x00\xc0\xc7\xdd\xd6\\\xd1\xd6\x8e\xb4\xa8\ri,2\x06_\xf9-\xadA\x1d\x9e37\x02\x9a\xe7\x91[\xf6\xb7\xe8{\x95\x19,Y\xd9WX\xf2`\x06.C\x0fbt/7T\x11\xb6]\xf1\x11"l8\x11\x7fA\xce\x87UO\xa4\xf8z\x1b\xd8U\'\x9
3\x19\xa1\xfdE\x0c\xba\x94hX\xa5\x1d\xb7\xba\xf6nk\xaf6\xf9\x96\x0c\xd6x(U\xe7\xb3!Z\xae\x82Re\x0b,\xba\x85|l`EZ \xf6?\x9d\x84}`=\xe51G\x83\xb2rI/#\xd1\xd5\xb79+\xc5E\xce\xa8t\x9e/^mn\x9a*\xdd\x8efp\xd4 \xc5\x8dJ[-&\x99\'\xa8MB\x1fh\xbb\xffh\x87\xf9\xfa\x82Cx\xf2E\xdd\x9e\x1f\xfb\xeby{\xad\x03\xad\xd6D&\x95\xebI!B\xe7\xc6\xf8C\xd8\x13J\x00LU\'\x83\xd8\x99\x10\x94,\xad\x98\x88\x05,R\xdd\xbb\xb2\xe4\xb3Y\xb3\x9c\x8az\xc4V\xce\xa1\xfbm\xd9\xdf\xfd&+*\xa3I\x1e2\x14{\xb8\xcdK\x19%+&\x84\x11\xad,?\xb5\xa5\xda\x86\xcaIy?\xc3N\x9fyi3&5k\x83\xa6\x0c\x0c\x8b\xf4-\x9fj\xe8\xf8\x1a\xd0\t\xe7\xb9\xc4BnK\xa1{\xa2\xdd5>(|\xe4\x05\x94\xf7\xb0\x1eq\xfe\xac\x0b\xbd[09\x0f\x01\xe7\xb2=\xc6\xe7\xa3z4\x81\xe3":\xb5\xa9\xa1\xe4/5T\x0f9\xfcbc\x11?\x90Na\xa1\xea\x0b\x87Ax\xa2\x97\x14\\\tI\xcd^\xd3N\x93\x97R\x14\x19\xd5Sw\xc2\x15\xc7~\xe6\xf9c\xa6"v\xf7T$\xad.#\x08\x95\xe78/\xfd\x0c\x1d\xd6\xadkr;s\xc6\xac\xda\xd4y1$\xa0\x96\xed\x90\x89p\xb5\xa3T\xfe\x89\xaf\xba8\xe1-\xc3\xca\xc3\x86\x11\xbd\xb5t\x02\xd9z\xd2\x82\xc31\xe9\x952{V\x0c\xfc=\xc9R\xa3\x86 \xa0T*\x9b\xbf\xed~\xedP1\xd3\x08,\t\x1ana\xba8hZ\xe4\xeb^\xdc\xccw\xe7\x96\xf5~\x99\xeb>bI\tv\xe5\xdaQq\x10)\xa2a\x94 v\t\x82\xad\xbd\x8f.\xdf\xce\x8c\x0e\x94\xbe\x97*L\xb0\x8bo(\x98\x90\xb1\x95\xa4\r\xe9Q2\xf5\x80\x0f]\x9fL\xe5\xf22\xb7\xf8\xd1.+\xbaYr\x07\xf6ve\xcf5\xc7\xd09\xef\xb9Dy\xa1%\x95Q\xd1\x1ez\x87X\xf3W\x9ed\x0b\xdaC\xa17~k\x93\xb7\xc30\n\xe9\xf4\xbe r\xd0o2\x07\r&Q/\xedU\xc9\x15\x03T\xe1\xc1\xb1\xebF\x96\xc3\xdc\xb8P_\xaa 
\x16\\M\xbd\xc5v1\xd7k\xd9\xf3|w\xd1\x94\xea\x06X\xcfHt\xbb\xc1}\x03HP\x00I\xfe\xd4\xef\xe3;4C\xba\xcde\x16~HG\xca\x16\xaf\xe4=w\x1e\x86c7*\xe2Q\x84\xfb\x88k5%U\xc6U\x94\x95\xa4\x0e\x83\x8e\xac\xc4\xd9\xcc2G\xb8mIw-\xd0\xce\xcf\xd5\x11\nC\x8c\xf5\xf2\xae\xbb\xd6<\xd4%\x86\xcc\x1d{V\xb9\xae\x12\xec\x15\xad6\x81^\x08A\xcd9\xeb\x19^\x89\xcb\x16\xf5\xad\x08\x03+\x8d\x8b\xa6\xce\xa9\\y\xf1\xb1\x9d\xae\x04\xee\x07\xe8"\xa8\x0b[\xeap&\'R0d\xf9\x03\x10\xe0n\xed\x86\xf1\x95qp\xf8\xbf\xf5Y\x99\x07a\xc6\x97\n\xe1\x08F\xee\xa9\xa8e\x88\xf6\xfb\xc2\x8a\x891\xecG\x06\xc6\xcb\xd1<T+x\xfc\xfbNt\xabk\xeeL|\xe8\xca\xb9\x8c\x0ftQ\x92\xd1u\x9e\xbb\xff\xdafb\x83\x1b\x9f\xf5\x896\x05\xc8#\xa9\xd6\x94\xdfu\x14\xefK\x0c\x81\xd3\xdfr\xb9\x18\xc2\xbc\xcf\xcb\xe5a\x06\x891v=r\xe8\xde\x12M\x91\x8d\xba\x8e\xce\x0c\xcb\xe8\xc8\xb8\xd4\xa3\xd1i\xd64e\x00\x04\xde\x83\x16,\xaf\x97\x07\x99!\x8f\xdcu\xa0$+\xe4\x8d\x8c(<\xca\x0c>l\x07\xb16M$-\xe2\x94\xfcl\x91\xf1F\x89\xd6H\xb5dHiiT5"D\xd9vV\x15\xfe\xb0c\x8f]\xae8\xe4\x07\xb5R\xd6\xf4\x0f\x11>u\x1e\xac\xe7C\x8f\x17\x98zL~3cO\xf1\xd5W$\xa9\x13]\xd4\x9b)\xdd\x986d\x9b\xf9\x0e\xb9\xcd\xc4\xb0\x8a8 
\x91\x11E\xa3\xcfG\xe1W\xb5\x11\x89[Z\x88\x82\x86\x1c3W-\xd1\x11\x1cj\x93\xdca\xe6\xd6\xf8\xca\xde\x08\xe1\xa6\xa3\xdfr\xa0\x8b\xc14\xfeaA\x97z\xc6\xc1\xb5W\xcfZ\x85\x0b\xb3\x15\xb7Y\x88\xb3C\xe2Y}\'\n\xce!TV\xb2\xc9%\x9e\r\x86\x07z\xf5\t=\xfc&\xd6\x9f\x9e\xac\xaa\x14\xc3\x99\xd6\x9f\xec\xb1i@\xc0\xa2N\xd5\xdded\xc6\xce\xeb>\xbeNu\xd5\xef2\x94z\xcd\x1b\xe0\xd7qS\x8awz2\xfa\xf5~\xb0\x8d\x0e\x93|\xa6\xfa\xdc\xc1\x95\xc5\x13\x7f8\xb69MyA\xdc\xca\x02\x83\xd9\xfe\x16\xb8\x83\xab\xd3l\x87\xbb@\xda\r\x19\xc0\xf6\x7f\x89Ttry\xdbS\x8b\x1bv\xd7\xf6\xc0\xa6Q\xe1\x96\x90\xd8\xf2\xbf4\x1d\xc1\xc7\xfaI\x99\xf1\r\x8d\xcf\x0eKSD\x15\'\x0fTG?\xb3\x12\xd2\x8b\xd4u\xbd\xf80\x83|\n\x84|\xb7]8\xd5\xf9\x07\xfay<z*\xb8\xc2V\xd2\xcch}\xe36NF<\xe1Pn\xcd+\xf8\x90\x1d\xf4\x10\x0f"\x0f\xc1\x1e\xcb\xae\x99\xe3\x97\x8b\n\xdb*\xd5\x9f>\xfd\x01Z\xa4\x10y\xdd\xf1*)3\xa6LX`=\xe3N\x8b\x00s\t\xbc\xd8\x1d\xadS,\xd2\x9d\xc7\x92\x11\xa4\x02\x85\xde\xae\r\x122ET#\x137g\x9bq\xfe\xbe\x88\xc6]Kb\xdc\xd1\x1eL\x99\xc6\xee\xdb1.\x92\xf2\x7f\x88\x8c\xb6q\xc8\x91\xf3\xc8\xe0K\x06\x04\x84m\x95\x1b[\xd26AV\xf5\x18~#\xb1\xb9~\xfe<\x03rn\xad\x99\x19\xc6\xd9y\x82\x03|\xed\xb1\xab\xd6(\xf6\x12\xfe\xe1|\x07\x9dw\x85^1i\x06G\xdeMb\x9c\xf3A\xfa\x85\x02;,\xf2(\xbe\xf0\xe5\x8b\x1e\xdd\x1e\x14\xab\xb6\x96\xa67\xa6\xe0\x856\xf4\xfc\x16\xe33\xab\r\x9a\xee^R\xef\xe1\xda/\xd0\xc8\x19>@\xf1R\x7f\xd3\xf4\x01\x9b7A\xa1\x17\xcd\xe5j\xcfT\x00t\xd2r\xd3\xe2\xb6\x92\x94d\xcc\\\x1b\xca\xa0\xa7\xe6\x8f\x9a\xb2\xcc\xbf\x1e\x9e\xe1<\xe6;f\x84y\x9b\xde\xfe-\xce\x9b/\xad\x839\xcc\xcct.k(\xae\x16]\xa0\r<\xdc \xa8$\x90\x89PeL\xa7\xd1\xc9\xbc\x81i\xba0\xe7<e\x16\xf3\x1aI\xc8lx\xa7F.P\'(\x1b\xe8(\xfd\x13\x12)\\\xa1\xba\x7f~\xaa\r\x97\x1ch\x17d\xab\xc0\x1f\x18Ea\x94]|\x80\xec\xd2q\xce\x8d~\x92B\xec\x89\xb3\x85A\x1a\x16\xda\xf7\xa4\x174\x1cu\xc0q\x81\xb4\xa7`7\x0b\x85[\xc2 \t\xf8\xdd\x89\n\x87\xe5\xed\xa7\x13\t\xdd/CX\\Z*\xbaoQ\xa3/\xfd\x00^\xd7\xea\r\xb1\x87\x908*\xdd 
B\x95E\x12G\xd1\x00\x9f\xde\xcf\xa1#\xed\xcf\x0c\xc4\x96\x1cn$\xc5sP\x96\xbc\xb9:\xef5p\x05w\x8e\x81L\x01\xc7\xd2~,&\xd4d\xfc\x15D\xf0\x03#\xd4\xedK\x19\xc8\x8f\xedD/\xfeJ4\x7f\xf2\x97-\xf7\xb8\x18\xa3{\x86h\xdb-\x17\xc1\xd5\x0f\xcdd\xbcaS\xa2\xe7\xbd\xaa8\xe0%\xfa\xedA\x12\xe7"\xb0p\n\xc4\xf2\xe8\xa0}\xd5\xf3\xabf\xdf\xd6\xffxvE\xd9)\xf9\xbdk,\xefz\x91\x98:wNH\x97w\x85\x8a\xcd\xad\x1e\x8cl\x9e\x83\xf3\xfcs_\\\xb7\xf3vt\xe0\xde\xb0$\xe1\x98\xdeDAi\t3(i;\x1f\xd9\xbcG_\xe0\\\x1aZ\xfd\x1a\x85\xd2!\x10\n%o\xf3\x8f\xf0\x83\xba\xfc\x1bl\x98\x02\xd1\x9c\xaf\x82\xa3\xbc&\x17.\xe1\xe8\xc9\xf4J\x0cX\xe8\xad\x90\xb2\xb6d\x03\x8cL\xcb\xb2\x85\xb1\xca\xf1\xf7g\xb3\xeb\xae\xe0\xc3-\xa9\xee\xbe\xd6D\xd0\x94\xfdtU\xa5\x033\xaf\x94\xd7\xec\xac`)\x95\xfa5\x81\x05\x8b\x03^\xfe\xbaE\xc4R\x89\xcfs\x1a~\xccz\xb0;\x161\x06\xd58\x95\xba\xf4\x86\xed Mh\x02\xc9\xb3\xdf\x8c\xc1\xd8\xba\xb3\x84\x91\xe2\xfa\xb8*\xd78\x88F4\xb9\'\xc2\xea=\xda\x8aD0\x83 \xb6\x9d\x0e\x0e^fl\xe8A`\xc0\xb5/\xdb\xa9y:\xbc\xfd\x06W\x87\xeb\xd2m\xc0\xf8\xadlS\x94\xfe}\xb6\xb2a\xfc\xde\xc5\xc4|\xad\x10\x85\xa7\xca\x89l\xaa+\xaa\xd5\x8f\xaf`w\xae\xf8x\xd5\x81zj\xcf\xbc\x1dT\x95A\xba\x86\xa4|\xb4~\xb3\xe5\xd7B\x8d\x89\xea\x9f\x92\\\xab\xa5\xca\xca\xc2w\xda?\xe8\xa5d\xd5_Pe]\xdcr\x89\x9a\xfbt\xe5\xbd\xd7r\xc26\xd8\x06\x88\xceF\xd0\xc2\xbc\x00\x18dv\x94\xeb\xd4\xd5\xfb\x89\xdf\xf7\x99\x08!{\xb8\xa7\xa2-\x91%%S=\xbd\\7uc?.\xfa\xa1\xba\x17\x0c.\xf9\xb5\xed\xbb\xd1\xb9\xb6\xea\xda4\x9f}N\x0b\xc97\x03\xe1\x17+\xb5\xb9\xf5\x1c\xa2\xc8v:Q]\xc6\xdc\xcf\xf5\x8b\x91\x138O\x8e\x9e\x83\x1b7\xe0UC\x1e\x0fM\xb1&\xee\xf1\xdbx\xcd\xf4\x1fh,\xfd\xa7\xf6\x81\xda\xa7V\x9f\x14\xc6\r\x9d6\xc9\x94\xcf\xc8\xb1\xc1\x1b\xec\x83\x8cc\x9a\xecXT\x0e\xba\xd7^p#\x99\xfd\x8ae\x18\xfd\xf6\x96\xb00\xeeC\xfcC\xd9T\xbbc\xf1\xe1\x0f%\xc0\xde\x18\xa5dR\xb7\xae\x0e"\xc1D\xe8%\xa7\xf1\x8b\xc8\xec\xfa\x91Y\xaa*\xab\x1csE\x03\xfd\x9e.\r(\xe4dl\xe6\t\xfe\x7f\xb4\xd6\xac\x00G\xb1\x7f\xfa\x0e\x14VC;\x1a\xf4\xbf\xf9w\xfd\x90\x83\x14\xd0\t3\xe6\xcb\x93\x14.\xbb\x91b\x
0bd\xed~\xb07\xc1\xbc\x1a\xb5^\xb6\xa4aH*\x94,\xf5\xc5\xe2\xf4\'hJ\xde~||8[]\x93\xb7J\x9eQ\x84Z\x1aB\x82F;;\xe4\x89M\x1e\xf9.>y\xc3T<FW\x87\xcb{\xac*a\x0c\xda*z\x04\x17uI\xd5\xd9};\xc1>za\xc8\x80\xa7{\xab6\xf2\xaa\x1e0\xb2\xd3\xdf\xdd\x99\xdd\xa8\x02!\xcf\xeaY:\xda[\x07\xb4\xa33\x94L\x00KZ%\xd8E\xa2T\x87EJ\x83\xd0~Bn\xf32:\xdb\x00\x16\x9f\xe2Par\x0b\x7f}\xf3\x0f\x8b\xa2\xdb\xc0\xdf\x91?~\x9e\x1e\xa7Q\x02G\x8d\x0bY\xd6\xcc\xf6\x962\xabG\x8a\x94[S\x1b\x9a\xcd\xf8\x84)\t\xba\xcb\xdd0\x1f`\xc3`\x07q\xe7%\xc81`\'7R\xcf\xb5\xf0\x10\xee\xa8\xcb\'\x7f\xf6\x18\xbe\x10p\x181#fF\xb7W\x9d\xd9\x9c7M\xef5\xceps\xfd\xb5\xa4C\x7f\x88*\xbf\xccFGJ\x139\x96\xfa\xe8!\xdbb\xcd\xb3@o\x16C\xdd\xc3?%B\xdf\xc6T\x95 \x97DAo.0\xf7\x85\x1d\xf0Ys.\xfb\xcc\x87;\xa7\x08&6/\xad\xe2L\xf5\xfb\xad\x18\xd7\x93\x88\xc7P\xb0-\x14M\x1e\xef\x9bBC\rU\xc5\xa7.\xaa K-\xfde\xf4\xd9\x0b\xd9\x0ft!\xd2\x07\x14\x12\xe0i\x07\xfa\xae\xd2\xec}\xd9.\xe7\x9c$\x19\xcfY\x9b \xb3\xc9N\xf7@\xd2\x1e\xc8\xe1a\xd5l\xf7q2l1\x0cGC\xa1Rq\x87J\xa78fOv\xabV{GB9\xe1vwO\xd5*\x8bD\x92\xb1\xcf_\x84\xf0\xb7\xdbL\x0f+\x02G\x91p@\xd0x7.\xd0 \xe8G-F\xc4/D\xc8\xe0I\xa4\xc5$\xd0\x83\xed\xae\x89c\\M|\xd7\xea\xea\xadT\xd3\xcdLe\xa3\xaf\xeb|\x08\xedLCr<%\x13\xc7\x92\xdfg\xd79\x8dv\nk\xde\x91\x1dV\\\x07)\xfa\xc7L_\xd480\xf6,*\xc2!R\xdd\xa2c\xf5\x0c7\xdf\x98U\xf8%y\xefN{\x9dQ+Ds\x0fV\xd0\xc3\xf3q\x8e\xc5\xf5\xc84v\xd655\xe5\x0bq\xf7(\xf0\x96_B\x05\xa2Y\xeb\xe4\x11-8\xdc\x91\\\x18\x9eDFWt0\xef\xcd\xd0\xde\x0e\xc0+\xd3\x9b\x93\xcbOH\x88c\x0c5&\x87R\xda\x9d!\xf3\xd29Bg\x11\xa9L\x05\x94f\x1b\x0f\xdcd\x8c\xe1HH\xdb\xb9\xf9\xfc\xb0\xbbVArz(2L];\x05\xddV\xf7\xa7Jng\xc7 
\xa4(\xeeS\xc0\xc6\xa3\xbf\x08%C\xfbyr\xed\xd6U\xa1\xfbt\xff\xa4\x135\r%-\xc1o_\xfb\x18\x80V\xe3o^\xc0c\'\x18h\xb8\x8e~\xe4\xe4\xfd&\xbe\xbb\xd9\xb2\xad#\xfc\xf4*{\xe9Y=\xb1\xc1\x7f-\x96"\xe4\x14\xc3\xb8\xb7\x1a\xac(\xd2\x19%\x1eJn\x11J\xec\xdf\xe2H\xfb\xde\x83\xe2Z\x1dd\x84\xc8<\x0ca\xa6\x0c\x055\xd4\x9b\xe1L,~\x89\xff\xab\xa4d\xb4.\xde\xba\xadP\xdb:L\xcb\xdb\xec9\xf4)X?\xb8\x1e\x84a\x11\x8f@A\x18\xb8]\xf4\x16)H\xfe\xe2\x00Hy\x1e\xd6\x12\xd2\xae\x11\xdc\x9c\xeaj\x93G86\xc3\xb7\xa1\x9d1SN\xe1\xb4\r\'\x19;\xea\xe5Xb\\\xcb\xefGv\xc1\xb6\xb6\x98\xc7E\xd3\xfbv\xc2\xb0\xd7\xcaE\xb3\xb669G\x07\xb22\xceh\x0e0\xcb\xb5\x7fK\xd3#H6\x94GA\xad8x\x82\x9b\xd4\x96\xd0g\x0cZ\xe9&\r\xfd\xbb\x93X\xad\xa7\xdd\x1a$\xd1\x0f\x9f\xb4ES\xc9\xbc\\-~\xa6\xd2\x9f\x93<\x9e\xed>G\xf1\x02}\xd6\xdd\xac\xafS\x0cX;\x9a\x8eva\xde=\x06\n\x95\x1d\x87ps\xff\x96\x83\xc1G\x01\x8f\x1e\x12\xb4\xa5\xca\x8c\x1a\x86\xa0\xff\x8c\x03\xca\xfa\xa6f\xe9\xfa\x06\xadM/\xc2\xe2\xc9\xad\x9c\x97a-I7\xa1k&\xab9ah\xf6\xfd\xfaW\x8f\xdef?VF\xe8\x7f=\xb5\xa2\xb1s\x1c\xecMK\x1a\x0b\x94\xc4u\xb0\x12\x98\xe8\x9d\xba\x98}\xd0l\xd4\xa7N-2\xb2\x18!\xacf\xb46\x16Gc\xd0\xecltB\x84\xb79y\x94\xa4\x18\xdf5\xae\x97\xce\xf8\xc1}\xe3\x1fc\x99\xd8lf`\xb8/\x12\xea2M\xa4\xb5cE0\x13\xc7\x16\x7f@\x93\x0f\xab\xa9\x16<\xbeE|y\x91Fo\xbe\x98\x8br\x94\xd9.6|\xd9\xb5G\x10\xea;\xd0\x08\xd5\x14\xbb\xa1TD\x9a\xef\xe4\xe8\xe7X\x94\xb68\x1c\xf3\xae.\x164\xa2\xab\xacN\x88\xda\xd2\xbe\xdbh\xa5\x7fi\xca\xbdqP\xfa\x05\x03\xdcy\x97\x17\x1b\xb2\x8fU\x19\xbc.B\xc7s\xffy\xbc\xe1\x81\x07\x15\x83\xd9\x95\x08\x0bM\xc81Ok\x18L-\x8f\xa1\r\xb6\x18\xb29c\x95g\x95\x96\rz\xf2\x0f\xd0[\xab\xe9\xe8m\x98\xec\x95\xf9\xca\xb1\xf1Wt\x94\x19D\x85\x07cdy\x07\xeb\xa1.]\xd2,\x86\xfbz\x85\xfeS\x91\x8c\xe1is~\x97\xf9\xda\xafA\x8f\xbfB\xd9\xa4k,1\xbep\xde\x10\xc3I\x9c\\f\x9ee\x1b\x01\xd3W\x84\xae8W\x90\xeb\xae\x94\xb7~0\xb3\x1c\xccX\x13m&\xc4\xa2\xba\xb2J\xf5\xcbw\xefBe\xce\xa8\x05>\xfcc\x82\'\x19\xad\xdc\x89\xd6\x1c7C\xf9\xb7\x1e4aU\x97\xb2\x0cM\xb5\xd4\xce1\x05o\xbb\xdb\xa3\x03-\x
a8\x8b\x7f\xca?\xd6!\xcb\x9e?\xe1R)\xebh/\xd3\xec\x0f\xdeR\xeeT\xe6\xb13\x02\x9cM\xc7\xa7\xf7*\x84\xce\x0b^\x96%=\x8e\xb3\x16\xd8E\x94\x0f:\x87i#\r85_\x82\x1b\x8e\xad\xf6\xdc\x9eW\xad\xc3\xef\xcc\xfc\x87\x1f\r\xba\xdfU\xac1\xd5\xcc~\xcd\\\xa7\x84^$7\x9a\'\xd6\xfb\x1f\xc2l\xf92\xca\xd6\xdb\xfb\n\x8c\xca\xeew\xc9\xa4\xd9T\xf3\x88P\xd4\xd8\xc3.>\x1f\x1b\x95\xd1\xa9\xeb\\\xac\xdc\xbc-!TS\x8a\xe6\xf4\x91\xff\xe6\xd8\xb9X\xe9\xf7h&Id\x96\x82\x12fd-R\x18\xba\xd6\'}J\xd8\xfc\x94\xcd\xc5a\xb2m\xa0*{P\x91g\xd4,\xcc\xa3\x19X\x0b\'\xbfw\x97\xde|(s;\x18\xcb\xddzy\xcf\xf4\xc50\x8e\xd0Y\x9b;0\xa1M8D,\xf4\x1f\xdfR\xb0L\xa9\x1b\xe3w<\xd5\xfa\xde\xf4$\xd1\xc0\xdb\x98\x18n7\xe0\xef7\xf1\xaa\x01\x17\x0bc\x86Y\xcf\xd8\xc8xC\x9a\x9b\xa3\x08+\x82\xa9\xedN\\\x97&\x8f\xc3Qg}\x83\x82\xe1\xfb\xfd\xb9o\xbb\xd8\x07E^M\x19\x9e\x85\xc3\x8e\xe98\xa7\xf9!D\xdd\xa4\x7f\xac\x02!\xe2\xd2U\xe7\xf8\xbb\xfaC\x9e\xf5Ds9\xb6\x12\x0cU\x17{\xa0}\xbcf\x00<@\xc3\xbfW\x0f\xddO\xa9!*\x84\x8c\xa3\xb9\xc3\x06\xc7\xa7\x00<@\xc3\xbfk\x97n;\x12B\xbe#\xcf\xf6\xcc\x89\x92\xcc\xf4\x19l\x02>:\x04E\xf5\xd1g\xa6\xf2\xf9L\x13DS\xa6\xe4\xec\x8b(\xe5(\xd8]\x80\x8c\xadTB\x85\x96\xa8ss=I\xa8-\x15\x98\x070L\t\xb6\xa1\xf6w\xaa\xfee\n\x94\xc0\x94\x9c\xa0\n\xc2u\xc7ZznO\x1c\xeaqE8\xbdg7o8\xd2\x1f\x8c\xf0c\xd4\x19a\xce\xad\xdb%f\x8e\xc7\xb4\x06yrX\xf0\x1d\xdb\xae\xac\x8a\x008@\xc7\xbf\xb4\x8e\xa9\x9b\xa2Jf\xad{\x95\xf5\xe1oew\x008@\xc7\xbf\xa01\x8a\x0ca\x14}\x1dn\xf7\xb7vz\x9a\x10\xff\xe0x\x9c\xa4\xba\xdb/\xfe!\xd6k\x9fVH \xbe/@\xa9\x83cn\x85\x8e\x19]\xdc\x9b\x18*L\xe0\xf6\xc28k\xb3pee\x18\xb62"HL\x9a\xf3\xcf\x18\xbc#\xba 
X\xf8\xde4\xbc\x9b\xdd\xcf\x8e\xd3\xd3\x8f\x825\x15\xae\xd1y\xc2\xe2\xe5\x82!\xfe\x1d\x9f\xea\xfe%I-&\x83\xd2\xed\xe0\x1f!\xc5\x12}\x002@\xcd\xbf\xed~*3\x8b\x04?\xc7\xf5:\x8bT\xaf\xb8\xa3\x002@\xcd\xbfu\xf1\x8f\x1d\xc5\x1d\x02\xf2\xd5C\x0cy\xdd\x06L\x15eK\xf0\x86\xbf\xf5\xea\x80{\xacF\xecY\x89A\x12\x9a\xb4d\xb1G9\xf4F\xae\xb0\xc0\x02tzWH\x11Xl^\xbc\xa4\xca-\x9a\x1dS\xcc\xec\x0e\'_^Yv\xaf#s"\xa8\xe8\xe0\x15\x9d\xa38\x04\xb7\xa1\xf1_\x95y\xde\xf9R\xd4\xb90\xcf\xf0\xca/\xfb\xf0_\\0\x03\x10[$\x87\x94\x13+b!\x18\x03Q\xd2Q\x82\xce\xf3\x000@\xcf\xbfOZ\xfb\x89\x95q\xe7\x8bI\xb5\xe6\x1eYf\x00,@\xd3\xbf\xe3-`T\xb7\xe7\xa4\xe8\xf5]\xca\x05\xc3\xc8u\xea\xa4L\xbe\x02h]\xa0@\xeaEs\xa0\xc3\xdc\xd1J\xc1\x9a\xe6^\xf6\x1b\xf7\x04\xdd\xdd\xc1\xa9\xdc\xbb\xc2\xe6\xdc9\xaa\xfaP7\xb1\xa6\x13\x1ci\x15~\xb4\xc65\x81\xc3\xb7\x0c\x87\xc0s\x90\xa7\xe7\xdd\xaa\x89\xbbr`p\x06\xaf\xcb\x8b\xf1\xb6\xfe\x89\xb8&D\xe9\xa3\xc0\xd2\x8d8\x05\xd6\xdb\xdb\x9b\xee\x9fk,\xae_K0\xe8\x8d)\x00,@\xd3\xbf\xf6\'v\x88\x14[\xd1\xe2\x1a\xf8\x08\x00(@\xd7\xbf\xbe\x1b\xb59\xfa\xa6\xa2\xac\xacZ\x18\x08E]"f1\xfc\xf2c&qh\x82\x1ba\x95>kXS\x0f\xc5\xd9\xa1\xbe\xad\xdd\x14\xaa\x8bl\xc6\x12g\xb50r\xcdu\xc5\xf4 \xb2\xaa\xf6\nF\x7f\x14\xd6\xf0\xe8E\xd955\x96<B\x85\xabq\xbd7\x1f\xaf\xd3\x84\xd3B\xda\xde\x17\xe5\xeeB2#\x87|x2\x7f6\'d/\xba\x92\x12\x9f\x96\x955\xd9\x85 N\xd9\xb0 j\x1f\x83q\xe0\x88\xb9\x0by\x00(@\xd7\xbf\xd7\x03G\xd1\x9b\x05!)Hd\xd6\x16\xb3oA\x00$@\xdb\xbf\x14\xf4\x03\x85Xs"k\xafm%q\xa8h\x8d?\xd2\xaf\xb6\xfe\xe2\x84\xe7\x83\xeeo\x82\x92\x17_\xbb~G\xd6\xff\x8b\xa54\xb7h\xe1P\xe4d?\x86\xfb\xd8Pb\xe3sKz\x93k\x89\xbe/\xe8\xff\xe7\x0c\xd9\xabs;H#\xad{\x00\xf8\xa8\x94++\xfb\xfa7\xbcm\x15-[\xffX\xb6Z\x13\x8f<q\xbc#AJ(\x85\x8e\xf6\xf9`\xdeF\x87\x80\x91\x9a\xb8x/+Q\xa3\xd6\x92\x9d\xb3\xbf/\x00 @\xdf\xbf\x9b\x18\x9e\xb0\x1d\xf2\xca\xb1\x15\xa2\xa4\x9a\xd3\xf6\xaf\x00 @\xdf\xbfV\x8b\x15\xa5M|\xc7-)* 
3\x1f\xa4\xdfAf\xda\x88D?6\x95s\x06G\x0c\r\x9c\xa1\xc1\x9f#R\xe8v\xc8[e\xf8\xdf\xfe\xe9}\xb1i\x07#?\xee\t\xbd\x99\x04#b\x86"!%\x99\xfe\x04Z\xe1\x89\xe7\xa3G\xd0)\xa1IW\x12\x01\xe1\xa1\xeb\xbcx\x9c\xc1\x94\xc32\xadU\x1e^ \x0fT\xf7wx\xea\xe5M\x1e\xd9\xf7\x8a\xf9I*\xce\x17F\xb0u$\x8a\x00\x1c@\xe3\xbfV]pM\x87\x12\xa33\x9f\xfe:^qjv\x00\x1c@\xe3\xbfIC\xf5\xdeq\xeb1-\x1a\x1e\xcbF\xd6\x1f\x1bT\x1b\xe3S\x82\x0c\xb8\xefOr\xeb\xb3J\xdf2\rF\xe3\xb8$\xc3\xe8\x1e\xbe[\x8b\xbb\x8a\xfe\x10\xf54\xd6\n\xf2\xe7\x9b@m\xcd\xb8\xb81\xe7\xecV\x89\x0e\x91\x1f\x97d\x07\xf4\x95\x08\xed{\xaf\x0fX\x1a\x9c\xb4\x0b\x96\xe7\x9eg\x1c\x00\xf7\x1cN\x9e\xcbqzB\xc7\x19\xa0k\xab\x91b58\xd8\xd2\x84\x17\x95\xbd6\xeeL)\xc7\x00\x18@\xe7\xbf|\xc1\xa7\x96k\xc8\tf\xbd\x14\x7f%\x8e\xa9\x06\x00\x18@\xe7\xbf\xda\x8d\r\x168&\xfez\xbe\x15\x9d\xf4L\x06\x13\xaaa\x193<\\\xb2\x94\x11u_!\x0e</\xf6\x1f\x9a\x15\xeb\x02\x1a\xb1JC"\xac\xe3\x95\xa7\xb1:7\xb5\x11\xf0\x96\xf8\xeam/q\xc1\xd1\xd6\x93\xd3\xcaS\xa1\xe0\x0f;\xf1\xbb1\xbd\xdc~\x9a \x1a\xe9cr\x13\x9d\xb9\xa6\xb6\xed\xfd}\x1a\xbeL\x14\xa8\xdc\xd9\xdfU\x01\x0cCH\x06u\xa6\xa9\rd\x86\x05(\xd9\xfbZ)\x94\xe1\xf4\xe8\rW\x95i\x8a\xd8\x1b\xedI\xec\xf7\xcdcb|\r(o{\x12\xd4\x7fO}I\xc2\xc3\x076\xb1\x88\x82*\xe0\xc1\xecmB&\xd2\xf8dN\x8fz\xad\xa6>\x88(\xe1\xfe\xd6}r\xe4\xd3&\xd2\x9ae\x10X\xb4\x99\xf4k\xfd\x7f\x00\x1d@\xe2\xbfY\xc11\x9eD\x12\xa0\x0c;\x92\x00\x19@\xe6\xbf`\xa8\xeeE\x83v\xa1]\xec\xd1\x0e\xed$-S\x12i\xa3\xea\xc0\xf20\xad\x93\xcb\x7f\xc8I\x0f/ML\xae\xab\x81d\xe0Ad\xbd\xf8{\xfcK\x1dz\xc9C\x97\xb1\x94\xc7\xa4\x16G\xb5\xd7\x8a\x1e:\x06k\x15\x02\xcb\xd42\xd8\x92\xe1\xb8+\xbd\x97\xadl\x88\x185y\x90s49\x04\xb15h[\xb2\\3\x192\x88\xd6%\x00\xaa\xd3\x9ck\\\x97\xf4J\x19\xc0\xc6\xb8\xeb;Q\x00\x19@\xe6\xbf\xd3\xf8T\x04\xa4\xbb\x8b\xacL1<>[o\xe8\x00\x15@\xea\xbfk\xd6\x8b\x1bD_!\xcfI}\x1d\xc2\xed\x9e\x8a\xe3T\nQ\xbc\x08\x17\x1bJD\x96\xf6%#\xf1H\x8c^5W@\x891\xe1\xe5a\xdb\xd8t\x88\x05\xf1\xd4\xd2\xf3pV^\xf7\xd6c\x0e\xdaA\x9aFI\xcd\xcb\xf9\xc7t\xe7\xca\x06\x97\xcf\xc24>\x03\x81~s\xdb\xb
ea\xd5\xf6\t\xb2\xc9\xd1P63&\xacU\xf8\xca\xd2^\xb2o/\xe1\xa7i \xae3\x81\xa9\xc2n\x81\x9dV\xc0\x0e\x85\xcf\xb0\xf7Q\x9f\xe2\xe1\xfa\xa0%u\x18\xfc\x93\xabq\xfcv\x1e\xd34\x00\x15@\xea\xbf\xc5kY~\x9d\xbd\x92\x00\x12@\xed\xbfc\xa2\xa9s.\xf4Z\xda\xa1z;[{\xa6\xd0\xb9\xa7\x94\xfc*,=}\x01(H|\x93\xfd]\xc2\xc2\xfeh\xc5:\x8f\x0e#\xc1^\xe5O\x94\x13\xafr\xb1\x85\xc3\xb0\xede\xe4\xe5l\xdb\xf6\xe3\xa0%3;\xf3\x13\xad\rGA\x17\x06\xe0\xdeA\x85\xd5\x90~\xc7\xd5i\xc5\xa9\xe9\x93\xa6\x19q\xda\x081\x14\xeePW{ \xed~\xa8\xa3+\xfcEm\x0f\t\x07\x04\x16I!\\\xce\xcb\xef2<\xbf\x19qaa\x03\x0e\xe6\x00\x0e@\xf1\xbf\xae\x11\xb7S9\xa7~\x1a\xd1!\x93\x1d6m\x12\x00\x0e@\xf1\xbfc.t\x8bY\xec`@?\x9fu\xb9\xf3\x8d!\xf3#\xdd,\xf5\'\x97Df\xf5(\x8f\x92\x12\x87K)3\x99\x80\xbbP\xb9|v\xb9\xfd\xd7\x1b\x8c\xc7\xd6\xb3i\x91q\xc2\xc6\xa1\xd8\x13\x89\xb2\xb12\\\xa7\xd4\xc5\x8a[\xefT\x8d\xaf\x83\x93\x04\xdb\x85j\xdcf\xd4}%\xb5\xe9\x88\x7f\x9a\x0f\x8fC\xf6\x98\xdbj\xcbM\xbbdc0L\xe3t\xb1 \x08\xd6\xf37D\x9cC\x90(\x8c\xaen\xdb\xb6m\xdb\xb6m\xdb\xb6m\xdb\xb6m\xdb\xdcm\xdb\xb6\xdd}\xcf\xfb\'\xaf*\xa3L\xd6\x1a\xa5\x92\xc1\x17\xbd\xeb\xab\x12\x89,\xc1Hy\x8a0\x00Q\x81\xae~l\xc5\x07\'\x02}M\xa8\xa2K\x04\xd2\x94\x1aw9\xc7\xed\xa5kxW\x15G\x0f\xdaC\xcb\x16(1\xb7\xeb\xa4\xbe\xce\x94z\xa76\xf8*Uw\xec\xc4F< \xd5\x0e\x8c\xa0q\xde\xb8\x06\x1b\xa7\xed\x01\xbb3\x05\x9bn9\xcf\xfa\xe0\xe0\x0e7\x8bQ\xcfyc}H\x9b\xd1\xa5b\'#e:i\x07\xff\xf3\xb9s\x8cDB\xd9\xa9\x07\xe2\x7f\x1es7\x17&\xfc\x86\xe26\x8aak\xca\xd4\xc6\xd7X\xa0\x13\x15F}\xc7!N\x9f \xd3\x15\xffy\xa8,\x80\x9c\xff\xd3<Q\xd3\xe3\x9c\t\x05\x10\x14\xe8\xebo\x93\xf5\xde\r\x82\xeb$m\xab\xf3N\xf4]6\xa6\xc0u\x12\xa9h\xf39\x15\x9627\xb5\x12\x12\x01d\xc5\x8c"\x1e\x91\xb3!\xc6\xd9\xac \x04\x98U\x07J\x17\xbb/\xdf\xb4\xbe\x86\x11\xd2 
\x16\xb7\x8e\x06\x99h\x9cn\xc7\xc1\xb9[.\x17.\xd8<\xbc\xc7\xdd\xf8\xdb\xd0\xfd\xf1\x0b\xf9\xaeT\x01\xae]\x0c\xc8\x02S[!:K\xaa;H\x89\xbd23>\xfe\x0f\xb7M~\xb4\x84\xb7\t\xc3\x98\xa2\x9f\x07\xc0+0\xd4\xdf\xdc\xd9FO\x9c]\x01\xae\xa7@\xad\x13\x8c\x85\xe2\xe6=\xb9y\xc5q\x93\xd4W\xc5d\xceVU:\xd2\xf4\xa6\x90\xe9\xe2\xd2%\xb2\xe1K\xd5\xda\xc3\xccG\xed=QB\xbc\xe1\x0c0\xe1e\xde\x13\x1d\xc9\xcb\x06\x8f\xc0\xefX\x1a:\x9b\x9csXL\x98\xe6\xabX)\xaeP%\xb7\xa6\xc1<R\xa1\xd3\xf8\xe4b\xc8i \x9a\xc6!lO\xe5\x9f\xd5\x08\xfb\x1d\x83\x88\x15\xe0\x10N\xca\xd5\xcb_d\xd1hY1\xbb_\xc3\x0e\x17(\xea\xbcE\x82\xf0\x04\xf5\x9fI\xe7\xa3Kl\xd3>\x1e\x00\xa7\xc0X?\xe7\xa0^,O\xcc0\xe0\xd0\x88]60s\x83\xb4"-\xa7\x16\x94y\xfch\x04\xa6\xbfh(\xe3\xe8\xed\xc0\\~r4\x0f\x88*\xcb]\x9fpv\xd2F.\xfe\xd3!h\xba\x9e\xc6\xee\x15\x9d\x1f\x0eS\x1f#\xf7\x83B\x0fZ\xf2\xe3\xccs\xb8\xb7G\xcfq\xe2\xdb|Vv\x1a(]\x80\xb3\xab\xdb"!.\xbb\xfe\xa7\xce\xfc\xe2\xaf\xb6\x8c\xde-\x974ky\x08\xe3e\xd9\t\x83/\xc2\x86\xb9ub\xf6\x7f\xf0\xf8_\xd4\xec(\x8bVJ\x1dKt\x00V\x81\xa9\xfe\xd2;#\xa0I\xdea 
V\x16)[i\xa5L5wf~\x06C\x84<8!-\xd7\xd3\x0b/Fs\xac\xeb\xa5\'\xe1"[k\xd1FGe\xa5\x903\xa0\x94\xc8I\xa9_\x99\xeb]z\xf4,\x85\xcew){\xc6\xfd\x0e2\xd3\xb3\xec$\xca0A{\xdc\xfd\xf9#\xff>\xdd\xeaUv\xc5\\\x91\xf4\xf4\x01\xb5\xa9F\xb2\x03\xac\x19\x89z\xd5\x8e\x0b\xd7\xbd\xf5m\xbe\x92\xc1\n\xf0\xb5\xa9\x8d{Q(%\'\x16\xe3d\xe5\x8a\xa3\xff\x1cX`\x84\xf4>uGl\xd9\xea\x1b\x01\x18\x05f\xfb\xa5\x19\x1a\xc0d.\x8e\xe5Q\xeak\xd0\xb9\xf55\xfe*\xc2\xffl\x01\x88\x85\xae\x00\xe1\xf8\xc3\xe24n\xe2\x90\xef\xbb\xd5\xfa\x1d\xdb\x8e^4o~\xe1.\xee\xd4\x88\xc9\xf2+e\x81\x07\x96\x98V\xc6\xe9\xc0\xb3\x00\xfd\xcf\xb1e.\xb8B\x89\x91\x7f\x0c\xe6\xa8-\x9f\x8f!\xb3\x04$]vn\xe6<9\xb3><\xe1\xd2V\x9cj2\xf8\xc9\xb7I\xe5\x12\xb8>\xc5\xcc/\xa8c\x86BmM@\x9a\xbdP\xc4\xa4Zz>\x15s!Y\x99\xb5\x07\n8\xc9T\xffs1\x8a\xab\xb3\x01\xd4\x0b\x9b.\xf4[\xe3\x85,\xe0\xfa\xef\xd3\x00\xec\xe9e\xd6^?\xc2X\xdd\xa6^\xb5\xda\xfc\x9cL\xe9+q\xb4S%\xe5\xe1\x929\xe5y\x18\xb9\xd0}k\x01U\x81\x03\xa1W\xf8DdEf\xb2#IE/\xb4?\x9a\xe0c\x17\xa9\xb7pV\xf1e#t\xd2\xc1\xa79\xb2\xa7\xde\xb5\xb2L\x05a\xce\x08\x06\xa3p\xdea\x96\xc1\r\x83,m\xf6\x10\xedy%\x04\x05j\xf4\x1d\xb7jfJ\x8b\xb6\'5\rc\xbd\xe6\x7fH\x07x\xc0v96\xd4\xba\n1\xc7\xa9#$\x00R\x81\xcd~6 \x1b&\x8f\xcby}d\xae\r\xf03K\xde\t\xffT\xf3p\xe7\xc8\x96\n\xa5\x11nN\x0c\xb9\xdf\x18\xa5gC\x83\x86\x1c\xac\xf1\x820r\xda\xf0\xd7\x04\x07\xce\xc1%>\x8dybNi\x8c\x01\xc0M1\xf6\x16\xe3ny\x93\xed\xfa\x15\\S}0B\xd4AN\\\xe93\t\xd8\nl\x85\x80\xdf 
\x89\x91v\x08\xcd\xc6\xe4c\xc3\xceS\x9a4q\xc8\xf5\xa1/g\xce$\x12\xb1\xbb\x9c\xff\xb8\xa5\n\x1b\xeany\x9aw\'Q\xb5Nz\x8c\xf8\x00\x84\x02\xbb\xfd\xeby^\x1e\xc1\xffJ\xf5\xee\xc2y\xa0\xa5\x11\x83\xf7_\xa1\r\x94\x01\xa6\xfa\xc7\x8bFH\xf3\xd9\xb67\xb5t\xe2\xd6\x15\xcb\xec\xd8@\xf5\xba\x18Bh\x0f\xd9\x0bW}\x07\xf0"\xb7\xe0b\xe1\xf7\x7f\xe0i\\\xa8:w\x84\xa2\xa2\x94e\xa6\xde+\xee\xdcU\xea\xa9\xf2k\x17\xc5\xbd]Bwr&&\xf4\xa3#\xa1X*N\xe6\xe6ke\xc6t]\x0f\xffR\xb3\xab\xf8\xf7\xf2\r\xc8\x89\xa2\xb4a\xc2+\x86\x97\xfe\xa3;V~\x1d\x8e\xd9\x1c\xc5\x94\xa6\x7f\x19mL\xcc\x00\xe0\n\x1c\xf6/s\x1c\x8d\xe3/[52d10\xf1\xb4\x04\xfc2\xe3\x1e\xadnU\xea\xb1\x95\xb0\x13l\x87\x11&3\x1a\xad6P\xb8N\xfez\xdb\xa1\xe5gu\xe2\x9as\xd3\x1az\rQh\x1a\x05\xdb\x1d\x1f\xb9g\x11\x07\x94\xecF\x94\x87r\xba\x81\xac\xb5p\x1b);\xb2 \x0f\xc4\xeb\x93\x19\x0f{\xf9\x1f\x13\xd2\xb9\xe6\xab\x7f\xbd\x8d\xe6\'\xd3\xc9w\x19\x90\xf4\x8b\xdaU\xees\x95Mab\xef\xfe\x87\xfd\x02\x88[\x14k=\x00\xf9\xf7Gx@;>\x05\x80)p\xda/M\xdf\xe8\x8b\x7f\x15\xe7u\x86\xaeH\t\xef\x10=;\xe4\xdb\xe7\xd0*\xca\\\xfc-\xb1+\xf7\x9a\n3-\x17\xc4\n\xd5\xf7c4b\xb5*\x12\x13\xf9\xb1\xee\xdc\xe7\xfa\xf6\x00\xe3\xe2\\\xd3{\x9f7\xcb\xc5\x9f\t\xcc\xf2\xb5\xbfh\xda>\x99\xf9\xaf!\x9e\xba`\xcb\x1e\x92\x8c\x07\xd9\x1e\x8e\xfc\xaf\xfch\xfb\x82P\x1d\xd6\xef\x8a\xc6\x85\x11\xb7\xf1\xbdl\xed\x8f\x8c\xa7e\xa1\xe8"\xf2N\xe9{E\x98\xee\xe7?6)Hy\xb5\xfbAUc\xba[\xe5"\xba\x15\x16\x00\xaa\xc0e\xbf+\x02\xa6#\xb5\xa9\xad[e\'\xc9\x01\xb6\x04I\xb9 \x8e`\xd2\xe11\xd7\x9c\xbd\xc0n\xf2\xf8\xb5\x8d\xad\x06x\'\x81\xf6(G\xb1U\\y\x87\xf5\xd7\x83j\x99\xf9\x88\xb1`r\xe6qG\x04\x8b\x9b\xf8\xf6\t\x82\xb1\xa1\xb0\xde\xc3\xdb\xbd\x96\xf3\xf7\x86 
\xb0\x93\x98\xff\xb7`\xb4\xf5\'\x8c\x9e\x9b\xb6\x13\x8d\x16\x0b\xcdk\xdf\'!D\xf2T\xeb\xb0\xce\xd2_S\xdb\x9b\xa9\xf5\x88\xb2\xbe\xaa\x84\xd1,\x98\x96\x02\x0b\x83\xb7\xdd\xcam\x0e\xdd;](\xef\xd2\x83O\xb7L`\xc0\x7fBt\xc9N\x92\xf5\xeb3\xb6\x92\x8e\x1f\xach=\x00\xff/\xa2?\xae\xa7\xd8\xa1#\x18\x01@\xda\xedz[\x97\xa2\x8c/n\x90\xfeS]\x1ak\x05\x1c\xa2\xe1^\rz\x1d\xcc\x97\xb4\xfcT\xb5_\xa9\xc0\x84\xe8\x9e\xc6\xa8\x843\xa3O\xde?\x0cd7\xec\x84AYW\xea\xcdO\x9em\x8d\xca\n\x10\xb0)\xaaQ\x8d\xf9\xdb\x83*\xf9Q\x8bR\x06\n\xd1\xcaaN\xeaWN}\xe3ub\x0c^+AZ(v_,\xb2\xcd\xdcB\x11\x95\x042o\xfc\xffC\xf7\xff\xabU\xde\xb6\xd8\x8f+R\xe4U9\xaa\x1b\xb9K\xcb\xd6i\x7f\x9cY%\x1bs\r\xbc\xf4K\xcc\x00Q\x83.\xfe\x84\xed\xf6\xef\xe2\xb4\xb9\x07\xf8\xabt\xa5\xb6\x1a\xefO\xfdz\xa8\x1b\x15\xaa\xb6._?\xc1\xca\xc7_\xa60(\x9e:\n*\xb7\x80lW>>\xd4>\xa3\xd3\xfc\x0e\x075\x19J\xb0W\xf7\xa8\xf4c\xb8\x9cUm\xf8\r\xb9\x8d-\xdd0\xa0\xcc\xa4\x83U\xb9d\x93Q\x13\xc3U\xf0t\x17\xdf\xcb\xa4\x84\xa5\xf4\xdeK\x93\xe9\x91\xa24\xa5\xbc\xe1&\x07+\xdeGk\xdd\xd3\xc5v\x15\x93\xbb\x07;\xb7\xf0\x7f\xfe;\xd1\xb6r9\x12\xbd\xf9P;\xd0=8\x000@O?\x8d\xa9\x9a\xbc\xa99\xae\x90\xd9\xc9\xd5\x92\xf0\xc5dC\xf3\x89\xd2\x9a\xac\xab\x9c\x87\xba\x9cw\xbc\x05\x0bV\x19\xde-\xcd\xd2n\xcf\x8ev\xad\xfa\xd0\xe5\x8a\x97\x9b\xd5\xabk\xbd2\xbd\x92\xa8\xe5\x84n\xd8\xee\xea\xf1;\x9f\x7f2\xd1\xea4\x88fh\xcc\x93\xdf\x80\x8agt\x91e\xab\xf7\xaf2I\xf1\x9a\xec\xa2\x94\xa0\xd6n\xbc}\xe5p\xbbO\x96\xd0=\xc5\x1e\xf7\xe7\xdcAN\xdfij$\xd9\xaa\x15;\xef\xad\x92\x8d\xb4\xbfWk\x10g\xa5\xba\xe3hye\xb3\xf2\x12\xf5\x12\x10 <\x18:\x11\xd8\x97\xe9\x10\x9c\x19VM:{\xa5H\x04<\xe7\xd8\x1a\x1c\x1f\xd9\x9f\xb9~\\&\xaey\x903]\xd6\xddep\xd3?\x84\r\xbf\x83b\xd5\xf0\xb9\xa8\xb9\x96M1)<\xcf\xf9\xc1\x19\x9c\xbc|\xd3C\xd6\xd8\xef\xc1\xe8(\xe7W\xfe\xf0\xb5\t}\x81\xbc\tR\xb2L\xf3\xb38\xa9\xcf\xea\xa7B\xf3\x15\xab\x03\xee\x91\xc6\xd5\xfe\xf0\xe4\xfdmP#\x93\xdf\xb4\xa3\x11\r\x96\xf1\x16\xad\xa6\xf9\xd6`k\xd4qo\x12\xde#A 
M\xb3\xcdo\xc6#n\xa1:b2\xf2(Q\xd6Q\xf8\x18\x1d\x16&\x88\x19\xf3\xef\xe6o\xe2\x81}\x9a\xafKU\xc5v\x14\'[\xbe\x94#]"x\xecQ\x8f\xc4\xf2\xccr`\x03\xd4\x0c\xcb\x7f\xf3y\xf2\xc2\xf4\x9f\xa4\xc6d%\x1c\xfbKQ\xf5\xcf\xf8b\xbe(s\x12\xa7\xfc\x89\x93- \xf73\xc1\xc9-\xa7\x07V\x18\xd3(\x92\xf6\x16h\xe4\xe3\xb0\xa9E\x03g\xdd\xd1.l\n\xd4]\xb7\xb3w].\xe0\xc3\xd5\xd7\xc1\xfbH@\x91G\xd9\x1d\x18\xf0\tK\x12}\x82a\x92\xf8\x9e\xb1,E/\xf9c\xe50\xe0U\xe3\xda\x80\xbc\xa0\x95se\xa6\xf8\xe8P\xc8\xef\xc9\xb1}\x1f\x16\xaf\x87\xc9Qo\xbb\xac\x94\xf0\x06\xab\x96\xaf\xea\xcd\x99\x04\xb2\xd3\x00\x0eP1/\xda\x1eL\xfc\xa0-\xb7T\x95\xdfJR\x89\x13\x99\x7f\xe6a:C\xb39\x7fb\x90\xcc\t0M\x0b\xb73\xd7\x1a\x7f\x11\x9f\x0b\xae\xee=V\xaf\xf8rrpJ`\xba\x8b\xf1\x00\x06\xd3\xa1/\x14@\xcc\xd4#"\x06XsR@w\x8fGv\xf3\x0cK\xfb(\xa6d\xd26\xbe"bs\xcbX\xadW\xf8\xb3\xc1\xd5\xbd\x9a\x12\xdb4\x92\xfaQN\x1d\x94\x1f\x9a\xf4\x0b\x1a\xb2\xecZH\x80\xf0k\xd9`\xc6\xccc\xf1\xeb\xc4\x9c\xc9\xcdo\xff\x86\xb9\xeb\xc9\x99\x014@\xcb>\xf7\x92\xbe\x12\xc0\xea\xd1x\xa4\x97\x9f!sE"\xfe\x9as\xc6\xc3`\x88\xd7y\xacD\x16\xb2@\xec\xf6r\x14\xf9\x9bE\xa1n6\x90N/:\xe4\n\xcd-8\n\xa6\x93\x89\xbc/e\xdfs\x80R\xeb\x945\xdb\xe9\xedRIW\xecr\x1c\xb0\xe6\xf4\\-\xffqt\x1c\xcd\xbfr\x0c\xb8\x98\xc3\x1eO\xa6:Zh\xc7\xc3\xea\xb9\t\x82\xf0t\x1cL\xa2;\xfa\x99\x825\xc9\xbblQ\xea\xc75[\xbfM\xde\xe0\xc3\xe12\xb7]bn\x1d\xb8\x00\x98\x05f\xfa\r\xc6\x99w\x99H\xafpd\xcf|\xd5\x0f\xe6\xcf\xfc]\x9b{\xa4\x9b\xb6\x81\xb9o\xf3\x82\x97,\x13\xb4\xc2aG\xe9\xb9\xf7b5l\x1d\xc4\x86\x9ft\xa9\xdc\xe0M\x0c\x18\x11\x07\xb3tn\xb8m\x19\x94\xdf%\xb0\xf3\xb2\xedw\x873=\xed\xa4s|5\x92~\xa90P\xa0Pn\xd3{\x19\xcd\\j\x82\x05W\xd4ar_B\x12\xa0\xdf\x85\xbe\x7fR+u(\xd2\xf1\xb3\xe5\xdc\xd07\xaa\xea(\xc4\xc7\xa0;\xfa\xae!\xb3\xf1\xf6mG\x17}\xacn\xec\x06\x83\x06\xa8\x19T\xfa\xc7\x9a\xd3\xd6\xf1~\xfb\x80#6\xfe\x94N\x1bB\x07(\x19\xd4\xe6\x9b\x9c\xef\xbd\x9d\xc2\xfb\xbeWj\xb3P\x9a\x94\x90\xcbtUp\x93\x1e\r\xf1\xbb\x9a\xd2/\x06\xdd\xcdz\t\x16\x13ml\x8b\xfa\xc0\x8b\x8b\xae\xe4\x17\x8f\n\r\x1d\xbe\xc5\x1f\x9b\xfc\x0
3f\xe2\xb3L3\x99\xf3\x8cmY\x1e\xde9\x97\x9fv\t\x80\xf6\xddN+\x9b\x893\xa3\xd2\xa0\x13n\xb2\x1e\xf3\xd1\xb8:">\xe7\x8a\xb8-\xe8e\x8d<\x13\xc0e\x84\xfd\x95\xc0\x05\xd1\xfe\x05tG\x86S\xb4\xa7\x9a\x90\x9bc\x07p\n\x8c\xf5\xfbk\xb3\xbe\x19\xab\xfd\xb9a\xeeZY\xaf\xfd\xf8\x8b\xa8\x03\xfd[?\x95m\xf0\xf9R\xf9\xea}\'al\x89S\r\xb6.\x0fKK\x17\x9b\x8c\x15\xa2\xb7^\xbeW\x95\x90~\xb1\xc8\xbf\x8e\xda#\x17\xb3\xef\x90\xb8\x92C_\xe2\xf6\x98TQ\x8dw\xaa}\x1f\xf5X\x967\xaa\x14\x15\xd0\x9cy\x8ab\xf5C\xff\x07\x18{\xf4o\xb5\x91|\xaf\xc0\n:\x8c\x9aV}\x8e<#9\x9e\xf0\x93\xc1\x0f}\xfaTOf\x10\x87\x89P\nq\x1a\x145@\xc3\xa0\\\xdf$\xcb9\xd2S\x9dT\xd6\x01\xa0\x01\x86\xc4\xfa\x99\x1f\x99+\xf25\xd3s\x7f\x89\xa7}\xcc\x88\x10\xec\xea>\xf682\xcd\x9b%`r\xd6\xc4\x10\xc9u\r\xd5|\x16Y\'\x10\xca\x9d\xed{\xf6\xc5\x9a\xa8\x00\x05\xf6h\x0c.:8\x10wt\xb8o\xa6\x85\xae,<3\xfd6\x82\xcf\x8a-\xed]\xf2k\xc2\xdc~y\x04\x95\x95V-`\x8b\x8c\x8b*\x89\xe0T\x88\xbc\xe9\xa0lv\xb7z\xdf3\x9e\x89|_!N\xc6\xd1\x99\xb7\xa2\xcf\xa8\xb7g\xecY\x14t\x00\x9d\xe3\xfc/\xb7D\t\x8e\xbe\x9a\x90r\xed#\xd3\xbf\xe4\xf4{\xee\x1f\xff\x9e\x88\xfd!\x8d\t\xfal\xdfbr`\xec\x84\r\x8b\xa4\xba(y\x02.<\xf4\xb1A\n\xd1\xae\xae\xee\xed8\xc7\x9c=\xd8$\x85\xd9B\xbf\t\xba\xc4\xfd\xa2\xfd\x8e\x08\x87\r\xd6C\x87\x983z\xbc\xfc\xe5F\xf3\x92\xef\x80\xca\xbaK\xc7z-\xe3\x0c\xc9\x80\xe1\xaf\xaf7PA\xbb\x0cE\xf1\x9d\xff\x02Ie\x8dviY 
/\x9e\x86\xf2\xf1\xf1\xa0\x98\xb2\xc8\x17V\xb5\x0b\x8f\xd3N\xb0\x1f\xa0\x014\xce\x1bp\x0fi\xc0W\xab:\xb7e\x85\xc8_z?\xcdgu\x85\x88;\x90\xc8.\xa2_\x128\x80\x03\x0fA\xd7\x02\n\x03l\xc5\xeb\x8d\x1e\xa0\x0cU\xbf$\x07#\x9f\xcb\xdc\tb%\r\xcb\x82\xca\xcb$A\xb27\x03\xc8\xd4\xa9\xdcW\xa8\xd9!\xa2\xfcOT\x99&\xbbAm\x1a\x0e\xc7V\x14\xc0z\x1a\x17\xd78\xfe\xa4\x8cpE\xe9l\xd1\xc9\x93\x80\xd5\xc5\xaf\x19o\x1a\xe2@\xe2.\xf3k2\x95\xb1.&\xee\x0fi\x18\xc0<\xe8\xec\xd7V\x86\xa88c\xa4\x03\x03\x14~bx\xc3\xe6\xe6\x1f\xb3qZe\x06u\xab\xa1\x19\xadN\x95u\x87\xd1\xa2\x06a\x84\\\x03\xc2\xe9\x83\x01\xf2\x96,N8\x86\x80\xa5Ve;([\x93I`\xc2$k\x12\x00u\x16CM\x8e\xc7J:\xfb\x88\xb9A;S\xc6\x0c\xa5\x04\x05\x88X\x9a\xac\xc7\x86\x1d\xd4\xb7\x18\xba\x08,\xa6\x8a\xea\xc3\xd4f\xd35v\xe3\\\x8f\xce\x8b`=\xb7G>T\xa1\xed\x82\x86\xe3\x10\r\xb8\x91g3\x85 \x01G_v\xb4\x0b\x11\x0fl\xbdp\x8bg0\xbe\xda\xa7\xc4=\xbc\xd1l\x0bS\x1e\xee)JT\x05YB\xa8\xd8\xd6\x8d\xfa\xbf?\xfc\x1c\xcb\xe6\xdc\xb7~\xa0\xa3\xfe4\'hQS\xd2\t\xd5\x00M\x1bM\x89\xde\x8cRp\x81?\xe7\x89\xf2\xe8\x01\x8a0\x0c\xce\xdb^w\x96\xcb\x0c\xe9\xfcel\xae\xa3Lhx\x19\x01\xeb%\x96\x0e@+@y\x91\xd5\xff-R\tuW]\xb4\x9e\x10\xa8N\xd2\xd7\xcd\xe0\xdb\x94y\x1e\x8c0v\xda9\xbd\x13\x05\xa9\xb2\x9a\x1c\x95\xd5r\xb8\xefdg\xc1|u\x13\x96\xc6)q\xb1SX\xf6J#\x10\xb1\x94\xb90\xa84\x9c\x91\xba\x10\xab\x14\xcbg=\xbb\x0fk\x00v[\xe6\xcb\x03\xe5\xd6\xab\xc4B\x18\xfb\x7f\xbe\xe7_\r\x02\x88\x17\xe1\xd0\x00\xf8y\x17S\x971\xed\xde\x1e\xf9\xac\xa5\x8e 
*\xf3\xbd\x8eO\xc2<\xc1\xfb\x97\xaeB\xe3:\xd5\xf3K\x8be\x07i\x01\xa7\xa3\xbb\xf7\xaf\xc5q^r\x8b\xfckI\xe7\xdd$\xab\xe4QT\xd5\xaa\xe0\xde\x08\xd9X\xb2\xf9\x0c\xad\xbf9\xff\xf8q\xa1\xdc\x02\x1fb\xe4\xddt\xdc\x9c\xdfH4j7\xf5\x93\x08\xdaP\x7f\xa9\x11\xf4\xc2\x8c\xca\xa0\xea\xb5=\x7f3\x9d\xb4u\x91\x05c0?\xbb\xdf`\r\x0b\xdc[\xdf\x82-H\xb7j\xae_U\xc0\x19\xbez\xcf\x83\xbb,\x97\x98\xc1\xa4\xa0\x90\x06iQx\xa3\x95\x11\xb3\xe9\xe9]r\xabEMf\x0e\x96oN\x94gn\xdc\xdf\xa0\xb2\\\xea\xbca\xce\xad\xed\xf7\x0bv2U\x1c\x13,\xe0\xe4\xdbZ\x18\x03\xbf\xc5/\xc6\xd3\x95\x80\x1e\xba\xa5^\xce\x1c\x0e/)\\\x17\xbb?\xfch\xa7\x0cxK\x9aC"e\x9e\xd0\xfb\x83\xd5!,\xcbQV+\xb9\xf2Q\xd2\xafd\xe1\xc48\xe9\xd1\xc0\x9dZ\xba{\xc6\xa2\xfe\x95\xa9\xcd\x03\xa2\xf6\x98\xe5~\x85\xa7*x\x15\x10\xa3\\g\xeb|\xa6\xcc\x82\xb6\xdc\xb3\xfc\xa5u\xc7\x00\xfd\x9e\x9a\xe9\xf6\xdd\xd6}\x0c\x8b\x06\xb2\xcc\xa9\x88\x0c\x0b\xc0\xf5k\xc1\x9b73\x0bT\x9bW.\xa1 g\xc6\xa9=c\x8d\x99\xe6\xcd[\x8fX\xa7\x07\xde\x03\xcat\x1a[*?s\x1d\xf0\xc0\xc5\xa4\x143\x90P\xae\xba0d\xd66\xf9\xaaW^Bf\x10_\x88<\x9d\x1b\xd2\xd9[\xc2G_h^\xca)\xda\xdb.l\xe8pa\xffA\x1e\x8b}\xf0\n\x0fS\xf6#>k\xe3\x81\xc8i\x88\x9f;\x9f\x1f\x17O\x03\x16\xdc\x8b\xfc\x81u\xd8\x18u\x8a@?\xado\xe5qoD\xa3\xa7\xc6\xa2;\\%\xebi \x9c~\xd0\xca\xee\xba\x04\x11\x87\nY\xb6``\r{\xee\xc3j\xd4\x03\x01\xbe\x05m`;\xf2P\xd1XE>F\xb5\xa5\xe6m\x98\x86\xa1\x13\xf7\x8f\x1e\xef\xee\xc8P\xca_\xa8\xba\x19~\xd6< 
6\xad\xae\xdf-4-\xa8\xdf\x96#\x1cf\xf9\xd95\xb5O\x827\x0f\xe5\xf3\xb5\xa8\xf1\xeb\x00<\xf7\x10\x1c\xe2\xffBg\x05\xbcr\xdf\x95\xad\xa02\\\xacO6F\xdb\xa2\xf6T\x81K8\x94\xf9\xb7\xfb\xc2\x8cy\x87A\x9c\xea\xd1^\x97\x03\xec\xda\xdcw"l=\xf8)8-\x1d\xb4e\x9e\xac@3\xfehI!\xc9a\x8e\xc5D\xdc[\xe4\xabb\x95\xa8]6\x83\x9e.3\xac*z\xd9$,\xde}R\xdfK\x1a\xb3\x10\x10>\x8dV\xa8r\xae\x02-\xa0v\xe3\xf7n\x1d8\x9bY\x94\x1f\x1by\x93\xf3\x91=\x99h&\x84\x15B"x\x0c\x01\xe5\xcd%\xbf\x96w[\xe6\xc2\xee\x82\xb3\x13\x1a\xb9\xebc\xb9\x9aq*\xbc+\xc2\x13\x9cgw\xbf\xd0Cx5\xa0\xdb\xd8\x81\xcc?\r=T\x00\xab[\x94\x98\xaaE\x89\x8c.0\x88q)z`\x9e\xc7"}\xb4\x8f\x175j\xc3;\xdc\xee\x91q\xe3\x03\xbc\xe6C\xf4\xfd\xe95\xa3\x06\xb1\xdf/t\xfa\x98\xf5\xd7p\x85\xacg\xa9BV\\\'\xb8\x19;\x04\x1d\xbc\xc1\xe8Zd_\x85\xd0!$\xdb\xf7L\x12E\xe9~\xe2l\x94zd\x88\xb1w?^\xf5[3\x1b"\x9d\x9f\xc5\xf9\xa2\x8f\xfa\x1b\x19~\xff\xa4#=\xb4d\x90>(F\xadq\xe0\xf2\xb2\xa4]b_\x96\x95m\x1e\'\x0c\xd4\xc5\xb1S\xa0\x1ds\x89-+\xd9\x81\xb0\x07\xfb\xb9\xad\xe9-=\x0fuP\xae\x03L\xf83}\x91D\xa7\x04F\xb2\x98y\xe2I.%\xf2\xcf\xb9\xfff\x08Y\x19z\xf8\x8e\x17\x0b\xb2{\xb0\x0f\xe2\x86+\x1eB\xff\t\xad\r\xee\xca\xf3\xb155\x04\x83\xa4Z\xa6\xcb4?~\x1a\x90\x8a\x89\x9c\xc7\xd7\xaa\xf3\xa9\x07\x89#\x87\\\x07\xdcy\xa6\xbdi\xb1\xce[\x90\xd8\xa5\xbb7y\xb0\x9e\x01\xe2\xc6\x90\x91\xe5(!\xd8\xefTk\x17\xfd\xd8\xe1\xd70+\x8cq\xdc4\x9c\xec\xb6t\xbf{\x12{\x1a\x947\x0e\x16\x01R\x80HP\xda\xbe\x03\xdc\\\x19:GZ\x0e\x80\x8b\x9b\xdc\xe1{\x1cp\x008\xfc\x85\xdcC\x03\r\xea@D\xc0\x82+\xae\xfb\xfdlh\x92\xbe\x95n\x80\xdcMJ\x0fT2\xf0\x9eC\x04\x83D\x93Zn\xf2O[Wa\xc7\x87$\x07\x0e\xc10St)\xce\x01\x0e\xe9\x89\xb8\xc3\xb1\xc3@\xecR\xcb5CX\x85Rz\xce\xc0\xa5\x96\x1d\xc3L07;r\xf8Q<z\xbb\xc3\xa8\xf8\xef:\xc1i\x90\x94\xf7Px\x91u\x1f7E\x97F\x08\x02\xc7\xc9\x0b\x98\x02\xd1\x18\xe8\xce\x0c\xa9\x15\xaa\xc5T\xe9\x80\xf6\xf3\xfe.\x0e\x02\x1a\xfc\xa5\xdc[*\xf4\xa8\x0cH\xcbO\x9c?\x06\x00\n\xfc\xb5<\xcac4\x8e\xca\x914\xd9\xe3f\x89\x088\xb7\xb2\'\xf9\x10\xe4\xc7(\x88U\x84\x1a\xce\xafF`9z\x9d\x01] 
J\xc2\xb6sd\xf5Fx\xc2\xadM\x96\xa4\xd0\x9b\x9a`\xea\xda\xf4o\x9c\xf6~\xc5s\xe3\xf9\x06\x1d\xb8\x91\xa9\x1e\xd1^\x1a;\x97\xadj8\x1a>\'L\xa7!\xe9O\x0f!f\xa2\xf5\x89F!\x1b\xfa\x8bY\xab\xbc\xa1Ti\xe0\xe9\xb2\xdei\x82\xe2\x04V\xaejq\x99G\x02\x80\xf7\x01XM\xa8\x7f\xff\xc4mM\\\x9a\xb3K\xc4\xdbn+\x00\x12\xfc-?\x93\x8f\x7f\x94\x13E\xcd\xe6c\xf3Q\xe1\x11\x12\xa2\xe8\x96\xce\x96\xc5n_l6\x86\xb7\xd8Z\xa2\xf5YOx\x0f\x86mu\x04\xf9$V.\x11n\xa0(\xc9\xa2Z\x01\xc1\xef\x95$\xc5\xe2\xdd:=\xa2\xa8\x1fcIC\xef\xa3\x88a\xfd`R9_\xf7\xa2\xc5N\xf1\xb6\xb1^Xh\x19}\xb3j}T\n:zY\xef\xa4tC\x94\xef\xfe$9db\x17\x87\x1bt\xc0\x0e\xa7\x07(zi\xb9a1\x1b/\xfd\xd1\x85\x04\xd5:D\xdf<7\xec\xd4\x1f\xd0\xd6\xc6\xbc\x02\x08\xf4\xf5\xfab\x7f\x96Z\x94y.\x1c\x15O\x05|\xdf\xa5\xc8\xe9\xfa\x16\xbe\x0c:z\xbdg\xb9\xed\xb4\xbbuW\xac\xc9\xb2\xb3\x03\x1c\xa6\xe6+6b\xb0*\x82\x97\xf9K\xec\xe6J\x93L\x1f-\xdc\xc1\xae$\xa5\xa6\xe8\xaa\xab\xac\x19\xb2\x89\xe8\x88X0n\xfa\xdf\x05\xa7\xf0,\x84\xa7\xe5VbF\xc0\x9b\xfa\xda\xb7\xe2\x05\xdc\xc5\xe9/W\xfb3E/\xe2&\x15\xc3\xf6V\xb4\r1\x8c\x9c\xbaA\x8a\xa5\x8aR\xe60jMD\x0b\x01r\xf4\x8d\xfa\xd2\x9f\xa0\xcf\xf2\x17[i\xe58\xb2\xfa\xb51\xfb\x012\xf4\xcd\xf8\xda\x9e\x8b\x87a\x9dx2\x06W\x12(j;S\xe75o\x80<\x0f;\x04\xeb\xb0\x9d\xd6\xa2\xa1\xf0tdx\xf6\xb4\xf6\x85\xa03\xcc S\x94\x1e\xf7\xd6$\xcb\x13\xa7=\x93\x04~\x1ek\xc3\x10\x85\xd9\xe9S\x9a\xc7\xb3\x86\xf1{Y\xc0J\xd5\x10\x7f\x9c:_\xaf \xe0\xca\xcd\x08\xcd]{L]:\xc3\xf6o\xc2\x1d-Wd!\x16\xa6\xdc\x02@\x9dq\x13\xb2\x06\xbbJ\xf2E\x83\xfcQ5&\xaf\xd9\xdcL[m\xb3 
\xb3*(\xcf\x01R\xf5-\xf9\x02\xfb=}D\\\xfb\x1f\xb9\xb9!\xf4\x13\xab2\xcf/\x8e\n\xa6\xec\x04i!L\x99\xa6\'#\xe7\x9aQP\x84\xb3\r\xa9\xa9u\xa0\xdc\x13\xaaC\xf6\xc4Upe\xe2Y\xca\xd7\x1a\x0806\x80\xbb\x90\xdc4\xdc\xc1\xc6\x95}!\x99\xb3\x16\xbd`t\xf2\x8a\x1e|\xaa%V\x83\x8b\xd9!f\x83\xc0Eh\xe1\x96\x93\x01\xee\xe2\x84\xb4R\xc4\xd3l\xa0@,\xfb`\xe63\xd8\xae.Rn\xd6\xb9\xe5v\xc0\x1d\x00\x10\xf5m\xf9V\xc2\xeeik\xfaJ\x94\x02\xf7\xfc1\xbc.\xfd\x15e\x1b\x9c\xbe\xa0\x08i\'\x0eq][m\xb1\xe7)\xebX\xd7\xaa>X@\xfc],\x94\xce=,\xc0\x9b\xf6\x05d{\x08\xa4F\xc9p\xe8<\xa7\x10\x10\xc3\xa0\x94A\xbf\xc1\xe1\x02\xa6\x1ds5\xdb\xe7d\x87\x98\xb4\x93\xbb\xf6\xce\xbb\xb0\xbcZL\xb5\x08\xddLOS\x13\xdd\xc1\x8e\x98\xd5\xe1\x8d\x8d"B9\xc9\xd2\xdd\xb8l\x17\xdb\x88\xfe \xe4R\xcd\xeeO)r\xe6\x8dX\xae\xe6\xea\x89\x03`\xf8\xec\xfb\xf7Z\xd7R\x8f\xb5&\xf9{\x14\xa7\xadz\x80\xa8\xfe\xca\xa2\xfbfT@v\xbc\x9d\x9b8\x8f%\xedM\xbd@\rf cp\xbe\x18c\xb5Y\xdc8\xe0\xe0s\xc3\x00\x01\xbc\x1c\x9al\x05L\xc8\x1c+\xf1\xbb\xa9\x06\xdavCI}JD\xbc\xf6\xb4`\x132q\xca\xde\x0f\xe3\x97\xb1f\x93\x0c\xe8\xcf\xb1\xd6\xac!\xf5q\x857\x96\n\xe7\xe1\xcd\xa9#\x9fz\xd1\xbc\xb1\xf3\x0eV\x8d\xe2g\x14\xa3\x1e+\x199\xb9T\xc4\x02\x00\xfb\xadA\xd2\x81\xa7>\x87\xe1\xe9b\xe4\xac\x9aZe\xd7[c\x9b[l\xe51}\xff\xfc\xc8\xd4\xdfmU\xa5\x8b\x07J\xceF\xaf&\xd9\x84\x8e\xd14U\xf5\x80}g\xf9\xb2J\x01K\x8b2\x13P\xf2x\x94\xa7\x8a\xb3\xc4\xda*\xcb\x14w2\xa4\xdc\xd7\x074\xd2\x0e.\x01\x92[/\xd7\xaf\xbb\xf0m\x13\xb87X\xde[xOf_\xa3i\xcf\x98\x89\x92\xdd\x94\x8c#\x12b\xac\x08\x7f`\x9a\xc0Ua\xba\x9d\xe9\x17J\x1dQ)\x85\xcd0I\x7f\r\x08\xdbd\x1an\x07\x1d2\xc0\xa4"\xca\xa0\xc6\x03\xf8\x84,\xca\xc0\x18\x9b\xa5\x17\xcbj\xbeX\x9e\x06\xe0\xa7\x91[$\xb0]\x82]\x18\xe8\xe0%C[\n\x83\xe0\xb4M\x8fyO+\x8c\x9d\x82\x1cgyIV\x03\xe4]\xe4oS\xce\x7fe8\xdb\\\xcd\xdc\x18\x0cA\x1a\xa1\xa9\xd4\xbdxZ\xd5\x8a\xd2j\xc6\xa3=?\x85\x0c\xee-\xe2\x1e\xca\xdd\xc4eeMJ\xaa\xc1r\x1b\n6cW\x1d\xf4>\x18\'\xa27\x85\xecR(\x96\x08TkF&1,%\x08<\x83!\x06G(%\x99\x01\xa1i\x9e\x10\xc6{J\x90\xf1\x9e\xad\xb2\x03\xe3;\x80\xb6_\xcd\x13&
\x82H%\xbc\xa6f\xa4\x0f\x96\x0c7\xde\xbf\x07\xdf\xb7;\xe1n\r\xb6*\x1cN\x01\x0c-\x0c|\x11b\xcc\xff\xb8>\xbb6\x16\xc5\xb6s\xe9\x0f\xa5\x91\xfc!C\xe3\x98\xa8#\x175\x05stq\x8c\x83\xea<\xe9\xf2+\xb6\xca\x8bZyo\xdd\x8a\x04\x9ak\xcfZ\x8d\x82\x0e\xf8\x06\x8a\x94E]\xa2j@\xbd\xa1CS\xba\xbb\xcd\xa6\x96\x0c\x03\xf8\xda\xa3R\xb8\x8fi\xee\xd6\x14:\x95.\xee\x7f]@\x15s+\x88\xc0+&^O\x19A\x94\xb68\xff\xd34{\x1c\xd6N\xea\xe1\xd9\xde\t\xc5h\xf57\x9d/\x15X\xde7\xcf\xb3)y6\xab\xb4|E\x1e\xf0hn\xe3\xea\tO\x1aD\xd8~\x87\xce\xc8\x02}?\xc5>|\x10!\xfc\xacH\xe2\xb6x\x15k\x16\xb0;\x9c&\xb0h\x1d\x19\x80\x0f\x1b\x93\x89\x1d\xb9\x95\xf2\xe3\xa0\xce\\\xca\xa7N\xd3@\xb6i\x96S1\x84t\x80\xcd\x80\xb0\\@\xe4e\xe2\xc6\xe4E\xa9V\x97F\xe7\xbc\xac\x96<\x85m\xea\x04\xe7EM\x88\x85v\xe8\xba\x8b\xb4qh\xc8N\x14]\x00\xaa<\xe37\x13\xf9\xe080\xe5sHN\x19/\xf6\x8f\xd6\xa5\xeb\xa5E\xb7n<\xd7\xfc\x91m\xa5\x03\xccV\xfb\x13\xac\x19\x7f\x89p\x03%\x0c\x94\xef\xf6w0\xa0\xce\x863:\xcc\xbc\xa6\xcdhAd\xc3\x80\xa0\xe9\x0bh\xf0\xc0\xfe\x1dE\xa25P\x88\x18.\xc8\xe7\x10\xa8\xa4\x89E\xf6\xef>ZS[\x190\'\xb8\xd2\xce\xabRv\xb7K\xf0J&\xd2\xa2\xd9n\xc2#d\xe7\xa1W+\xfb+E\xf1"\xa3\x92\xed/@\x0cS\xe2\x80q\xc1\'\x8fR\x05\xc2\xf8c\xd4a\x9a\xa8\xef\n\xd2[\xf4U\xcc\xd6\xd7\xdbE"\x9c\xcc\xda>V{\xacQ\x99$G\xef\xb0\x85*\x9d\xf9\xc1\xbd\xd8N\xf18\x9arp\x8c 
\xd4\x07*\xc0\xf2dX3\x9a\x89\xc7%;,\x9f\xf7\xfa\xd3\x02\x0c\x05\x03am`8$\xd0\xe3\xd0\r\xba\x13\xadN\x91\xefH\xff\xceD^[\xc5\xc7\xb8\xfb\x83\x9d\x0e\xf8x\xd0\x8c\xfbk~.\x15~\x81T\xb8\xf3\x1b\xb1J\xf4KY\x82\xcb\xff\xe4\x0e\x00\x13.\xfe\x01\xa5z\x96\x9c.\xf8\xdag\xbd\x90o\xa1_\xdc\xb3\xd5\xbb\xf5a\xbe\xe8\x87V_!\xef\xaf\xfb\xe46\xf9\xa7\x1eq\xf7\xc54\xc0\xb1\x84s\x98\xba;\xa0\xb7\xac\xb1H\xe2a%\x1b\x93\xbf\xb9>f\xdd_\xe0p\\j\xd2S\xaa\xbf\xbc\x1dT\x85\xe4\xea|\x90\xa9u\xfbY\x0e\x8a\x94Fs?\x8b\xd9\xf4\xa6\xa6\xa9\xfc0\x03\xf3\xe1m\xba\xd2\xeb2\x91Jm\xe6Izs\x8e\x17\xdf\x03\xe2\xf5\xd7\xdc\x9d\x8d\x85B\x81\x10\x0e\xfb"\xb9\xd6<\xab\xa2(m\xbf\xf4\x97\n\x9d\x89\x8f\xdb\x03\xea\xb2N\xe8\xab\xd4\x8cp\x0b\xb3\xac\xea\xda\xec\x10\xa5/\x8c\xc42L\xbdB%\x0b\x12\xc9\xcdV\x7fAe\x0fH\x94n\xfe\x1da\x16\xa1\xe3\x17\xe9\xd8\xb8\xc3\x8c\xe7y\xa11Y\xee\xa0\x90\xd35s_w\xa1\xec\x92>^\xbc\x19\'%\x83\xa5\xf3\xb4\x06W\rs\xea\xd7\xa7y\x07\xfcu{\xf6\xbc\x0fDz\x7f\xba3\x10\xeb\x0f\xea^9\x01\xbb~\x14\xe1\x83\x04\x0f\x8d}\xefL\xf6\x96\x9e\x89\xce\x12\x96DX\xa5\xdfZ\x08u\x00\xcd3\xe1\xf9\x17\x7f\x9b1\x03\x89@\xb4c\x84\x9d\xd1\x1eX@>\xe2\xd8\x1a-\xd8\xf8\x92\x12v2vO\xb0\xaa\x8b\xeeT\xdd\xa0\xe8&\x869Qs\x8c\xa5\xbf\x8a\xd3\xe4w(O/(\xcf|\x13\x93a4\xb7\xbcr\xe5nI\x8d\x10\xd0\x8b\xbc\x05c\x01\x8a\xe0\xc88\xf6f\x92|hU\xe5`Z\x12;q@\x02\x17G>\x10l\xf1\xb6\x97*jX\xe3\x94%"v\xcb\xb5\x19\nf\xf4\xb6\xf0\xfe\x00~\xbd\x9eO\x06\x08\xf8a\n\x042(\x7f\x07\xd5\xd4\x97\x049\xab!\xb2\x85\x9f\xec\xceU\xca@\xcc\x15\xb6\x19e\xf20\xd5(k\xcc\x11\x87sz9\x7f\xd3Tl\'\xda#\xeb\x95\xc8\x9a8\xd2/\xfa\xd2C\xbb\x9d\x18E*C\xe3\xf9\xe0hV\xd7Nn\xd4\xb8\xaa\x11D\x19\xc8\xa2\xb9\x13\xa3\x9f)Z\xb9\x15\xdas\xcc\x90\xdc\xe0fz\xed\x13\xf3h\xf6\xc2\xc3\xcc8n6\xb9\x80n=\xc0\x06\xdf\xa2|\xa2A\x93x!\xb0\x1f\x18_/d\x83^\xe9#\xf6`\x8d\xfbQ\xad\xabu\t\x94\xc3\xee\xfa\xe7\xd9K\xa3\x02\xd3%eg\xe8\xf8\xaa\x87a\xa8ln&\xa5_O\x8e-\xb4pma\xe4F\xa8M\xb7F\x17B\xa8\xf3"\xf1\xa0b\xf8@\xe4\xee\x8e\xc9\xff@6V]\x96:.\nF\xdf\x9dZF\xcb\x95\xb8\x05\x1c{+\xa8S6y\x87v
cJ\x84\x14\xa4\xbd\xacyc\xf5\xb5\x97\x0c\x1d\x8b\xfb\x00\xd3\xba\xfd3x\xf7\xaa\x97\xb14\xa9j\xd0\xba\x8e\xd0\xa2\xde\xb35NG\xd1\xcfE\xf0#\x8d\x9cR7AE\x8f\xbd\xa3G\xa2<\xcb6\x1fe\xbf\xabj m\xee\xa5\x02Y\ne\xe2\x8c\x12\x11xV_D\xaf\x1cK\xf8\xc7\xb9\x8b\x7fp\x18\xb0\xc1/\x1a\xd1\x04\xeb\xd1I~_\x0b\xa67y+\x18\t$\xbcwq\xf9aS\n*\xack\x15\x14\x7f%\x85v\x88(!\xb0\xa3gB\xf0\x1e\xbb<\xb8\xe7\x99Z\xa5g&\xdc\xa0\x9b\xbc\xa6\xd2\xcc\xaa\n\xe5\x081H\x0c<^"P\xfe`\xde,FNS\x83\xdc\xf7b\xdf\x89\x10\xbb\xc4\x93\xfc\xd9n\xf7\xd4\xff\x1c\xb8\xdfS\xbf\x98k\x9d\xe5\xfeO|\x9f_\xb8\xcb\x9c\n\xbb\xb8hm[\xd3\xe8/\xf2\x13`&\xbf\x17\xd8)\xf4\xf4\t%\xeb\n\xbf\x07qV\xb6,\xb5\xc4.Sxm\x8a\xe4`\xa3\xaa\x85\xc1\x84\\\xce\x0eJ\xc8\xcb-\xda\xda\x9cd\xa6\xe9\xec[\xacf\x7fpcpy\x7f\xb6\xfdQ\x1e\x9a\xc2\x87C\x96l\xd5\xde[\x17\x9b\xb8\xf5\x88-\x10\x8dH9\xed\xe3\xb5\x0c\x97\xfa\xd0\xcd\xb7^\xa8\x85\xdd\xdcXbR\x85\xaf\n\x9d\xd5X\x80\x1a\xa5\xc4\x18\x88G\x9eP_i\x17\xcf\xef\xf3\xcd\xa7H\xb4\xa7\xe8\x9efZgi\xdbUD\xfc+\xb7[\xdeWV\x1c^\\\x9f\x14\xd4x\x7f\xd6\xe6\xb7\x88?\xf6\xa7\x07\xa5<\xc0\xdf@\xd4%\x0c\xda\xd0\x007\xb5\xcd\xa6\xb9E\xf9\xa2\xf8p\xeb\x0b\x93\x8c\x08\xb7\xcb\x8a\'z\xca(\x91\xd4\x8b\xdb\x91\tY\x1f\x07\xe3)\x15\xfbA\x1f\xe9f\x84\x9b[*\xab\xfdv\xb4-\x0e\xfa\xc4\xef\x95\xf5\xa4|\xcb\\\x89\x8d\x99\x16\x82\x0c\x8b\xbfh\xf1\xc9\x90\x0b\xf4\xf1\xe2EM\xa3\x1biK\xa98\xbb\xd7\xd0\xf0B\xe8\xaac\xec\xc6K\xf1\xa3]@\xb08\xe6\x83\xf4\x18w\x9fB\xfa\xe0\x94\xb1cY\xae\x92\x18@\x01@\xfbc\xb0\x8bf\xad\xe1~g\xba:Sy\xb8\x04\xf6\xbd\x19\x16\x0e\xe2S8\x89\x95\x81\xfd}. 
\xbb\xf3:\x1c\xef\xba\xa7\xd9\xb8\xad8\xf8h\xf7\xb0\x98\xe0\x1d\xbf\xad\xc4\xee\'\xae\xe1x/\\M\x98\xffg\xc4Bl\xcc\xcd<A\\\'es\x8e{e\x94\x87H\x1a%|\xb2\x01B\xfe~\x07\xa8\x88\\\xed.!\xfb\xab\x17\xc8+>\xe3!\xf7\x06\xd4;\xa5\xa2\x0b\xe1\xfc\xf9\xa8I\xf7\xed\'\xa8{U\xec\x815\'\xf0{Y\xef\x87\xfd\xdb#\xb6\xd4V\x9e\xe59\xd8\x9b,"z\xfb\x9fO\xde\xf0\x93(k\x1d\xf4\x12\xc9y\xc9\x02}A\x11+M\xf1\xbd\xb2Zr\xd0\xae\xb7\xc5M<\r5\xb7\x85\xef\xf6h"\x1c\x02M\x8b\x91\x89\x18\xb0\x9e\xd8\xc4~\xf0(\x1a\xe8\xb1q\x127LE\xf5<\xa5\x13\x9bh\xb3\xc6\x94\xc4\xc6\x14]\x90\xb2\xd5\xc7\\\x15\xb2_\xa9\xd7\x8c\xd2\x15(o9\x99\xedx`\xa9\xc4\xce\xc4\x02\xc5\xf2\x10c\x8a\xdb\xfa\x83\x0c\x1b\xc0\x8f\xbd\xces\xa6\xbc\x1b}\xf0\xb1\x8e\x1ft\xfa\xc6\x17\x8b\xc7\x04\x9b\xbe\x95w\xfb\xaa\x8c\xda9\x08R\xcb\xd3\x18@\x99\x85\x8d\xee\xe2f\x9eZ\x85\x06\xe7`\x001\xb3\xa0\xf4v\xc4\x99\xf5\xd5\xbcG}\x1f\xa9\x19\xf3\x99\x92\xb8\x9a3\x9a<\xa6\x96=F>\x1fX \xb7\xb4]\xe8\xc7\xf5\xec\x97\x02e\x91#\xa1\x8aD\x08\x9aLI\x8c\xc8\x88p\x18\xbb\x00\x9b\xe7\xa0\x0e[\x1a\x9b\xbcvw\x88Mof\xb2Y\xd8\xf9\xa3\x9a\xdaHb\xc7>a\xb9J\xb5\xdd\x04\xa2\xdc\x86\x1d\x97FC\xd0>\x05\x11V5\xcb\xca\xf77f\xe6\x8fr\xf3\xc5\xef\x97\rc\x15U\xa1\xe0\x81z\xb9\x1e/sf\xdeA\xe9c\xc4Q\xb8I\xdb 4\x81\xef\xea\x87\xe2\x8fOD\x81\xbc\xc04a\x0b\xbd\x065\x05\x89\xc7\xe62\xa2u\x1e\xb2\\\x1dz\x11\xf0\xe9z\x96\xbd<\xcd\x9a\xe0F\xecL\x0b\x14\x89\xe5\xf0\x85v\xe0\xb6\xd7\xde\x93&\x1d\xa3a\x7fy\xc2h\xde\x1eA^~\x05\x94\xe7\xd1\xf0`\x9f\x84R\x03\xbas\x0c\xab\xa1\x00;\x10\xf2(+\x89T\x81\xcc\x8ea\xfc\x1a\x98\xbf\xb4\x18\xc5\xde\xe7\x92\x85h\x86\xd3G"\xfd\xddBv\x96O\xe2\xd4\xa4S\x94\xa0\xcf~\xe0\xcb\x94\x98Q,1\'\xe8\x84<;\xb7\'\xe7\xe7L\x94\xce<F 
:\xffu\x8f\x14\xfb\xdc\x9c\xc9\xb2\x1d\xea\x12P\x90\x87p\x88Yt@H\xbe\\\x1e\xber\x96d-\xe6\x10\xc9\xe7\xc8\xf3T\xecy\xc6\xc6k\xf6\x05\xc9@{\xd0\x1a\xa2\xae\x7f\x92\xb6d\xfd6:o\t|7f\xa9\x06\xee\xd5\xbf\xe6\x14\xd5\xd4\xbd78\xf6Ry\xab]\xc3B\xe9\xc1\xa9k\xe2\xaf5k\'\xf0x\x95\xf1\x1b\x87`6\xdf\x80D\t\x03\x8e\x92Z\x86g\xa6\x94g\xb7\xad\xd0_\xf2\xd1\x8b\xc9\x9d{\x8bK\x8c\x11\x9f\xfa\xfb\x14\xb2\xfd\xeb1\xf7\x1f\xc1\x1e&\xe6\xeeZF\x87\xb1\xeb\x94\xd2\xaf\xb7:1\x99t\xd5\x12\xfbt\xd3<\xbb\x81=\xaf\xa7\x91\x16\xe3G\xb9Qb!\x07\xce\xb0\x9d\x0b\xc2\x94J\xe2\x01\x95\xfc0\xf0\x94\xbe\xd1\xb6\xc3\xf6\x87\xd0\xd0\xd2\xab\xcfq\x94\x9fJ)<]\x0b\x89P\xbb\x1c\xa7C\xbd\xcd\xbf/0\x8d\xfe\xa5\xa8\xd8\xc1\xd3a\x9fg\n\x99\xb4\xd7\x94\x99\x12G\xc7\xb4\x18\x9b.f\xb1\x98\xcf7\x90\xd8|\x18V8\xd6\xe5\x90\x03\xbf\x92\xcf\xbf\xa6\x13}bw\x83JZ\x0c\xab\x92\x96s\xb3\x81`\xf4\xed\xd0\xbf\x1c1\nx\x92\xa3\xf0\xde,\x9e\xa3\xfe\xb1d\xbb\xe6n=\x06G\xf5\xdf\x1a\xacHI\x9c\x1c\x91Q\xf2_\xb6f\xd5P\xd8\x0c\n\xe6A\xe2\x10^\x1c#\xe8\x0c\xcd7\xf7$\x97\x83\xfe\xaaB\xf0\x9aGp\nd\xce\xcdpt\xb7\x06\xdc\xad\xcc\xe6\x9e\x030\xb1v\xf1\x7f\xdd\x9d\xd2\x90\xa3/\xa2\x98\xc1=E\xb9H\x85\xa6\x85\'\xf3\xb9\x1d\xc5\x84G\xd7\t\x86.yA\x12\xb3\xe3{\x03R\xea\xad2\xf4\xf53\x95E\x1c\x92\rF\x8b\xfe\xcdb\xc5\xb4I\xa8:\x87tQ\n\x86_\x1d\x9c\xc6x\xe7\xe5\xdb\xd5\xfcs9\x9d\x83\x1c\x8f\x9e\xbc\xa4\xdfWrn89\xbe\x92\xf7>\x94"\xb4e\'r6\xc7\xa6Ur\x16\x07U4\x90\x14F{K\xea\x97\xea\x8f\xd1\xe6\xde\xc2W!n\xe6\x96\x8e\x89\xe7\x16\x0fH\xca\xe1\xd9\x7f\x80=7T:d~@\x9f\xd1\xfb\xf7\xbdt\xe7\xf2L\x7fK\xb5\x11\xc5\x82\x8fW\x81 
\x99\x08\x9fG\x19\xda\xbe\x95\xd8M!\x89we\xf6\x90\xb2)\xb0c\xa4?\x8a\xfe\xd2\xd2\xae6\t\x8b\xc8\xd5\xa13\xbf\xb1\x85\xf4\x95B\xdb\xcad\x88=U\x05\xc9\xf4\xf0\xfe\xba\x81\x07~\xc9MD\xf4\x1b`\xce\xd8\x9c[\xf5D)L\xa7\x91\xf1\x1c\xf3\xfe=D\xb4\xed\x1a[\xd2>\xa1\xf0\x9cQV\x85\x02\xc8\xe9\xd1\xa8\x15s7i\xed\xd3ep\x95O\xb7\xb4\xde!\xf2\xe9\xfd\xaa\x9c:\xbd\x936j\xe4\x1d\x99\xa0\xbciT\xfaIoz\xae\xe2\xf8\xb3XQ\x17\xefkT\x1b\xcf?\xdb\xdffqh;\xca\xb2E\xf4\n\xab\x0c\xbf\xa67#\x04U\x91\x7fi\xb0|?7y\xe7uz\xd9\xf0\x12\xd1b\x11G\x14\x8c\x1e\xe5;\x07T\xe2G\xd3\x96\x02(\x07\xd4\xb6\x80\xeci\x8a\xcb8\x157|I\x7f#\xa7\xb3\x98r\xf7Vba^M9\xcb\xbb\xee`Y\xccsr\xba\xbf\x95\t\xbd\x81jl\x155+\ri\x1du\x18\xf3\xb0\x1a3=\x94\xdaQ\xe7\xe4I\xb0\x1d\xba\xcf\xde~\xd0A\xb9\xbb\x9d\xeb\x8b\xf4Y\xabU1\xfd\xbdd\x15\xeek\xe4\x8d#;*i\xbf\x9a\x80R\x7f\xccL\xce2\x88\x10\xff Z\x19\x9c\x98\xd5\xaf\xe3\x93\xa3\x13\xc8\x96\xadT\x9b\xea\xbdj\xae&\'\xdea>9#\xc1\xf2\xf1`\xc7\xb5M\xec\x98\xd9\x1eV\xe9qY\xc6& e\xa2\x83\xfa\xa9\xb8!\x98\xbc\x0b*y\xea\xea\xcb\xbb\xd8\xe3\xb7\\at\xdc\x84\x01\xb1\xc3\xa8\x89\x02\xd6\x86\x84\xfc-\x96\xf0?\xa6P\xf3\xack<\x89\x05@U\xd0\xbb!\x1e\xbc\xb0\x96\xa2\xff\xad\xca$N\xa0\xb5U\xb7@[,8%\xcb\xbe\xb2\x86B\xbc)\xeeP\x1bG\xf8\xed\x86!\x9c\xa7\xf0\xb1\xe0\xd0\xb4(M4\x8b\xc5\xeb\x03\xf6Y\xeb\xce\xe8d\xd2L\xd6\xbd\xdd\xb8\\\xdc\xa7S\x96\xb0\x86i\xa7i\x8e\xa2\x1b\x8c\\9\xb5\x13\x12\xb0b\x1e\xec4\xbf\xd8\xddJ\xd4\xf0\xa0\x9a\xa0\xb7\xce\xf9\xca\x84b\xc1\xecX\xd3\xce\x88\x05\xe5\xb6\xc1\xe7\xde>n\x0c\xb1\xb7\xef\x89\xd8\xc1\xe8\xb4!\x805\x91\x95\x1bXf\x15s\x07\xde\x03Yp*\x05\x9e\xe0\xa0j\x04Og\xe3\x84g\xba\xa5\xc1\xe3\'\xba-\xd1?0\xe2\xb4n\xba\x1e\x06\xa6\x18\\\xe3\x9b\xa5\xa0R\xb3o\xb5xAIX\xae\xf7\x91\xec\xb3\xdc\xc8\x10\x14^\x1eS\xdd\x8f\xb5\xab\xaf\x85x\xe2\xfch\xb9\x98p\xb8\xb4\xbc*\xed\xab\xf5\xc2:\x8f\xfe\x86\xa5\x80"\xb5\xef>\x0e}\xcf\xe3D\x9c\x9a\xef\xe2~\xdcs]0\x9bl\xd4\xb1\x1a\xc9\xeeu\xde#St\xb1\xeb\xb9\xa7#\x92A|]\xc4=\r\xaf!\xad\x05a\xf9\x9d\x92x\x90\x82h\xf5T\xef[\xf4\xd6\x16\xa66\x
c2o\xf9\xa4\xa6}\x9e\x7f\xae\xee?[\x16\x04\xc6\xa75\xec\xca\x86\xca\xd7\x81J>\x9e\xf9\x06JM\x9a8:\xebN\xa2y\xd6HrGQ9\xe9\x0c\xca\xa5\xbcR\x1fHKTK\x0b\xf0m\'i\xfc\xee\xf7\xd2\xdb\x17\xa9\x89q;\x84\xde\xe0\xee%\x9b\xdb+\x9a\x83\xa4\x16\x11D\x1e\xf0\xb4\xb5C\xa8C\xdb\\\xb8\x1a\xcd\xd6&yU\xb1\x1d\x85\xf2\xc6k\xed\xe8\x95\x1b&"}\r\x83\xde\xda\xa7\xf3[\x90\xc4)g\x9f\x1fyEB\xf9\xb9\xad\xb5\x1f\xe5\x11\xb4\r\xbd\xac%\xe3L\xbb\x87\xcc\xa7\x99\x10\x8e\x9a\xe4C\xca\xf5\xa1UN\x98o\x05I\xb3\xd7k\xd9\xa6\xf0{\xfe\xc2 \xd0\x87\x1f\xe0\xa50\xf2%\xb8O\xd9\xd3n\xa7\xd2C@zu\x08%^\xeeT\x8c<\xf7\x08\xca\xff\x1ds\xd8\xa7\x0eln\xf9\xc7XWs\x9c\x12\xe9\xd6\xf5v\x15O\x9cI\x8e\x130\xdar\xd2\x14$\xcdi\xe3\x8c\xb8\xac{\xb0\xd7ll~}\xd1p\xa8\x8f\xaf\xc3\xfd\xd4\xee\x0eg\x1f\x15,\x93\x85~\xd1\xe1\x8aj\xa2\xcc\xf0\x87\x0c\xdcm\x81%\xb1\xbc\x91,\x93\x96\xa1\xe8\xeac\xd1\xd2\xc9\x11\xf4\x8cTvj\x0c\x99\x9bQp\xd7\xb2K\x99\xcd\xfd\x0e\n\xa0f0\x8f\x14l\x16\x8b\xd5\xca\x87\xa4\x7f\x8f\xd0CY\xf7&\'\xfbu \x11\xef\x0cf\x10P\xd3\x8fy\xa2e\xaf\xaf\xa0a\xfdp\xb0\xf1\x8d\xed\xa3\xa5\x9c\x8b\xa8wv\xd6\xe5\xca\x82\xbb\xd3\xf8u\xa6q\xbdac\x97*\xdc\xd1p\xd9\xb5\x1bT8\xdbo\xde\x8e\xeb\xfd\xb6\xec"\xee\x98\x02v\x1d\xd2\x0b%1vW\xac:\xf1;\xbd\x92\x87q\xdb\xba\xaf\x881@M\xeaf\xf8\xbf@\x01\x9fds8c\x00\xb0MY!\xf2\xee\xce\xdf%S\xd2*T\xa8\xa9`\x0b$-l\xc1W\x85wC\xd6M\xf8cTf\xe3\xf0Pbq\xac\x90\xb3\x9b\xa5\x12KA4Mm\x9epi\xdb\xdd\xe0;\x00nT\xd3\x17e\x19D\xd5\xe5\xe4d\x11\xc1\xea\x9a\x1f\xd8\xddB\xb2\xbd\x1f\x9b\xb8AC\xa9\xc4\xca<\xd9\x9d\x11\x15+\x17\x00*\xb4\x9f6\x92%\xb8\x18%*\x0e\x96z\x8c\x89\x82\xe7Y\\\xa4\xac4Vk\x1eF\xe9\x89S\n\x92W\xf1W-\x87~BgA\x9b\xfaV\xae\xd3\xdd\xc6=\x8e66f\xafz\xf7\x15\x14\xdc\xdb\r\xeb\xfc\xd3-\xe7\xce\x894\x83\x0f\xc3\xdbE\x99P;\xd4\xfa\xcb\x900Xq\x1dLD\xb8r\x06\xa2\xdf\xa8\xd2\xfdG\xeb\xb2U\x97\xf34\x94\x1b\x85\x01>]\xd4\x81u\x10%\xa3\xa9!H`\xdd\xc5LU\xe1\xa4\x01\xcc\xb0=WO\xfd\xa9\xae\xd1\x8c\xa1J8?\xb3\x9e\xcc\x0c\x8b\xb7\xd3g\xec\x86|\x0cFC2 
\xc9\x8f\x18+\xd8k\xae\xb3\x07s\x9bs6c\xd8\xca\xd1\xba\xc1\x96\xc0\xff\xacG\xcf\xee0\x01B\xa6\x85\xa8\xbe\x0c \xdb\x02>\t\xf2v)\xb0\xa8p\x07C\xc5A\x992\x90\xb8j\x81\xd7l\x13\xd7k\x97\x13\x07@M\xe6U!dS)\xcf\x9f\xb82\xadT\xbb)\x83\xf2\x95)\x05\x8c\xe7m\xd5\x83`\x03\x82\xe7\xfb\x03\xe7"\xc7\x1f\x05R`\t\xa6\x06\xd0\x8a\x00\xf0\x08V\xf3Bv\xbeI\xc8 \x896\x1bN\xba\xf9\xf5\x18k\x80\xf9j\xe3\xc9\xf0x{\x19e\xf3K\xae\xc18a\x19\x84\xea\x9f\xea\xc4dG\xd8E\xcf\xbc\xea\x9c\xb1b\x03\xdcasw\xc8d\xa6\xe7\xb5_\x9c\xf9\xd6>\xf2\x01\x12\xcc\x99!\x1cAaS\x08\xbfI;\xd7\x10\x91\x8e\x12\x0c\xe5\xd3\x01\xdf\r\x7f\xba\xa9\x17\xd40\x8c$\x01!\xd9\x06\x83#^\xb4\xb1\xb4-\x84k\xbd)\x86L\x0e.\x8b6\x1c\xf8\x80\xec\x95`J\xa0c\x99\x15\xe72\xcf5\xfa\xaf\xfd\xee\x01\xdb\xe3\xd1\xec\xd3\xae8\xc5\xcb\xd2\xeaB\x08\xaa\xfb(\x96\xfb\xb4l%\xd1\xc12I\x1f)\xfa%\xda\x1a\xc7\xc2\xff\xf7\x8bn5\xed\xe4\xa6T\x1b/\xf5\xe7\xc0\x854Q\x8a<\x05\xc5\x8e\x96\x1a\xb3\xb5x\xeb,\xfe\xe7\x03B\xb4\x85\xa8T\xba@\x07\r\x08\x7f\x9f5\xd2\xd2\xb7,M\xe5*\xcc\x99\x1f\xa6\x98\xd7\xcc]{\xa8GH\xff\xfdI\x9a\xecc4Z\xa2\xe7\x84_\x0eM*\x97\xe6\xab(\xbdZ\x853X\xe4\x9a\xb5Mc$k\x88\xef\x16\xc6\x96\x85\xfe\xc5\xac\xa4\x1f;\xc5V\xcc~\x03\xcb\x9a\xd2I+\x8a\x13\xc9\x17/\x0f\x89\xb102\x7f\xf5}0\xd1{\xce\xdc%\x9d+\xa9\xca\xac\xa3\x82^\xc4\xe0\x05\x8dq\xa1\xc5\xfa}i\x1e\x16\xd1\x03\x9dM\xb9\xe6\xea\'\xa6[O\xaa@\xe9sm\x19t\x93\xe1\xae\xc9\xc5\xe0\xc4\x18\xcd\xbdk\x91\xdb2\x8fH\x140\x95kXiK\x16\xdd\xf9^GEk\x7f\\\xf4\xcem% 
\xc7\xafF\x8d\xe9\x18\x12?\x89B\x0cB\xd4M6-\xee\xd9\xca\xe6U\xbe\xb8\xc6\xed\xd7\x00|+\x97Q1L\xb6\x05\t\xa2\x95Ds\xc2a8\xd4:\r\x7f0F\xeaDmo}\t\xdd\xc1k\x1bA3\x16\xedJ\xa1:\x8d\xf9]\x92S{`\xe4\xbaa\xf2\x8c\x8c\xb3F\x98\x0f\xb0B\xc5JWm\xc3\x9e\xa0o\xc7\x87W\x8f\x1d\x01\xeeJ7p\xfd\xd2DA\x94\x10g]\xca\x89\x11j\x13tm\xcer\x10\xd2\xe7o%\x85\xe3\xfa\xba/C\x12\xa8\x1e\x0bl2\x85\x92\xb8d\xd0\xbf\x8d\xadY\x94\xecBq\x84N\x9cmI\x8d\xb2\x8f\xf26\xfd\xd8\x12w\xed\x19\x81\xb9\xb3\xca\x9a\x07\xd8>px\xf8\xab\xb4\xba\x15N\xfe\xf9\xfc\xb8\n!YI\xe2\xde\xe54\x1c\xe2\x96F\xb95\xf0C\xd2\x05\t^\xa1c\xe3\x9b-\x16pI\xf7YZ\x90\x03Y-\x7f\xbc\xfb\xc45\xf8\xc1*\xffy\x86S\xe2\x1a\x1d2wc\xff17\xec5\xa3+\xab(\xa5o\x9a\xe2\x9e\xe5\xf0<1t\x14G9\xb8Pi/\xa3\x9b\xc9\xd856\xae\xee\xec\xde\x15\x81\xd3-\xe1\r\x1ff\x99\xbeH\xa6s\xef\x99\x1b\xd6\x8c"%\x80\x84\xc5#\xe0\xa5B^\xf3,\xf5\x9bL&\x9e?%\xc0\x7f\xe62\xb7:\xe4\'\xa4\x1b`O1I\xed\xd3\xf7WV\x02\xa9\xf9;\xa6\xb8\n\x06\xcf(\xa4 \xaaAf!1L\xe3\xe4\xca]\xbc\xcb-p,\xd8\xcfQ~\xe5K\x04\x19^8\x95\xbd\xc0I\xf6\xf5\xb3\x10\x0e\xff\xf2\xc5\n\x8aF\xf8\x84u\xc6s\x07 
\xd1\x13K\x0c\x89(\x1d\x01\xf3\xfc\xbe\xdc(^\x8f1X\xde\xfb\xac\x07\xcd\x9bP\x99~}\x9eF\xd3\x10e\xa1\xf6\xa7\x1e\xee\x05L\xc3\xd4Vu\xeb\xcdr\xa5\x1b\x15\x8c\x97\xf4\xa4\xec\xef\x15I5\xff\'}\x9c\xf6!\x8a\xb2\xdf\xb1\xa5\xaeh~(Y\xe6\x9c\x94\xea\xfc\xca\x99y\x19\xbeb\xcb\r\x88`\x07\xde\x94__\xfd\xf0\xb0bq\x8f\x00#6\xb2\x12\xd4\x8d1\x03\xf5\xd0M\xe4\xdeM\xb1<\xd1&\xb44\xd5\xa3\x88\xe9\xe7\xb9\xc0\xec\xa8Q\xba94\xe4\xcb\x82\xa9\xc8N\xcb\xa5Y\xfc\xac=\xe4\x02\xe6H\x00\x82\x92\x93\xd9+\xb1\x0b0\x8f\xb7\x8b&)\xa3\x91\xdf\n\xb7\xa3\'\xddl\xf4\x89\xc7\xf9\xd1\x89\x0c\xd41"KsH\xd40\xdfjW\x8e\x98\xe5\x86\xfb(\xd5\x85\x0c\x1f\x1a\x90\xa5,\x90\xff\x8eD\x1f\xc8\x80R\xc3\xe0DL\x04\xf5K0\xf4p\x18qN\xc6\xe4b\x958w\xf8\xd4a7\t\xd7ct\x08`\xcf\x06p\x16\xf7tQ\x9b\x96\x89\xba\x1d\x89\xdd\x8cy\x06\tSM~\x14>=1\x92\xf2\xa7\x99WWVh}\x16\xd7\t[\x0f"\x1fO\x9d\xb9\x04,\xa0:(\xc7*\x1d|7\x8cq\xbd\x04\xa8\xfc\x8b\x87SS\r\xc6\xf0\x99\x11\xf1\x9c\xff\x102\x94w\xa1\xd4\xca\xff\x07,\xe8\x97\xa17\xb6\tb\x12F\xdb)\xb2\xd0\xc5\xdc3\xe7\x97\xdc\x0e)\xafp\x16C\n\x0c\x05\x15[fRP\xc0"\xbd\x19\xf7\x08\x8b\xa4\xcf\xa3\xb6\xae\xcc\xe1\xcb\x1a\xd9v\xe0\xa4\xea\x8b\x97\xb7\x04\x14f\x87\xd5\xdc\xb5b\xc2\xd0\x19\xec\xa7{i*\xcf\xf9\x0b\x02\xaa\x7f6\n\x9c\xcd\xdbX\xc1r\x85b\xba\xb4:\xf7T\xa3<!\x95c\xf9d\xc7zQn\x17\xf2\x00\xac\xda\xd2\x0e\xab\x89\x14\xafP,\n\xc6c7\x11.\xaf0\xe1\xbc\x19\x91,\xa4S\xd7\xbe\x01\xca\x01`z\x88\xd6h#\xb6\xcdf\x17\xbaw,\x1b\xe2]\xb0~\x96\xb3\xa1\xd2\xeex\xd1\xe6\x81>\x04N\xe4GS\xcaf\xae>N\xe5\x0e\xd8\xac\xc3\xbdR\xa2\x1c\x8b\xef\x97p\xef\xca#\xda\xd4P\xf8b\x9d|\xe6\xd29\x82\xcd\x8e\xe5\xfd\x98\xfdo\xedj\x84~`\x14;a\xceE\x13\x0b:I\xe6UeeGw\x06\xb2\x8d\x8d\t\xb09\x8b\x1av\xfegY\x05\xc2\xb0\x10\xa7\xa73\xb3yw\x85c\xdbW\x1b\x92\xfd/Y\xe8y,u\xaeN\xd7+\xe5\xe5\xa1\x87DV\xd8m\xeeh\xff\xa1\x1fF}<}\xadTH\xc9f\x8e\x9c\xbc_\xba;V`\x94\x93\xa5\x96\xea\x99\x0cI\xbd/\xa3\xb6\x10;\xf0\xf0\x03\xb8kf\xfd2\x03\xf2\xac\xdb;\x124\xcb\xd6\x97N<n\x9c\xd9^\x08\xe9\x1f\x13u+"l\xf4E\xac0k\xd5\xef\xe0R\xadW\xa4>}U\xbez\
x13*\xdb\xb1\xa5\xe8\x01\x0e\x1c\x17\x8d\xe0z\x98\x86\xaa\x08\x84\xf0G\xb2\x98\xca\xb5\xeaO\x87\xaa\x13\xb7\xef\x03\xf7\xd0\x99\xf4\xa0\xdb?B(\x0bl@\x8c|\x8eFN\xda\xf0W\xc3\xad2\x1d\x91\x87\n\x8fR\x12\xb5\xce\xb9F\x13\xeb\xe8\xc9\xc3\xec\x06\x8b\x8fjd\x14\xe4Vr\x8f\x18\x06a\xaf\xcc\x03DudY\x14\xa3\x86\xd0\x0f\x7f\xedwx\xd1\xd1{h\xffU\xa2 \xb1\x80\x9e\xfa\xf25@\xeb\x90\x9e\x1b)\xfb\x7f\x1f\xe1f\x99\xf9\xaa\xeb\xd1\x92Yb\x08\xf4\x96uE\x99I%\xe1g\xdf\xbep\x13\x80\x0b\xb4o\xd7\xb5gn\xd6\xa1,r1\xca\xa1H\xd6\xef\xbe;\xe3\xd3K\x08\xdbM\xc0\xd10\xf0\xf3e\xae]\xa6L*\x88\xfd\xb0Rw\xad\xd4\x9c\x8c6x\x18\xc7\xe1l[\x06\x99\xe2\x1cb@g\x8c\x17\xfe_\xb2\xcf0\x85\xf2V\xe0@\xb0E8R\xfd4\xce\x8b\xbe\x82\x87\x19\xe7\xa5@\x89\xb0r\xe7\xdf\xb3\xd0U%\x92\xc5\x9bP\xf3\x97\xa8\x95b]\xe9g\x98\x8e\x9e|D\xbf0K\xf8L\xd2"D\xec$-\x06v\xdd\xea\xf4R\x84\x17\xdd\\\xbd\xdb7\x8b\xb4\xa1\x85R\xd1\xa9\x11\x0eC.\xbf\x1e7\x88\xed=!\xcd\xb9b]\xf1\xeeb\x1e\xc5X\xc7\xf6,zO\xca\xb3\xd7\x8c\x97j\xff\x02\xab\xc7\x8f\x1d\nq\n\xe7\x83\xad\xeb,\x1a\xfd\xca\xea\x81\xf0\xa8\xb2\xd9\xc0\x18~\x87T\x01\xf0\xdfN\xc5\x98e2\xf6\xbc]\xdb\x92\xc2,\xa3l\xee\x01m\xf4|\n\x8b\x8a\xf5U~rx\xeb\x12\x9f\x18m\xa15#\xeb\xb2\x9c\xc4)f\x8d2\xf2\xe3\xfc\xc3\x17\xdc\x88\x9e\x1ft\xcd{\x83\xbd\xcb\xba\xaa\r&\xb0\x146\x8b\x08+3\x9bc\x1duOr\xcd\x83\xd8\x12\x0e\xc2\xbd\xcc\xc3]e\xc4T\xc7\xfd\xfb\x81\xcc\xe0\x113\x94\x14\xf7\xa2\xe91\xe9\xa7(\x0b"\xcb\x81\xa2\xb1\xb6\xec\xcfy\xb7"\x86\xfbF\x1c\x86\xa3\xb4\xec\xd3KUj\xad\xa4\x93b0R9\xa6\x16J\x82>\x89\x01D\xe6\xf3\x02\xf3m%\xf0\xc0}\x17 
\xbf;\xf0\xe5\x04wS\x8f6V3\x8ba\xf3Y~T1\xd4\xbcY\xe0\x88\xdd\xf1\x0e\x1a\x9e\xd1\x00\xedfV#p\xbf\x00\xd7\xc1J\xdf\x0fW\xad\x8b\r\xca\x9a\xa6\xe0\xac\xaf\xf1\xe1\xd5r\xc2*\xbb\xe2\x08\xae\x0f\xa5\x9e\x9c\r^\x9e\xe5r\xd1\xb2Gzc\xed\xd9&<1}mX\xb0t\xb4\x9b\x8eKO\x04\x0b@\xd6\xfc\x88\xc7\xc7)f\xe3\x08\xf3\xbbn\xd94>J\xb7s\x12\xf0\xa8i\x0e\xd5\xca\xf7S_$\xbf\xee5B<6\xa1v\xd5\xadj\x0b\x18\'\xe9\x1a0\x9c\xf9\xc6\xb4\xf4+\xec\xdaQ\x13\xe0\x02\x8e2+\x0f\x10\xca\xca\x08\xe4r\xe9zP\x9a\xd04\xc5\x1a>\xe216\xbfT\xdf8\x86\x8f\x9ac\xaae4\xbal\xca:\x9a\x7fi\xa7\x0b@\xd5wD1\xfc*\xf0\xacs\x7fo<\x9cc\x82^$O\x0010\xf2xS;n+\xad\xba\xea\x8b\xfb*\xce\xfcAK\r\x04\xbd 5\x18\x04\xb2\xa5\xc30\x7fGEk.\x05\x0fk\x10:\xdc\xfb\x86\x92\xb3\xc4b\x1fN\x14\xb0e\xce\x0c\x9c\xe3g\x0f\xd0\x8d$\xff8\xa8\xdd\xc0]\xe7\xc9d\xcfQ\xdb\xf1\x15#\xf7[[|N\x0c\xd1\x9a\xb0\x84\x1c\xd2v\xb5|v\x1cQ}\x16\x0e\xeb\x8e\x06\xb0\xe7\x8d\xc4\x81\x13\xac\x10\xecm\x14HC\xa6\xf5\xcd\xf1\xd8\xcb\x86\xc0]]\x9d*I\xe4W\x16b\xc9\x1f@5\x95\x06iB\xdab\x91\xe9%\xf7\x88\xd4`\xc2\xdd\xae\xb8vx\xe0b\x961\x7f\x0e\xd5\xdf\xe1\xb9\x8d\xf0\x91\x03\xc3\x7f\xa7\xa4\xdc3\xe18\xd8z\xc4\x8dMA\xd4V\xc4\xc5\x91\x87r>\xc0V\x01D\x0e#D$R,\x88\t\xff\t\xad\x17I&7}\xeb\x983X\xb1G\x89\x9a\xe3\x18\xc57\xd7od\xaa\x0e\xf9%\xf6\xc4\xab\x1e\x1dX;#u\xad\x97\xffo\xdc\xcf\x81\xc6\x8f\x16\xa4\xf6^\xccB\x8e\xee\xcd\xe0j\\\xdb\x8c\xed\xc2\xc3#nT\x95\x14\xd1\xb6u6\xfa\x1d\xaf!?O\xf7\xdd\xdb\x85\xce\xe6H\xf6\xe3E\xe2Q\x9c\xf6\xa3\x9a\x1e\x84w\xed\x16\x0e\x13\xb0\x00~\xf0ES\xbc\xac\xe3\x91;U\xd6P\x88\xb7\x08\xfddN\x96\xf5\xf3E\'s\x9a\x1a\xba\x85\xba\xd9\xf5\xdfY\xcaw\x8fN\xaa\xc0\xda\xea\xe5\x9c\x1cX\xfa+|,L\x9d\xbcEY\x81\xdfI*!Cwd\xa2\xb9\xca\x17\x9b\x94<r\xfb\xcd\x17`,_\xc6g\x0e\xd4\xc4\x164\xb9kL/\xde\xb0\x95V\t\xc1\xc5J\xb10~\xdb\xb6\xc6K\x8a\xd4%\x8c\xack\xea\x1aM\x8d\x17?I\x0b\xf5\xe0\xbc\xcfq\\:\xcbB\x1c0\xc0\xa6|\xa3\xa7\x8c\xb7\xf6\x1b\xc5C:\xe6\x08\xb0\xc1\x05\x88Q\xf8\xa0pR\x83\xb7-/\x19\x0e\xc3)\x12\xc9\nf\xea\xfcL\x82T\xb7<\x9f\xa3\xe7\xd1\xdb\xb8\x95\xddj\x
afL\xb6\xf8YY\xa9~<7T\xf0L\xb5\xab\x9f\xe5s\x1b\xb1\x1a\x1aNEU\xa6O\x01\t\'x.\x12L\x02r\xdc;\xe3\xf9\xc3\x85\x15B\x0b\'\xc1\xd7\xcf\x12\x87\xac\xbcC7\xee\xe6Om\x8dH<\xe4<\xf1N\xeb\xcf\xd3\xec\xdf-\xb5DR; \xa8@+\xed\x1ffv-\xf9VGM\xae\xd9V\xc3\x82A\x19M\x07\xa8\x8c\x18@m\xc4v\x8c9\xf9\xc6\xcc\xc5\xe3\x8cW\xff\x80\xabp\xb2\xffAn\x9c4\xcaDk\x98\xe5k3%\x01\x8a\xf1\xb0\xab\xc1P\xabw\xc7\x94\xae\xd0\xa1S\xf1D1v)f\x84\xff\xf9\xf8\x8cq\xc7*Au+\xd8\x9c\xa48\x1a\xc2\xab\x8b\xbd\x8c\x9e\x8d\xd9z\xcc\x90O!\xd9\xf3*\xf5\x94\x16\x89+3\xf1\xd1\xe6\x1ed\xd5\xffL1\x05\xe9N\x17\x1c[\xcak+h\x807cBF"\xb7\xb9:F\xd0\x14\xa2^\x08\xcb\x9f\xb0\x90\xbe\xb5t\xd8\xd9\xd4O-\x9f)hJ{3\xd3\xf8\xf2\xd3\x1e\x02\xcc\x044\x1f\xde\x07\xb0/`?\xc0Zz\xdf\x16\x16\x98\x04C\x00\xe3{\xe3\x94>\xecZ\xea\xe8\x16\xeb\x99\xeb\x15%\xb1D\xcc\x84q\xfaR\xb3\xb5\xa6\xf7\xca\xb1\xec\x86\xbbM\xe2\x14\xc9\xb1M\xf4\xe8\xc9\x82\xf0\x93\x93\x0b\x93\x11\xfa\x0bMo\x1f\xc8\xa2\xefa\xb2>R\xa0\x94\xff\x14&\xdd\xc2\xd5\x07^\xbc\xa0\xe5k\xa9f\x0e\\\x00D\xcf1\x96\x97\xe1\xbf\xff0U\xa2|z!\xe1\x08SN\x11\x1dZ5\xbd\x1b\x87\xc88V\xf0\xc6\xc9\xb8\x08\x83^\xbfhU5\xf2\xb9\xe3C\xb9N\xfc\xe3\x81\xcc\xc7\xf24u\xdf\xc3:\xd2\xaa\xffe\x1eN\xa4\x81\xf5\x07\x84|\x9d\x8c\r\x94\xda\xf5]$\xd5\x1aT\x0bl\x94\xdc9\xfa\x9b\x925P\xfc\xdeRS\xa4A\xa0\xbc\xf7\x81\x80#\x9b\xacB\xb1\xc7~\x06\xf4\x7f\x01$\xa6\xbb\x81\xba\xbe\x81<N\x00\xaae\xdf\x98\x1fG\x84\x85w\xeb.\xdd+]\xe6W\xd3\x8d\xa4`\x01\x8a\x13I\x92\x83\xf2\xe6\xe6\x8e\xb1\xad\x19\xfe\x8b&\xdcq\xe7J\xf4s$n\x90\xc25\xd1\xa0\x06\xac\xaeH\xd6\x94\x9a\xd0\xa8\x82\x9fG\xf5\x1e\xaez\x0csq=\xee\xb2\xd94\x1f\x7f\xe1\xb8\x11=p\x83\xd1\xb2\xf5\xec\xad\x92\nLT\x10\x9d{\x1cD\xc2\xd5\x8f\xd1a\xeda\xc6-\x1d\xe4xVri\x90\x82]\x1b\x9b{:\x80n5\xee\x14*\x7f}]8q\x15-\xec\xd59\x03\x02Q\x05MR\xde\x05\xac]\xeerfB<\xb9z\x19\xc0h\x10@\x1cT$\x1c\xad\x88\xc1\x9f]U4\xe0Q\x08\xdb$\xbf)=\x18\xba\x90\xfaxa\x9a\r\xe6E\xbc\xd1L\x06J)p\xf4\x9aKy8O6\xcfu\x91\xa6(\xdc\x8e\xf1q\xe5\xe5m`"\xb2=3\xf48\xa8<\xeb[\xfcK\x19\xe1\x13*|t~\xe9\xfc\x03\
xbar\xb4fM\xfa\xfa\xf7[\xed\x8f`Q\x90\xa0f\x12\x1b\xdc\x88\xc9\xcfC\x98\xcb\x13\xd0\xfb\xf0T\x8d[r\xcc4\xe1\xd1\'\x95s\xc4R\x95\xea\x94\x97\xcfi:\xf8$\xe3\x82\xebnP\xf0\xf5/\x86p\x01\x96\rv\xe5\xfe\xad\xdb!\xab\xb7l\xbb\x81J:\xdb9;\xb9\xae\xc3\xcc>\xe9\xb0\xa8&*\xe1\xeaQ\xfe\xf7!\xc3\xc4\xebE\xd7\xf8\xbeYK#\xb1\x1d\xbc\x0f=9\xa4\x01\xaeoO[\xf8\x89\x19\xfcES\xc1\x80GS\xc9\xd4\x06\xb4\xfa\x7f\x07\xd8\x0fLf\xab\xc4\xb6\\\xd9#\xdc\xb6\x19\xf0!\xf4Na{\xfcv\xc8\xbd\xa9\x9d*\x14\x12&\x9d+\x92\xbe\xe0\xae"K\x0c\x99\xf6\xf2*\x02]+\x02\xea\xe3\xc0\x97L\xea\xde:\xe1\xf5\xcd\xaf\x1a@E\x1e\x08\\b:\xd3\x9aa\x92\xea`Z\n\x7f\xb6A\x94,\xd0\xb0Y\xa3\xaa<\x0e>\xfa\x98\x1b8\xb3z2\xd4\x93\xaf\xb4\xa3\x00~%:\xcb1\x91\xcbO\x90\xe1\xf4\xcf\x17SF\xc8\xbf\x80\x9a\xf1\xc2\x85bS\xeb\x1b\x1c\x12S\xef\xa3\xe7\x0c\xf6u\xa8\xea\xd1\xf5\x96\xb5:2\x0b\x9aN\xdcMy\xba\x87\xc2\x86\xc6*/<\xe0\xa1/)\xf0\xca\xb3j\xd2\xb9\xdc\xa2\x8f\xdcU\xcb<L\nB\xb8\xc2\xcdU \xadZj\x16Q,4I`\x1e\xdd\xe0\x8f\xe15\xa2\xc3\x86\xbd\x81CG\x1a\xfak\xe3\x83R\x17\xbe\x83o\xe9`\xef\x87\x1d\n\x97\xfc\xf4\x0cjUx\xb5\x0b,r+\xbf\x06F\xce\xb5\xfan\x9f\x0c\xd8\xae\x892\x02\xbd\xeah\xc0\xb9\x8cH=^.Q\xf5\x1a\xea\xad\x86\x12;i\ttN+"\xce\xe3\x9b\xa7\x1b\x8d\x81\x05s\xa5\xe4>\x99\xe2\x8dU\x81\xab\x9e\xd9kb\x1d\x8f\xc9\t#\xb1EI\xe9\x9dwG?\xa0\x1e\xc7\xe5\xcdP\xd5L\xa7?\xcbHTP\x88\xc9\x06{\x82\xc4c\x04\xf6\xeb\x18J\x95\xb1i\x95.\xe8`\xc0\x92\xa9\xba?\xb0\xa5\xb56\x0b\xb7@!\xea\xdb1\xd3ot-\xc8\x0bf\xa9\x11K\x9a#`fuz\xaa\xa5\xba?I\x007\xa6\xdb\xeejE\xfdt/7xm\xd4\xc3)\x01\x9d\xec\x1b\x98\x0b\xed\xb9\x86\x81\x89\x90\xa8\x00\xe3\x94l\x14B\xa3\xe3-\'\xd7V\xc3\x94}\xf2q\xc2\x91\xb0-g\x1aa\xbc\xa7\x9f\xe6\xc8\x8b\x00Z\xb4\xe2\xa6\xaein*\x8b\x86\x11\n;:*\xcd\xf3\xe2\xec\xd4\xe3z8\xf4\xd2m\xc3\xaa;\x9f\xa0\x02\xe2w\x9eO\x8bUL\t\x90\xd7\xde\xbb\x18)#5y\xda9\xc2\x7f/R\x98\n\x00\x0f@\xf0\xbf\xf0\xc6\x18\x11\xae\x19_\xe0\x89\x8a\xab\xb4\xdd4\xab\n\xa3\x86\xd9F\x00\x0f@\xf0\xbf\xa2@\x0c\xfa\xe0\xaa\xf6:\x965\x06?I\xe5\x8f#\xf1\x83\x05B\rt\x0f\xb6\xe1\xad\x87
\x9e*[IH>\xac\xaa\xe5\x91\xc5\xee\t`\x91\xad\xb8\x86>\xbff\xbc\x8d\xe4\xf8\xfb\x82\x93\x18\x92\xb1\tH\xecH\x11\xee\xa9\xaeQRT\xac\xf8k\x81\xb1\x96\xc5\xc8"\xf9H\xceKi\x90\xe9\x84&\xda\xb4Q\xd6Ai\xe4/73d>\xd60\xcf\x94b\xe4Z\xfe\xda\xe8\xd3\xca\xfbW\xfa\x11\xed\x05=z\x00\x0b@\xf4\xbf\'h\r=\x82\x11\x90u\xe9r\xceg\xf2\xa1C\xfay\xa3\x98@T\x9cC\xb0(\x8a\xb2,\x97m\xdb\xb6m\xdb\xb6m\xdb\xb6m\xdb\xb6m\xdb\xb6\xb1\x97\xde\xfdg\xf6\xa7Y\xa3\xee\xe8\x8e\x9a%\xe1\xad\xed\xb6I\x9a\x05\xb8\x1fL\xc4c\xd2\x9a\x9b\xae\xb5\x98;\xfap\xc7\x1c\xda\xb2$\x14\xf6m\x82\xc7f)J}*\x8a\xebm\xf1\xb7\xb0\x86\xc81&\xc9bM\xda\xdb\x93+b\xaa\xdf\x91\xbb%7\xc7\xdb\xe0\x0b\x9d\x07]/\x1e \x1f\xd7\n\xba\x03\xde\x8ekV\x17Q\xba<\xdbQ\xa1\xf7\x81}!P\xc2\x1e;\x96\xe1}H\xe3O7\x19\xa9\xd1\xe4+\xdd\xc0\xe9%\x80\x88@w\x7fI\xa3s<\x9f\x8a\xf2\x12\xa0\xad\xb1\n^3\xc2\xb3\xb8\xe7\xa0\xe6\x7fA\x87\xeb3!{\xf6\xf3eQ,\x7f\x7f\xf8\xe8\tb\x9e\xf7\xbc\xb6_\xd7\xf9\xce1lM\x96\x9f\xe3\xe9\x86\x19\x05\x08\xc2\x14UT\xd55\x07+\x86\xfeYD\xbc\xa4\xfe\xc3\xe6\xf05\x1cB\xe398\x89\x0b?8k/\xf4id\x03\xb8\xafb\x84\x9c\x90iUzg\xd7b6\xe8F\xa4 \xc8\x006\xbdQ0\x82\xb5\xfdy+\x06\xf7U\x99\x01\xc7\xd0g\xaax\xf4T\rE\x146\x8c}\xe05Ok@#\x00\x8f\xc0p?\x1a"\xcf\x9fus\xd1\xc7\xf1\xdc\x06\xe0\xa0\x0f\xf7\x7f,`{\x9d\xd5\x93\xfbN\x88;4\xa1]\xd7\xc0\xb1\xcd\xe0\xddY\x05\x9a \x1bf\xa1*\x85\x1a4<\x06A6os\xfa\xf8\xe5$U\xa6E\x93W\xa3Q\xc4\xe6 |vw\xe8\xa7\xf1\xaa\xbe\\_\xd7\x85\x96\x7f@_\x1c,\xb8\xe3BV\x97\xb9\xc9B\xa4\xf2}\x15\x92\x112\x81|g,\xa2O\n\x8e\xdf7\xb8\xc8\xa3\xf8\xcf\x03nM\x8b\xb4p\xccP\x0ee\x00\xe5-L\xb2\xdaJ\xaa\x9f#&\x02\x80E`\xba\x1f\x1c8U\xca\xc4-*\ty\xb3\xd8\xed?\xc0\x87d\xa80A\xa5-\xb3\x06\xc5\x04_\xc0\'t\x94\xb7@\xcf\xcc\xd1\xadI)&Ak-\x99\xd3[\x99)\x1c\x8bz\xa1{;\xad*3\xc1e\'\xdcc `\xe9&\x824\xabLV~\x05\xb5\x86\xf2%t\xaaX\xed-P\xbf \xd6\x146\x81&\x9a%0;\x83\xa3\xb7\xb2\xa8\xe7\xcb\x84\xf8SB\xc9/Va\xb7 ja\x86\xdb\x1d\xea\xd5!\x9b\x0e\xbet\xe5\xd7 
\xd8"\xa5\xd1\x9d}\x14\x14\x8f\xba\x05\x07\xcbd\xf1\x9fb\xa1K\x8a>j_\xc8\xd1(\xd5\x87\xb7\xaf6\xa02\xf5?X\x189\x1a\t%\xb4\xe0\xde[\xdf\xcf\xf2\xf5\xf4\x96\xed\xaf\xb9l\xd7\xb98\xeavtQ!\xd6Lv\x84\xc0\r\xc2\xe6\x0c\t\xaf\xf1\xd5\xd3w\xc1B\xcf\x9bo\xab*\xa4v\xad\xce\xfc\x93\x946\xfc\xf0gb\x18,N*\xfd\xc4\xfb\x18C3\xfcb\xb7\x1b-\xad\xb4\xcb\xbb\x96}\x96U\x10\xe3\x0f\xe9\xa8G\xde\xf0\xa1P(\xdb\xfeA\xfd4&!\x82\xef\x81\xb8\xd5\t\xfd\xba\xa0\xa1\xe8\xd6g\xe1\xb6\x1d\xbf\xdb\xaf\xff\x9fl!V\xf7I\xc6^.t6\x0e\xfb\xa4+,\xab\x83\r\xfa\xe3\x02\xea\xbf@\x02\xac^\xe85\xde\no\x92\x9ba\xcb\xadX\x7f\xef\xf1U\xfe\xe8\xa1\xd0\x19kw\xf9\xa7\xe2D\xae2uJ\xf9\x9f\xd2\x80\xff@|\xf5OBY\xc4j\xb1\x96Y\x8d9A\x92\x97\xbe\xc0\xad\x1a\x15Iw\xb5y\x8a\x02\x18MfJ\xdd\xb4Y\xaeK5\x933=e\x8a\xb8Q\x0e\xc3\xa43\xccn\x07\xef\xca\xf6\xb3\xc6i8L\x95\x91\x90\xf2\x9b\xce-T\x8d\x94h\xc1\xfd+\xef}\x0c\x00\x9d\xc0b?|\xce\xb4f\xde\xf3\nK\x9a!\x8cB:\xceQfpq3\x18\x00\x95\xc0j?D\xb8\xeb}yDA\xf1\xd6VJ\x8e\xc1t\x1a\x1eN\xafJ8\x89\x01X\xf8E`og\xff\xbdQ\x90\xc1o\'\xd0Q\x80\x14M\x8b\x1b\x98\x06.\xa4\x88\x05z\xc9\xc6\n\x8f\xcc/2\xbe\xe6>Y\xc8\x12\xad\xb6F\xffbt\xd1O\xdf\xa2e\x8dO0f\x19l\xc4\xbc\x8bn\xae\xba\xd0|\x8f\xc6\xda@\xfe\xed\xe5\\X\x82Ak\xc3\xc3\xdaG\xf5\x08\x80\x1dYH\\\xe4`B4\r\x00\x99\xc0f\xffy\xee\x8a\xa2la\xd8?\x15$\xd2\x87\x8a\xdd\xbd 
\xd5\xd1;\x1a\x00"\x81\xdd~\xe5M\x82\x81\x80\x0f\xca*\xb3\x1a\x16w\xc7ET\x8eS\x8c\x18\xcc\xfd\x83\xd2\xcd+\xb4\xb5\x88\t\x87L;\x03\xce\x8cW\xdc\xfbx\xae\'X?\xca\xab\xfc@J(\xe37t\x90\xf9:\xdff\xb8"\xf6S\xc4*t5d\xfe(m\xec\x9c\xc2\xd3\x1f\x1f\t\xf5\xaf]+\x84\x1f\xe5\xb8\xda`\xb8\xfe\x83.\xdd\xdb\xde\xca\xcd\x04\xc2\xafj9b\x8d\x9f\xd1IZ\xef\x1e\t6^;2\x85\x9d\x9cU\xbch\xd6a\xd6\x19i\x99q9gKw)\xc4XG\xba:K\xf1q\xa3\xf3\xe3d\x0b\x85\x16\xb8\x95\xaf\x15\x1fq\xda\xfa\xd2P_\xc9t\x12\xe7\xcd\xc3!\xeb\xa8\x82\xc5Z\xec\x16\x15\x0f\xc4\x05+\xd3\x0f\xc8\xda\xb2\x07\xb0\x00B\xef\x9c\r\xed\xb6\x03\xc8|/8\x90\xc1\xffV\xfb\xec\xe1a\x89\xee\xe0K\xfex\xe8\xf7\xdf\xc87|\xd8&j\xa2\x9c\x9d\x0cLc#\xfd=\xd8\xe8e\x8a\'A\x1b8\xbb\x8cx\xfa9\xc3\xe7\xda\xdb\x80@\x88\xd6\xb7\xbc\xaf\x8e\x9d\xc2\xbdGfQ\x04P\xb3\x93#\xfb\r\x13\xb2-\xd9\xbf\xe4\x8fI\xbc\xef\x8c]-\xe0\x10\x00\x16~\x16wQ\xea\xa7)#\x96\x05{\xe09\x8b\x95\x84^n\x06\x81\x97\xf5\xe1\x9b\xacH\xbah\x1c6\x13\xfa\xefH\x04\xf22\xb2\x8b\xb4U\xd4\xd3q\x82\xa6\xeb\xa3\x98\xff-5\x91\xb9\x1e\x1cn\xd9B(\xa5\xec\x8d\xc7\x9a3\n\xf5?\xc8\x82)=\xbb\xbfnuX\x03Of\t\xd4\xd7\xdcsN-\x1dT1)\x8b\x07\xfe\xb4\xc2\x01_\xd5\xf23\x03\xc3\xee@C\x17\xca\t\xd2\x04\'\x96\xea\xb7k@\x8a\xb8/\xdf\xf6\x13\x1d|hx\x04\x01H3\x80\x07/\xa3\'\x947\x12J\x1d\xf3\x80\xcb\x8f\xda\xa8\xa7\x9e\xb3J1$%\xae\x9di\x9bs\x8e7\x19\xcd\x06"b\xfeRj\x01\xce\x0f\xfb\x86\x07J\x18pX\xc4\x9a\x07\x87\x1b\x89\xdb\xb3-\x83\x9df\x11\x13\n\x1b\tz\x01s \x89\x0e`e{\xa2\x96;y{\x9e\r\x80/p\xd0_\xb3L\x96\x11\xbd\xdf%\xcd\x08\x80-p\xd2\xbf\x8ckC\x1bqQ,\xed\xac\x84y\xfe.\xef\xaf\xa6R\xac\x8b\x19\x8a\x0f\xeeK\xa0\xcf\xe8X\x9a.\x0f\x0fD\xcf\xf9\x87m\xd1c.\xb1\x14u\x97 
\xb0\x94\xb5\xe5\xbf\x10\xb8~\x9e\xb0\xba\x83\x8e\xd6z\xbd\x8b\xc3F\xce6\x0b\xb8i\xdd\x12\xed\x0c=\xd8\x84N\x81\xbaD\x9b\x150J\x8c\xd1\xe6^\x16\xbf\xd8nN1eRo}u\xe4\x80HV\x0eT\xd3\\\xf5\x94%\xd4^3<!\x84\x99\xde-91\xf1\xea\xba\xb7\xbd\x01\x80.p\xd1\x0f\xff\xb2\xc6|&\x92\xa6\\4\x85\xff\xa5\x8d\x01\x874\xde\'\x9e\xff_\x10\xf5\x9a\xbfV\x16\xf7\xbf+\x05\xe6\xdf\xdc\x0e\xd1\'\x96\x8f5\xc2\xb2\x0fw\x12\\\x88\xcc\x1dP\xbd\x9aW\xde>5"\xe1\xdf\xf3&\x1c\x80<\xfc\x15i\xc4\xe1\xf7\xb2Y\xden\x01~\x19\xeb])\xc1\xdf\x00\xa5\x83>@\x81EG\x8f\x14\x1f\xa7\xce\xdc\xd5Do\xb3n\xfa\xc7\xac\xd0\xa2\x9cn\x8b\xec\x1c\xc8`8\x15\x94/\xcbd]\xbd\x1e\x85\x1a:C\xc1\'U\x05\xf9\xf7\x01\xa2\x88\xd5P\x95l\xbb-g\xf4\x85\x9fw\xed\xf6O\x80)\xc8\t\x08k\xafz\xae\xfd@\x10\x8e\xebG\xe36\xa22LE\x9a\xf0}\xe4\xb6.\xf7\xea\xa5\xf1\xf63\xcaV&cC0\xfa\xeb\x0f\xf0\xff\xc9\x1c\x06J\xee\xf6\\\x9c\xd2\xce\x15\x0b^_pr\x84c\x18`\xea\xc5\xceq\xff?\x97A\x9b\xad\xf7\x9d\x7f\xbf(:o\xf69\x89l\x96\x8br\x07\xf7\xa5\xe2\xc9\xcbh\xfb?\xe5\xfd\x9b;8\x1e\xde\xae\xd7\xcb\x8a\x9b\x170\xc0\xeb}\x97E\x86\xae\x8af\x8b\x03Tt\xed\ny\xb5\xf4\x81\xf6\xa3z@Q2W\xb3M\xd8\x7f\x14\x99\xfeJ\x1c@\xe5\xb6\xff\xe2\xd1\x0f[\xd1 
\x0c\x9b\x01A1C\x1b\xea\xc4\x82fq\xf1\xceA\x88N\xe0\xb4rL\x19/Oq\xcc\x0b\x14\x89\x83\x17\xc1\xbf\x0c\x83\x8b\x17)\x8d\xde\x96\x0e\xb3\xfc\xef\xbc\x86\xcc\xb3\x89$\x89%\xb20\xe1\xa3\x17p2l3k;\xd3\xaa\xf4P\x01\xbe\xbf}}=Q\t\x9e<ycT?\xc8\x1f\xb7\x0b\xff\x10g\xd5\xa3\xfd\x8f\xd8<\xca\xc7\x0c\x14]=`!\xe4\x9b\x05\x1f~\xee\x84\x9d\xa2\xac\xc0\xf2\x8e\xbc6\xe8\xe6\xbbJG\xd6\x93\xbb\xb3\x7fG\xf5\x9d\x0f\xdbAa\xf7\x84\xbcn\xe3\xb9\xca<o\x8dzl.\x9e\x936j\x85]\xc7b\xc6\xc2F\x86\x88\x0c]K\xbe\xb5g\xcdQ\x12\xd5#\xe1\x0b\x86\x03\xbeg9`\xcd6\\\xffI\\a\xfd\x88\x08H~\xe3h?\xd2\xbd:a\x03\xec|\x0b\xfb\x9en\xa5\xcf\xdcx\xbaV\xae8J\xff\xfe\x01\xa6N[\xe5\xa0\xb5\xe9q\x0f\xe3\x9b\xecej\x8c\xcc~\xbbBk\x14\x0fE\x1e\xad\x12#\x04\x1f\xbe\xc6\xea\x1d4m\'\x94\x8dx\xe2\x8e[\xd1\xdd6\x85\x9c\x80\xad\x0c\xa4\n\x1f\xd9E,\x8e\xaf\x00H\x188\xf2Ly\x95\x026\xb9\xc7\x02\xe8\x8a\x06\'\xf2U\xd1\xceU\x17\xb07\x81\r{r\xf6\xf9\x8eG\xe2\xd709?\x17\x16\xa2V\xca\x88I\x80f\x87{\xd5_e\x0e\xa0%\n4\x95"\x18?\x1c\xb8\x8d\xf03\xe46\xd8\x10\xeb,\x94\xe6\xb7\xc4;T\xadp\x00\xb8\xfd6\xf6\xad\xcf)\xa6Uc\xc6\\\xfdeq\x9a\xb4\x96\x8a\x9c\xf6cv\xe8K\\ ?\x89]\xccX\xcc8\xbe\xc3\x9e 
2\xc0\x18\xe9)s\x17\xba\'\xed\x81\xec\xf7L"\n\xa3M\xcb\xdev\xa7(h\x1c\xb6\x1b\x96\x8by{\xe7\xfb\xa0T\x15*\x9b,\xd6\xeb\x9cO)R\x8a\xe0^\xea\xb2%\x1f\x19\xe1a)\x81\xf4\xb4\xa6h\xe7rv\xfe\xda\x94\x8d\xa9Z\xaf\x96\xf8\xe1\x93pL\xdfi\x8d\xf7:\xaet\x1c\x96\x96v\xc2\xa8r\xa1\x08Y[7\x95\xb7\x8f\xed1\x01\x98\xfdV\xf2aXH\xb3Z\x90\x85.\xc8\x8b\xc7\xc5\xfe\x05\xb8\x0eUbS\xdb\xb2wa\xd2\xbe\xe0\r\x9d\x7f\x05$\x03Tc\x82-H\x84\xdb\xad(V@wQ\x0b4\x16W\x96\x1a~E\x04\xc0Y\x8c\x9e\x02\xd4\xda\x087@P\x9d\x89S\xce\x01\xe8\xb1@\xf0\xa8\t\xa2X\x112|n/\xdd\xcd\x0bAR\xbf\x1eH\x07\xc2\xe0\xab=\x80\xa1\xa8=\xd7\xf7\x0f>\x06\xbe\xc8\x06\xb2j\xd3\xf4*$\\\xc2\x8f\xaf\x91\x90\xaa\xd1\xe8\xd2\xe6\xe2\x9eF\x08\xfd\xd3\x94#\x93\x986&\xfbf\x80.\xf5\xac\x93\x04\xc0\x03\xa0\xfe\x13\xd9\x87\r\x1e\xde\xec\x7f\xdc\x81\x02\xb9\x8e\x15\x03\x10\xfb\xcd\xec\xcbi5\xc2\x1d\xb1\xd9\xb1\xce\x01\xb7\xeb7\xa8\\\xe4\x15\xe0z\xa7\x91\xa9\xc8\x7f\xb1\xb9N\x1c\xe6E\x9c\x85 \x88\xf9w\xe9)t\x9f\xd0C\xb9\xa4[\xd9\xccN\x9a\x9f\xab\x98\x97\xed\xe7\xa4\xae\x06\x7f\xfe\x85\x8f\xc8Dat\x1a\x8c\x8d\x97\xe9\xd9u\xd3\xae\xb8\xfbT69\x8aP\x1b\x07\x9d1\xaa\x1fZ\xc0\x1d\xf1:{\x18\x9bY\x14\x9a\'[\x97\x05\xdd\x18/\x9f\xa3\x81"\xbb\'\xf7 \x19\x17\xc1\xaeS\xe6\xbc\xd7)\x8eS\xc59FeL>\xb1!O\xed\xc6T\xa5\xc2\xfb\xf1\x17\x92\xa1\xd9\x90{\xe5\xb0\x11Q\xf6{\xaa\x05\xbc\xf0D\xff2\xd9\x89\xf6\xa6o\xd4\x8121\xfbG\x86{LO\x90\xd6\xfb\xbb\x00\x0f\x0e\xa3~ 
\xbb\xa5\x9e\x85\xda\xa7\xf5a\x99\x02W\xb6\x08\xe9\xcb\xb4\xa0\x1f\x92\xb6M\x9d\xfc\x81\xd2.?\x9b\xf5\x97\x92\x01\xa5\x11\xae\xce\tT%E\xcfG\x17%\x08\xf7\xb5\xaddT\x0c\xf7F8~a\xfeyYB\x85=\x08\xdb"~h\xf2\xaf_\xebV\x9a\xd0\x1c\xbc\x8c\xfc\xdb5v\xbb\xa88\xdaY\xf4bI\xf67\x1d\xfbh\xcd\xb2~\xc6\x83\xa9$\xe6vD\xdf\xce\x16^`\xb2\x7f\xa9}\xd7su\xca\xdb\x88\xeb\xb0\xf8\xd3z\xfas*bE\x88\x7f*\xfaq\xbaI\x1d\xc5\xa2\x8b$\xa4SQ\x0b\xecqn\xd1\xbcpa\x8ek\xf5\xec\xae\xa0\x05\xaaNS\x0bM\xd6\xda\x1cM\x19a\x8b6U\xc6\xf5\x9a:\x96\x95\x0bb\x84\x15\xe4\x95\x17\x8f\xa4(="\xd8\xd5\x14Z\xbe\xdcS\\\x97/2\x96\xdd\xb3\xcf\xaa{\xbf\xb7\xacc9T\x15N@>\xe6\xf5\x94M\xa4\xe7\x96\xcbQ\x02\xb2~\x8b\xf3I\xff\x7f\xed?\x99Q\x8b+\xbb<;\xba(\xa7\xd0D\xcf\t@\xf9\'\xb6\x8fN\xad\xa8\xfa\xc4\xb8V@o9\x08\x0e\xe3\xa4!\xd06*\xc2xhW\x96\xce\x0f\xee{\xb1\x9d\xe6p\xf2\xa6b\xfe\x9eKIo\xf8[\xc1\x86F\xf1\x15\x99\xcf\x10\xeai\xb2\x06C\xfe\xad\xbdP(\x9cGY\n\xef\xccM\xac\xad\xe0c\x9cY2\xa8\x83\x8b8\x00\xc8\tMn\xde\x90\xfa\xf7\xa9W\xab\x7f\xeae\x9c\xb0\xc5\xda\xa0\x14\xf2\xf3\xe6\xdcc\xc7\xaeb\x83\x02\x93\x95\x97\xb7\n\xd7=]\xe2\x15\x82\xa7\xe2\xad\xbc\xc7\xfa\xe2\x0fu\xcc\x9a\x93\xd4\x9c\xfd\xf1j\xc5` \x9d;\x93\n\x06\x10\xf2\xcd\xed\xbb\xa4\xa5\x84\xff\x97W7_\xa8PrJ1i\x16\x7fL\xd4\x1f\x19t\r\xbc7\x10fHt\\\x14\x86\x1f\xaa=\xbd\xc7~\xd0r5\xcc\xec\xa8\xcbN\x9b\xd6\xb0\xd4\x90+\xac\x8fZ\x1e\xa3B\xfe\x06qy\xaec\x94\x87\xa6T\x81L\\a{\x04\xae\xed\x03F\xec\xa4\xeb\x85|d%\xbap\t\xe8\xc7\xdc\xf5\x93j3\x04\x9e*L\xbf4\xa9\xbe\xffK%^\x1d\xda\xd5\xc4~j\x06\xf5t\xb4\xa8 \xf37-\xf7\xc3Q0\xd8\xb1\xe5\x81\x07/\x9d\xb7\x10\xa3 \t:p\xca\x7f\x01\xbd\x0e\xbb\x90\xfd\xc2\xaeQ27>\x01\x8f[\x0eY\x97>?1#v6\x15\xcd\x8d\xc8+\x80\xe9}\x97\x1fq\xcdr\x17GfS\xc5{#\xfa\x0e\xf8\xa4\x17>f\x88\xd3)\xfa\xce\x1bk\xae;\xac\x87Q\xde\x0c\x1a1\xd6\xe9\x0f\xba]\x10~\xd3\x1d\xc4\x93\xebZ\xa0S\xf4\'\x8a\xdckUF\xc9\xc4s\xfd\x9b\xa8mn\xfaZ^JT\xd3\x0cI\r\xd0}\x92EH\xf5\xc0$\xd3\xf2\xef\x14\xe3:"}Z\x9e\xfdS^\x008E\x9c\xf4k+\xb5\xfb\xb3\xfe\xcf\x0f\x87\xa4\x1b\x11 
\xfb\xaf\x82\xbe\xba\x1a\xbb,\r\xe1\xfcF\x0b\xf05\xcf\xa3\x93\x97,\x97h\xcdX\x06\xb8\xe0\xd8\xaa\xa0r7\xf6\x15,\x9a6@\x88\x96\x05\xd9\xf0\xb5\x85{\xe8/\xcd\x90\x8d\xd6z\x18\x1a\xb5\x06S\xf3\xaa\x11q\x89\x10c\xdb\xfag\x0fH\x9e \x98q+\xc9\x7f\r\xb2)0\xec\x1f\x1a\x06J\x199*\xd9\xd6O\xec\xf71\xc4dif\xb0r\xc9\x9d\xb2\xebX\xd9n\xf0\x8d\x96L\x81\x84\x16\x1c\t\xc3\xed\x17\xf6\xb2\xeb\\\x05:5P\x91E\xad\x16\xc3{v\x19\x89V\x1c\xe5\x0c\x85u\xfa^\xa5\xa7\xc9\x03\x7f\xb5\x02:\xa3z\xc7sd\x03\x145\x8b\xe1\xcb\xd4\x08\x0e\x80\xcbos\x9f\x92%\x99\x1c\x93\x18\xc7j\x14\x15EUS\x0e>C\xac\xa7\xba\x9cpqS)\xa2\x04\xa4N\x8f\x97\xcd;\x98X\x1b\xd86rm\xe5\xce\xa9&\xa9dXbh\r\x0e\x7f\xa1GT\xc4\xb4{`5\xc4\xbf\xcd\xa3}\x92\x90\xea+3\x1b\x15x\x19I\xb0\xdb--\x9a"2\xd6\xc5\x98Kp\xb5\x13_\x18\xf8\x9e\x19=\xc8]\x14|s\x1ai\x0f\x11\x812\x90hN\xe1\x85\xb5\x90A8^\x90\x93)\xb8T_D\xaa\xcc\xf9\xb9\x9d\xf0$\x87\x12\xfe\x9c\x8d\x8c\xa3\x13\xfe\xb6\xa8\xab\xec\xfe\xbf>b\t\xe3\x0eF*\x8e\x16\xc0S\xbcO\x0c="\xeb\xdf\xf1\x8aM\x95A\xdf\x94/w\xe1F\x87\x04\xa2\n\x96\x10\xcb\x93V\x1fU\xb5\xe3\x00YV&2v\xb6\x13\x16\x14!\xbf\x94\x04W\xd3M\xbc\xb1-\xd7\xb0\x89\xa5P\xaf\x17\x03\x1f\xe3\x04M\xe8`>c\xc2E\x86\xf6\xf8\xf8\xd3v{\xfeOU\x0bvj\xdc\x99\x8b\xed\x18\xae)G\x98&\xa2l\xdd\xdf\xbb\t4\xfd/\xd4w\xe3\xe2\xd9\x8d1\xdd\xb8\xd4\x0evb\xcc4+yN\x8f\x0f\x0cdM\xe2T\xae\ri\xd6\xfb*@\x83O\xfc\x7fN\xc5\xd7v+\x9c#\xe2\t\x8d\x0e\x9c\x96\xbf\xc4)\xe1+Y\x90O3\x86\xef0\x118\xcb\xd50\x93\xa1\xedL\x9b\x02D\xc4\x1d|\xff\xf4}\x00\xb2\x12\n\x80\xda\x82$\x1fS\xa4\x12|\xaaR\xee\xd174\x95lQ\xd9\xa4k;\xcc\x95\x94V\xacgs\xd1vW]-fGh\xf0\xbd\xe8\x95\x8am\x8d\x1bV\x04L`"\xb4\x14nw\xd2\xb4\x01qIo\xbct\xd9\xc8\xb3\n-)\x1d-G6;\xad\xd2\x0e\xb1\x17\x908j\x8cBOy_\xf6\xb5\xb9\x83\xc2\xb8\x0c|\x123\x00\x12\xbe\xe9}\xdd\xce\x05I4\xc25`S\xd1B\x91r\x9b\xe8\\#JJ\xbem\x9aJ\xc8^Zpu\xfd\x8a\x93\x9e\xa1\xb3\r\xf6\xfaY\x95\xf1\x80\x84F\x8e\xa0X\x02i\x85\xed|\x05\xa6\xbf\x93)\x93\xed\xc1\xce\xab\x9e^\x87@\xde\xc1\x05\x1c)\xe4\xc0-k$\xd7~\x9b\xa5\x88\xb8\n;\xa7\xcc`\xb6\xcd\
x9e\xca\x95A\xe9\xa6>\xd3|~\xc6\xe8\xa6\xef\x9c^\xa1\xffJ\x15g/\xf0\xdb\xda\xd6\x92T\x86u\x87\x11_\xc8\xaf\x1d\x16\xb0\xbe\x1e\xc9\xb8\xab\x1e_\xd1\xd3\x94\xdd\xc9L\t\x03\xca\x90rO\xfd\'\xa5\xbf\x0b\xfb\xb1\xe1\x92\xa6\xdd2\xec\xe0\xdc\xf6G\x1b\xe0Xx}\xfa\xf5\\\xe4\xb2\x1eSG0[\x9d*<\xf5wP\x84\x1e\x95\x8c\xb0\x8di\x14\xc9\xbd$\xef\xb7!\xee\x98\x05\x8e\x1dG\x93\x9b\x8e$\x91\xc6\xfe\xe1\x07\xcf\n\x1a!\n\xb5}@p.\n\xd1ck\xdb7ni\xd9\xb7\xee9\xb0\x9f\x18\xd8\xb6\xfe\xfao\xdat\xbe\xf5\xa9\x9a\xaa\xf9\x04\xb0\xe8\xee\xd0erV\xa4\xa9v\x8d~{\xdc\x16\x0cw\xac\x9fM\x0f\x960\xdd\x97\x1d\xf9\x08\xda\xbd\xe4\x07\x8c\x11m\xe8\x0b5\xa2\x92\x06\x1b\x902\xb7\x95\x8f\x1bT}1\r\x1a\xd3\x96\xcc\x89\xb7\x13\xff\xb5y\x9dq\xd7\x1d\xc3IO\xccbz\x8drP\xd3\xb7\x91\x8d\x96)\x97\xf0\xfb|H(Z\x0e;\x9e\xbc\xba\xa2\x06\x93\xf9\x01\x8c\xd8Y\x96~\xc0WuV\x0b\xa5\x88.%#\xd7~\xa3\xd9#\\z\xde\xed\x17I\xf5\x8e\x90G\xc9F\xeb*\xaf\x82t\x87\x17\x8a\xae\x8d\xaeV\x9dd\x0e]\xe2\xc7\x8d*\x13<\xfce\xf5\x17I!\x84\xaa2\xae\x96h\xff=\xd3\x81\x9a\xc8f\x1b\xaeR\xfd|\xcdU\xb3\xac<\x13\xc5\na\x95\x05\x88\xb0\xd0B\xa7\xed\xbe\xb2\xe43\xedi_\x03\x0emb\x8f\xe2\xe6E$T\x0b\x7f\xad\x0b!\x0eM\xa7\xa7\xbb\xab\x03\x8al.\xb1o\x7f\n\xe0\xe2\n\xf5(|\xd5x\x10\xfd\xde1\xc2\xe4\xb33#\x0e\x9d\xc4\xce\xe6\xd2r\xcb\xd4\x9fC\x94\xe5y\xc47\xf4\xfdP\xda\x1a\x89S`\xabQ\xa9\xff\xf8\xdaO\x8f\x1d\xc6\xfd\xaa\rz\xbd.\xe0o\xa0\r\xe9{\x0cPb\xe06\x00$\x17T\xd5\x0c\xf9wf\x07\xdf\xd6\xfb\x14W\x0e\xd7\x02Q\xfa\x8b\xd2L&\xa7\xc8\xf9\xb9,\x848F\xb6\x92\x81\x83*S\x07\xfaB|\x9f\xbc\x03\xfb[\xa4\x12\x15U/\xaa\x0b\xb7x\xb7\x88\xe2\xb9\x0f\xd7*\xf7\xca\xdbb\x12\xaeA:&\x19\n\xf3*5y\xc5\xd9\xb6\xb8P\x12\xe8\x1e\xd7\x90\xeblk\x86\xad\xb8:\xd8\x02\xe2\xf9\xc6S\x04\xbc\x8e\xe6\xfb_6\xa8^\x8b\xb1e\xd7\x1f\xd6\x8f33\xd9\x88\x95\xc3\xe5L\xf5\x17\x1bp\xa1e\xec\x8fop;\x12\x9b%\x89\x06\t1\x84\xfc\x8e\xed\xaf\xe3\xce\xfe\xfbh{\xda\r\x13\xb7\xa0\xee\xdd\'\xc6\x0b\x0bL\xb9Q\xb7{\xc9E\xd5\xf5W\x18+U\x80\xca\xbap\xd3\xdb\x07\x1d\xf86\xe3\x8dA\x9e\xde_O\xd1\xd1\x1dOJ\xe7Bb\xa0q\x
c5\xd8;\xc1\xb8\xe9\x96\xbcC\xbc\xf0_\x0f\x8f\xb4\xca\xfbw2\xddU\x92\xa6\x86\xce\xbf\xa2d\xad\xda\xde\t\x1b\xf83\xbf\xb4\xfc\x17\x9b\x16N\xb0\xa6j\x8dI\xf7\x15\xe7\xc9\xf7\xeb\xc7@)l\x93\x9e\x8a\xd6\xa8\x9a\xa2\xac@Dx\xf9\x9bj\xf2FD\xa5\xd0\xf1\xd6WdZ\xa1\xbez\x87\x04-\xc2\x02\xc4\x8c\xb2\x9b(\xcb^\x0e\x11j\x82\x92a\x88j\xb8##S\xf7\xd0\x83@\x10\xe7\x93\x9b\x00\xe0\xbbz\xb9g\xfaq\xfa"-\x89m?\x14\xe8[[\x18\x1c\xdd\xe1,y\x9b\xa6\x02\x80\xc1\xc4*5+\xc7\xa2\xca\x97\xf0+jW\xb9\x14,\xaa\x87\xfa\x8a\xa2Auqi\xbbi\xda8\xf3K\x95(\xbaqC\xdf\x9c\xb9\xee)\xc4\xf3\x03z\x92\x7f\x9as\xee\xdb\'\xef\xf9\xbe\xa6es:S\xb9&R,\xe8\xf19\xa9\x01\xe2\xe9\xe59\x14\xa0\x04j\xbf\xecRyy\xa8\xab\x1e\xec[X\xe9\xdf\xfaS\x8e\xbb\xd3\x9d\x8d.\x08utq\xa4\\Q~\x07\xfd\xb3 \xae\xfcC\xaa`=\x83>mx\x9b\xf3/\xe0\xf4\xc4)\x18\x8f\x98|\xa0/\n\xfd!\xb1&\x8e\x81S\x14\r\xc7\x83\xee\xef\xd6T\xbf\xd4/a\x92\x0c1\xed\xcc\xbbq\x11\x10[-wz\xf2\xc4\xd0\xc2`\xb3\x9e \xe6\xb1O\x90\xcd-3\xd9\xa6\x8f\xc2~\x7f\xfa\xa8?\x99\xef]L6\xaab\x9ai\x9ce\xb99\xb56\xe2xn\xbeVW-t\xd7z4\xb3D\xd3\xc9\xd8\t~\x82(\x19w(QK\x0ck\xbaa7-\xfa0\xc9\x9f\xb1\x8a\xb9\xe6(\xf0\x1b\x86\rJ\x1e\'/oo\x82^\x9b7\x7fUi\x19p\x07\xef\xb7\xd4L\xdce\x9e\xeb\xc8\x91\xcf\x1d\xcb\xfd\xd8\xd4O\xb5\xd3x8j]4\\\x9f\xf9\x148\xa2\x18\x96G\xcf\x98\xe7\xc2\xc2\xfch\x08\x0b\x7f\x05!\n+ 
\x8e,\x9b\xdd%\xa5\xda\xd2\xc0\x922.NC\xfe\xda\x84\x92\xdbt\xe88^\x1e\x05\x04\xb9\x9d\xd6\xbfK*7\xdce\xa6\xcc\t\xdawb\xe87\x80\xe5K\xb4\xf5\xfc\x16\xfd6d\x06\x0fd\x10\xf98\x0f\xf2\x8bo\xe0`\x19\x12\x12\xcb\xbc\xd3\xf6K\xa5\xab\xaf\xe0\\\x9a\xd2\x9b\xdcy\xdd\'\xc2s\xc9\xa0`\r\xdb\xe9n\xec\x95\x96c\x13m-\x13\t\x06Nwa\\\xab\xe7CL\xbeS"]\x1eX/\xa5N\x8c\xc9:2\r!\x0f5\xe4\xaf\xcdL"ME?\xb2\'\r\x18LD\x10v\xf0\xa4\x7f\'8\x9b\xab\x7fq\x07=\xb4\x87\xa1\xa8\xcbX\x04\xb1`1\x947|\x99\x98v\xe5\xf1\x9f\x8b\xfd\xd3\x80Y\xe7\x95\xf6\x9c\xc1\x91\xb8\xba\x03\xc6@\xef\x9dF>W\xd9\xd7O\x9dy\xe1Y\x1a2\x93\x08\x88M\x97A\xc3\x89\xc1\xcf\x02v!\x80\xca\xc0\rRL\r\x1c\xa2u67\tK\x81\xd8d\x8e\x80v\r\x9eYJ^\x94\x129\x107Y\xfd\xc6\x9b\xef\xd7\xd3}\x8a%Y\xdb\xe4\xb5\xb17\x1dXr\xaf\xa3\xf3\x1d\xa4;.\xbb\xc0\x0b\x9dv\xd2\xa1\x8d\xd1\xaeC\xf8\xaf\x1b\xf8\xbf\x81E\x12Kp\x98eh\xf4qB3\x97\xa7aZ\xc8\xcc\xc7\x1e\x97\xb7\xfaH.\x99\x1b%\x8b\xbf\xbaq\x92sp\x1cR>\x8cD\x98(\xb1\n\xb7\xbe1\xf5\xc4\x11\x84\xc0s\xfc\n\x13Scj-\x88Ve\t\x06.\xc7&\x8c\xbd`\x80\xcf5\xe7\x8dX\t\x80$9\xce\x99I\xdd\xf0\xd23P\xbca\x88\x85UC.+!\xbdJ\x7f}\xabO\xfb\xb5\xe1\x84\xb84\\Kh\xd1\x1d)K\x1b\xefF\xfb\xaf\x87\xa9\x17\xa8i3\x1en\xd5\xbe<\x1d\x07\xb1\xa5\xc1k\x84k\x7f0\x9f\x08\xe8\xb2i\xc7m\xbb\xe3\x06~\x17\x1d\x8e;\xa1\x1d\xdb\x13\x84\x99w\x9a\\\xc8"\xea\xd89\xe5Y\xa6\x96\xbc\xcb\x85\x92\xc8d\xe9\xf2\x1f\x93\xa16\x8e\xac\xe2\x90\xac\x83v\xd0]\x10\x856\xc3\xbd\xfb\x03?\x1a\xe1-\x03b\x13\xbf\x8c\xac9":g\xc0\x8c7M8\xbf\x0c ;\xb8\xc6\xddI<\xe7\x8a\xfd8\x83\x85\xe8\xd6&\xcb\xaah+\x12\x8ay\xca\x10u\xf2\xa9j\x86\x88\xf0\x14\t\x8aS\x18\x12\xe7\x14b\xba\xb4\x17\x7f\xd4\xb8h\xb2/\x85\x0f\x7fj?n\x18\x04\x17\xea/;\x1d{\x8e\xdef\x10"w\xdb\x9513E\x81\x0e\xdb\xcf\x97\x8dUw\x8e0\xfe3\x9eWj7\xa9\xd6\x83\x06O\x13u\xeaP\x8c\xda|\xe1\xb8\x02053\x15w\xa7\xb7\xcd<\x92l\xd8\xa6P\xe1#\x9d7\x9c\x8d\x8ev/\x95\xa8\x1az\xb7 
\xeb\xd9\x8b\x93\x8f\xa0\x8c\x98\xb4\x9b\xab&%p\xc03\x90\x9fih\x12\x08\x8f\xa9}\xbeA\xdc\xab\xdb\x1aP`\xef\x14\xa468\xd2Z\x80\x88R<\xe67\xd0\x86~\xda\xeaI\xdbN\x1b\xbf\xbd\xdd\x87+\x8f\x90h\xd7\x96\x8d\x1a\x07k\r\x02\xd4\xdcUS\x9c\xa42\x08+\xd6\x11K\xbbl\x13\x8e\x95\xd2i\xb7h\xbf\x03<\x9e\xc9;\x0c\xd3\xa2\xd7b\x15\xaf\xbdV\xf0\x9a\xd58c\\^\xec\xb5\x8c"eJ\x9eo\x91\xb8\xd8\xba\x99(\xcbS\x0cgQ;C\xe38E\xbd)\xf62\xd5\xdf\x1d3\xba\xe1\xd2\xe59\x95\tX\xa1\x9bi\x1b\x0b\xec\xc8mM\xa1\xcf\x91\xa2\x1d0\x1b\xcb5iKf\x1e}cg\xbb\xb2\x01\x95zq\x95\xa7\xf03Q\xe4X\x85N\x04\x0b\x96\x9c\xfa5]\xcer\xae\x98aef\xa3B\x06\x1eL\xce\x9d\xa7\x8bnE\x12(\x06\x19\xbcl\xbc\r+\xdd\x8a~\xb4$\tu\xee/e\xf3p\x9a\xe9x\x03\x12\xdd\x0f\xa5\xd8\xaa\xda_\x08\x80\xea\x13t\xb7\xb5/U$\xa8]\xc8\x12\xec\x9b\xf4\x90q\xe9\xd8\x1dy\xc9\xf2\xd9\xb6\xc5\xcb-u~\xc5\x02;\xd1\xcf\xe2\x9f\x88:\x99q\xf4RB\xee\x0e\x03w\xd0\xf6\x93\x1e"\xdf\xbe\xd7\x90\xaa\x07\xee?\xc4|\xeb\x8d\xb5\xd1\xd0\xe6\x81{*{r\xf5Y\x00\xf2ns\x13\xf6o\xe2]\x8cBH\x04&\x85\xf3\x18\xa9S\xef\x0b.&\xaa+\x0c\xc3ebd^b\xddqt\x93G\x91\x0c\xd7\x83\xc9\xf1W\xd1t\xeb\x1a\xf7p+\xa4z\xd8\xb1{\xd7\xb6\xccy\x88\xc9\x89\x1a\xac{\x0c?\xc1\xc2\x01\xdb\x91\xe1\'\xb1\xef\xc2\x83\xde\x80XBB^?\x1clY\xc8\xb0\xed\xa9\x0b\xa8>\x8de\xf3\xc8\x0c\x96#\x1d}\x13\xe0<\x05)\x7f3`H\x9aW2\xccU\xc31b\t[\x87\xb1\xbb\x05Lg\xeb\xc62k\x8aR\xf5z\'1\xc6\xa2#\x1e\x7f]\xe5\x87_p\xdd\x87U\xde\x81.\xd1Y\xec\x02\xc9r\xa9\x7f\xf3\xf3\xad\x02\x94\x88P\xd4W\xe7\x07\xb8*\xf2\x8aEt\xf5\xa7Z\xd4\x15\x99pp\x8c\x8d\xb2\x16\xc0t\xaf?\xa2gY>a\x88o<\n1_~?\x0bP\x01\xb5\x07\xe3(\xbe@\xcb\xd9\xa92\x8f\xee\xe1&NI\xe0P7U\xab\xc6\xeb\xbf\x0e\xabA\x88>\xdb\xc5\xc7re5\xdd\xadtO(*&\xd4\xfca\xc7N\x1bv\x8c%\r\xde\xd3\xa4\x95\xe1n?\xb6n\xe2f\x98\x1f\xab\xd8|W\x96!\xcf\xa3\xfc\xae\xd3.l\xdc@e|\x82\x07PS\x1e5\x1d\xdf\x05\xaa\x81\x8f\xd4\x0f#\xbbA\xe2o\x91`?\xc1\x12e\x82\xc6\xaf\xcb\xfe\x14\x05\x04\x9f]\xc8\xf3\x11i\xb4V\x9c\xd7\x12\x85\xf4\xab\x8d\x10T\\\x847v-\xfd\x05\xa2\x82n\xe9[*\xf4\xa3\x05\xfd\xb2!\xff\x18\xcf>\n\x
8a\x9fRc\x91\xa3x\x04\xe7|\x85qI\xb2\x0b\xeav\x95;&\xcd#\xdb\xcbvO*\xd1\x14-am4\xd5M\x10\x02\xe4V\x7fh.\x9eYb\xeb\x7f\xf8\x86)\xf9i\xd9\\\x90\xa9\xf8\xe8#\r\xa4\x84\x92>!\x94MOD\x8a#I\x83\xaf\xef\x8a\x0b\xcb\x9a\xdc6\xb3\xd2\x97\x8c\x0bs\xd3\xf80\xa9o\xb9\x08\xd7\xdfH\xa4y{8\x95\xe7\x18\x10\x87\xc3\xb4\xbf\x12\xa8\xd8x.\xd7!?t\xf1s\x86J\xece\x11v\x93\xbe~\x1d2\xafLj\x14\xa4\x81\x9c\x9f\xfd8\':\xdcD\x12\xe9\x86\xb3\xb6b\xbf\xa2\x97=>\x841|\xe6]\xa4F\xde\xde.%\xdd8;\x87\xdc\x05H)5\xd1\xc0\xa4\x16\x91f\x82\x93\xf3\x17\xee\x05P#\x1e\x95\xd0\x0e_:[x@\x1b\xdc\x193B\xda@W\x04\xae\xc3\x1d\xcb\xcfHz\xb0\x97!a\xab6^E\xe5\x06\xaa-\x8a\xc46\x8d]\xac0w\xbd\xa6t\xe3\x08<KT\x94\x9aW\xdes\x98\xcd\x1a\xf8\xe0a\xa5\x82\xc2\xc6\xe6:dZ\x19Y\x08\xaf\xcf:\xd2e\xa1\xbd\x99Z\xe4\xf5\xa0\x9c\xeeUDdo\xf2\x8eo\xbc\xc2O\xee\x0c\xae\xb5y\xe1\x96j\x18\x0b,\xeb\xbe\xc3\xa3\xe4\xef\x12c\xc7\x8d|W\x85\xb4\xfd\x9f\x1dey\x9a\xc8\x1b2R\x15[]\xd7\x96\xa7Ss\x13\xed0ge\x0b\xc3x\xfb\xba\xc7\xebo\x8bm\x04Tq\x90\xa4\xec\x15\x8b\xcd\xfd\x9f=\xd4\xa2\xed\xaa\xbdB\x14)K\xadt\x96\x9f\x90I\x9a\x0f\x1f\x83~\xa7\xd7\xd9\x00\xcci\xb6;K\x84\x1e\x1d\th\x11x\xe4\xf5\xdd\x8b\xe1\xaf\x94\xc7\xb7\x83\xd8]\x1b\xbe;\xec:_\x85\xc9\x02\xb4\x0e\x8b&\xdbN\x92d\x01\xfd\xb3\xc5@<\xe3\xd6Q\xad\xd70h\xd3uih\\\xb8w-\xe9D\xa5M\xd2\xe3\t\x1b@\xd4\x8eDW\x9dV\x0f\xb5\x12\x80\x9fN|\xc0\x05;\xc4\xae\x1cS\xcd\xaf+\x8b\xfc\xbf9x\xa5\x18C\x1cL\x19\xc87\x03\x9e9\xca\xfe~N>\xe7\xf1O\xcf\xd8S\xd3\xd3-.\x9d\xecJ\xb8Q\x84S6\xc0$\xe7u\x86!\x07\x06\x80\xcf\x83\x80.b\x1b\x91\xc7\xd2\t\xb5w\x0b}JM\xd2HW:BX+UG^B\xcd\xd7\xbf\x14\x1b\x9a\x16\xa5iA\xee]\xf0b\x04\xe8\xc6pn\xe8\x1eXd\xa2m\xb0\x19>s\x1e\x80|\xda\xea\x86\xd1\xf0\'\xb9NN@#\xb1)=\xed\x0e\xd2*\x06Vv\xf9\xaa\x02\'\x8e\x98&\xa4G\xa9vk\xc1\x07\xad\x8c\x8f\xc0\xabf\x96\xc9\xd6\x98\xfe\xd9\xf8\xcb!\x06_\xeb\xf0\xb0\xc5LG=/Zv\xe3\xa7\xc3\xa5xl~R\x0cB\x01g\xe9\xc7\x83*\x8b\x94\xfb\xa8($\xc4\x11\x85A\x1f\xd0M\xc7\x0cFXp,*\x9a>6\xb7\xd8A\xc0\x90]ohr:\xea\xcb\x8e\x9e\xa1\xc4d[\xf8p\x10\xf0\x8d\xe0\xe2\x
b1e\xe6i\xf2\xb17\xcahK\x81\tS\xb1L$\x96k\xbd\xcfp\x8a\x13\x17\x95\xb1\x1dx\x96\x93g\x95K\xe8VD\xcd\x8c\x80L\x0f\xc9@\xa7\xae\xf7)\xd2\x9b\xc9m\x9b\xdb\rL\x8b\xc94\xddc#\x97\x8a\xc5gpe^\xafG\xd8\x00\x12\x89tciu\xc6F1\x1e\xa6E\xb0\x9f\x91i<D\xa0\x8b\xb1\x83\x05\xb5\x01\x051\xad+\xf2>3\xc4\xd4\xc0\xf4\xf5QN\xe0s\x10\xc4\x91\xbe\xa1\x13\x8c\x10\xf0\x19t\xcdn\xb1O\x05\xc8\xc1P\x8cw\xa9(\xd0qP\xb3\x0f\xb7{\xf7\xa3\x9d$\x89=-:\x08\xf9\x94`3z!\xc6T\x1a\x05\x0cYm\xf6A\xbf\x97\xe6k\xfe\xb6\x9d}\x11\x15N\x9f\x8d\x0e\x9d\xc9G\xc96\xb2\x92\xd4\xd1\xfd\xdb\xb7N&\xa2N\x8dR-\xda\xa6\'\x0fW\xb4cs\x8d\x9f5\x13\xf0\xf4l\xeb\x1f\xec\x04\xeb0\xf1\x8f3\x7f\xbd\xd6\x82\x1f\xca\xa7-\xf1|Z5\xcf2\x90\x96\xbe\xc0b{\x15@\xd7\x91L\x81-C\x99\x1b+&}\x07&4t\xd1\xb4E\xd45c$W\xf9\xc78h\xe0fc\x19Q\x90\xd4\x06\x00z@KE\xa3\x17y\x930\x97\x8cp\xfd\xd1\xb6\x0b\x81\x93\x0c\x10\xef\xecG|\xe1)\xc9)\x1f\xe7`\xee\xf8e.f\xa1\x1b\xccn\xec3f\xe1\xb6[$4\xf8Y\xdfS\xf8\xbb"\xe3\xf4\xd5(\x90H\xc5(A\xabN=c\x12\xe5V\x8aY\x7f\xce\xd2!\xd4\x1ct\x80\xde\x19\x01:!\x98\x9b\xda1\x00\xd9\x1b\xb0\x94\xaf3T0\xa3Y\x801\xf4\xc3\xca\xfcb\x90z\xe6\xa8\x1f\xfc\xe7\xb0\x9c\x91\xe5\t<;\xc0#\xfa\xe4\x9bz\x0b\x92\x06\x0b\x81\xb6.\xc1\xbc\xfaX\x81#Jk\xfc\\\xef\x96\xcd\xdb;\x8e\xe8\x14Q\x12I\xdb!\xffV\xe8(\xb6\xc0\xcd\x05\xf5K\xb2/\xe0\x12\x91\xad\xf4\xe0\x95u\x8e.\xf0A\x85h\xa5\xe0\x1e\x1eR\xc8W\xe0<\x85\tq\xf4\x98\xb4u\xe2\x1c\xe8\xa4=\xfb5^\xde7\xde\x82\x87\xe3\xa6#H\\>W\xe0|\xdd 
\xc55\x9dk\xfc\x8b\xb1\x0fe\xf0d\xce\xf4\xdb\xbf#[}p\x95\x065Z\xf8\x19\x98f\xc3I4\x9f\x996R\xect\x9d\x0c\x19\xfb\x15L3\xecg\x00\xd9I\x80Z_\x9b\x96\\\xb7>r_\xb5\x99$"\xf6\x08\xd1\xfcR+\x153\xeb\xd5\xfe\x90Gjs\xa0\xc0\xab\xe1\xcaq\x84\xda\xd1\xd2e\x1a`\tS\xca\xdeCN\xa1\xd4e7\x9c\xe8?\x98PD\xf87\xa6rO`V\xca\xbe#\x01=I}\x1db\xaa8\x02K\x93S\x1b\xa0ON\x93\xb1\xba\xb1>LY\xd5;8\xe4JE\xc3\xf5}\xc9\x82\xdb\xf9pH\x0ck\x16L\xdc\x8fQ\x12\xef\x98A\xfe\x06`c\xf4";\x00B;\xdawa\xb7\x14\xd0\x86\xe6\x16\xd0{lC\xe95D\xef\xb5[\xec\x9c\x9b]\xcfj\xaf\xe4H\xb6`|\xaa\xb5\xce\x93~\xe7\xd5\n\xd0#\xd5"\x86\xfdf\x92\x1abS!\xb2\xca\xdd\xc5D\x9d\x1ak((\xd7\xdf6\xc7\xd9\x8eY\x02B\x87A\xfd\xe6\xcd\xab\xc2h\xa0\xe67\x95\xa8\x8b\x98=\xb4|Cm\xe7\x0e\xc8,?"5-\x9c\xacV*+\xb6\x19\x8a#\xc8oe\xdd\xb4eb.i\xdf\x02a4k8\xc6\x0f\xb5[\xe2\x96\x06*/\xd8\x08\xde\xe7*\xb9%\x93\xa9\xae\x07\xb8\xf6[JR\xb7\x13\x9f5\xb6\x15r\x11\xe4\x9f\xfak\xa0\xe1%*\x82\xb7\xd0\xfe\x1d\xe3\x07!CI\x86v\xa9i\x8cF\x05\x16}w\xcdV\xa2i=\xd2j\xcf\x9e\x8b\xc6G\x88\x11\x8e\xae\x8bd\x01&\x07\xcf"L\xae]\x80{\xd1\xbes\xa1\xdfM\x05\xad\x07"\xef\xb9\xe4\xe6\xcd\xdekn\xf1C\xf9\xbc(N\x1c\xa8\x0cG 
\x7fa\x8f\xea\n|/\x14\x88^\xc88\xea\xb8\xa5\xa5\xd2L3\xd3\x18%i\xa9\xbfN#Xo\xf9`\xe4\xe5\xe6\xae\xd1\xcf\xe4\xde\x89\xd5\xd4\x18N%\xbe\xb7F\xba7\xd4\xca\x12B$\xb8\xab_\x04\x8b\x8cp\x8e:\x04\x9d\xedE5CM\x88\xd1\x16\xdc\xf0\xd4\xf2<\x8c\xa0y\x7f\xbf)C\xe8Z\xacx\x1f{s\xe8\xd8c!#\x93\x87\xe6\xf2\xa9i\xaf!\xd8\xa9\x10\x94\xb9\x1d\xe1\x93\x91`l\x9fe\x926\x88\x01\xeb\xe8\x1f\xfa~RYB\xa8\x01\xeeW-\xfeM\x86V\x98\xa8\xa5\t\x91R\xac\xec\xfd\xdbh\xe5[\x99\x14\xd8[gkG\x0ee8\x00Ex\x9c\x93\xe5\x89\x90\x05\xcbz0\xe1\xef\xce\xcc\xf5O\x17\x02a\xf6\x12\x9a3\xe6>\xea\x9a\x9dL$%\xd9\x8d\xe0\xa3%\x98\xa6;\xec\x01\x9d!Y`W\x1777\xf8\xf7L\xa1z\xb5\xc2\xbd\xb4X\xa2\n0pJe\x08\x13\x02\xfd\x9f\xc5\x99D\x01\xa0=\xb33U\x07\xa34H\x06\x9cZ\xbb\xf8\')U\x1b\x1d\xda\xab/\xc1\xc0\xe2\x1c\xfe\x81\xb3s\x99p\xf1\x14\x12+\xda%Z\x9c\x10\xb6\x8fL\x1a4n\xf1\x16\xba\x80ii\x0b,\xc7\x1b\x89\x0e\r\xa5\xe4~H\x0e\x9a|\xae\xa1IZ\xc1G\xe4\xaa\xfbG\xc7\x80\xb3\xa1\x97&\xb0*\xd3\xd3\xa6\x07\xc8\x01\xf9\xd8\x95\xa8nl\x80\xab\xefS\x01/VP\xa1\n\x86\x0fS\xc0\xafe\xdc\xb5\x19.\xa8~\xfc"\xb9\xa5:\x1b\xd0"d&8\xe6}\xee\x88\x97\xb3o\x1c{\x7fY\x89\x10c\xd4\xde\xff:\xd6\xfb\x07\x9b_\x837\xbf8\xfd\x85q\x08\xe1YfI\xf8\xf7\xd1\xc3\x91gU\xeb\xce\x06/\x15\xcb\xf0\x11\xf9\x1d\xd9@\xb7e\xdd\xa8d\xb6\x86\xd5\xa7\xb2T\xe3u\xa9\xb2\x0e\xa5_\xb0\xdb\xc0\xbb\xa2\xdf\xb4f\xce\x19\'\x1a\xa2\xa2\xcc\xa3E\xd3|\xca\xcc\xed-\xf6!\x1f{\xc9\xea&\xd8\xaf+\xf39(\xd9\xbe\xa6!\xb0\xa4KO-\xf0\x9a\x98^h\x15\xa4\x86)\xb0;f\xc2\xad\xa5\xe7\x01U\xb0&\x0c+\xbf\x05u\x89\xe4\xf5\t\xb4Y\xcd\x80\'\xb9\xde\xcd\xacV\xd9\x1an\xf2\xa0\xc6\x07Tl\x18F\xd1\xd8\xf5\x94;9S\xf1Q\xf2\x84\xa0fUkz0\x99\xac\x9bMm1\xc5\x87w\xe5\xae\xfc\xcf\xd7\xad\x1e\xd0( 
\xd9y_}Q\xc4\x11&\xa7z6@u\xd4\xa5\xbf!l\x81\x8a\xe0w:\xb6\xf6G\xd2\x89\xad\x8fk\xdcF#\x96\x8ff\x85\xf3>O\xb2\xc0HEa\x15\x15t\xa8\x0f\xb9\')\x86\x99\x82\x83^4\xbc2\x8f\x16\xb9=zN\x10:\xa6P\xa9\x02S\xe2\x82\xd0\xf7\x1c;\xe0\xa4\xd2L\x998\x0ck\x7f=g\xbe\x16\x87\x1cT\x98\xfdF*_\xc5,G\rI\x02\xdb\xf6\xe7$\x15\xeb\xb3\x95QZ\xecS\x11\xfe\xb0^\xed\x17\xb7J0\xa6\xa6\x8a+\xc2\xc9\x19\x1fp_\xb6~\x0f\xf9[\xd3\x87\xeahED\x12\xb4\xa6\xf7j\xe9\xa6!v4\xcb8:\x10\x8b\x12A\xc4\xe6j\xb8\x1fqX\x99:\x8b\x10B6\xf7\xc1H\x8f$\r\xac\x98\xce\xf4\x98\xd7\xff\xdb\xfd\xa1\x0b^G+\x9d\x7f9\xc1\xd0\x159f,\xf2\xa3\x18q\xcb\xebD\x92%B\xe2,W\xc4\xd4eU\xbe;\xfa\xcaN\xb84\x0ei,Z\x8e\\I\x81)\xc3\xde?\xcb\xd8\x01\xab\'I\x9c.\xd2\xbc\r\xc0\x01\xf3\xf4=\x9e\xb3\xdb/\x8f\x1f\x03\xf7<\xe1\x94\x83\x1cD\x04{\x81\nEq\xf4\xa4\xf2\xe4\xf5l\x12\xad\xbb\x13\xe4\x1a\xcc\xf0\xd5^\xb5\xf3\xc0\x80\xc9\xb0t\x0b\\\x1dR\xbb\x08o\xe2\xb5\xbe\xed\xc9\x17\x8e_\xfc\xb6\x1c\xf9\x8d\t\x85I>\t\x91<\x95\x1e>\n\x86\xffU\x07\xb1h\x04\xc4r\xad\x10/m\xec"\\.\x92\xb2e\x91\x91\xe0FE<\xe2\x89u\xb6\xd0\x8b?0\xc3\xe5\x97\xc6\x8b\xb3\xa9)\x88\xe0\x85\xd7\xaf5U\xa22\x00\x8a6\xbc\x99\xc73mH\x04\x0c*\xbe\xc5\xa0\x89w\xaeB:\xd5\x97k\xc9\x9f\x80\x87\xb32\xe4p*\x17]h\x98\xca\x87\xc7\xc4\xfd\xaf\xb7.s\xf5\xb0\xa8fe\x97\xd5|\xaal\xa4:\x9ft\x8a\xca\x8a\x80\xba\xa7\x94\x84vh:\xce.{\xf9\x83\x15\xbcO\xed\xce\xa9\xc7C\x1f\x9f\xc0\xff[\x9c\x07#\x80R\x81\xdd\xb0\x11D\xe2\xfc\xf2\xeb\x88)a\x9d|\x85_\x17Fw\x96c\x01+U\xef\xbb<\xfbtE\x08\xba\xff\xe6\xc0\xcfmK\xc2\xa5r\xaa\xe7W\x07\xda\x9f+\xa0g\xb5\xd8\xd3+\x93\xd2\xe9\x86Y%\xcf\xda\xf9\xe4\x0b\x08\xe7\x0ebh\x82\xfb\x9f\xb8H|\x1b\x00\xe4l2\xdey\xdc\x03\x8a\xb7!6\x0f(\xc3\xa6\xe0\xd2\x0eAP\xa1\xff\x99a\x99\x1e\x99\xbac\'\x9c*\t\xce\xeb\x1b@\x81/\xdf\xf8\xca\xfc\xcc\xeb#\xcfT#/\xc7`\xb0N\xca\xf3~<\xf9[\xa2\x95\\\xee\x93\xcd\xee\xd60\xc5\xa1\x9et\xdf\xfc\x17&t\xdcts\xa95\x90\x10\xb44\xb2\x19eB\xe4E\xc0\xcc1\xee,o\x04\x15v\xf1a\xac\x89PF@5\xc5\xe8\xa5\xee\x0f 
)Ev\xe1\xd1\xed\xa3\xdd\x96\xf7\x9fKf\xbf\x084@\x12*4\x11\xc3\xf1\xf3\xb17C\xdd\x88A\x961\xb0ui\x8aHA\xc1e\x00a\xa5\x9b\xd4EPe~\x04\xf8\x0f\x9d\x15}\xef\xe2\xfa\xd4\x8d\xae\xd8\t\xdcVe9}naR\\\x93QYd\x94\x1c&,\xef\xc7\xd5\xd0\x80\x99/\xd8\x85\xc2\x08D\xd3*P\xd4\x1d\x04\xb6\x06\xa9mc\x9a`\xa1\xb5\xcdb\x87\xd9\xd1\x84\xc3\xb3$C^\x04\xb6\xce\x01]\'\xb9\x14\xe8\x10Q\x17\xdb\x1eXK?k+\xb8\xed:\\\xa3#K\xb3\x8a~\x82 \xd0N\x86\xa9\\J\xf5\x05 ?\xb7\xb4\xca\x18\xe8\xc66N\xc5\xf3\x9a\x00"}\x0e4\xf8\x0b~\x906\xf0\xc7\x89\x93p\xb3\x16\x17\x96\x93s8\x99rn1_o\xe9\xd5\x90\xca6\x17\xef#DWT\xbe*?\xe3\xa3\x97\x8c\x9a\xdc\n\xc8\x0e3\xd8\xfe\x98\x1b~\xe94m\xd8/?\xb0\xaf<y\xc2\xd0\x80\xcb\xfdB\x81VL\x93\xfa\xd3\xb4\xa3D\xcd6\xaf\x87lf\xbc@\xf7\xd6h\xa6\xd8\xb3\x82\xf4\x9d\xa9\xdaK\xf2\xd5\x99V\xc0\'\xf9\xd2S:\xdbB\xd4]H%\xe5\x84\xd5\x87\xce\x04"\x07\xdcD\x01\x19\x8a64\xeb\x92]\x1d\x90\xfa\'\xc9\xab\x96Ya\x0c\xa6\x9e:9\x1a\xe2\x9b\xebd\xc4l\xc1z$Uc\x82\xb4\xd9\xcb\xacT\xa1\xeb\x06x5\x1b\xf8\n\x04\xa9\xb8\xac\xffN\x8b\x9a\xccx\x01\xcba\x86\x07\xef\xa9\x17\xdd]\xe5\xa1\x91\x12\x9f\xb8\xea>\x0c\x92\n\x11\xc6"\xa5\x02U\xaf\xff39\xcf\x8b\xff\xa1\x1bb\xf4\x84\xcd\xc9\xf8\x81?\x83\xed\x0b\x19\x87\x9b\xe54CYqfA\xe4\x07\xf1g\xe8\xa2\xe8\xca\x9a\x81\xbe\xe9\r\xe2sE/\xc5\xdcb\x07\x9bh\x0c_|\xaa\xf6c{\x9eR\xa88\xa4\x96\x04\xa2\x94\x0fI\x1a\xcd\\\x00\xb1\x86G\xec(\xb8ig\xff\xa5\xe1e\xfd#\xf8\x07\xf0{\xe0\x95\x9d\xbfs6w\xa5\xa1hr\xc4\x15%U\x16\x83|\xb1>TOD\xde2M\xe1\xa5\xcb\xee\x01[(=\xf3\x9d\x7f%8F\xa0cD\xc1\x16\xf2\xf8zo\xeb6\xa0\x99VO\xae>,\xc9\xdd\xb1\xe1=\x95\xd6\xe4,(\x07\xecA\xecqq\x03\xef|\xa3k\x17\x89%\x01\xce\xbeW\xe5\x19\x08\x17\xe5*~z\xb1\xe6T\xaf\xb6S.h\xc3\xf1\xfd|\xbc*M\xe6B\xe3\xa5\x0fMQ\x0eSA\x89\xaa\x9d\x12V\xc9\xc2/W\xf0hX|)\x8f1q0\x9f\xbcJ\xf9\x8a~%\x17\xe6?oP\x9b\xb0\x8f\xbeL\xcd\x96 
\x8f\xcc\xfaoK\x96\x90`\xf4\xe0\xce#6\xe3s\xbb:\xd1\xa2\xe3\x19\xbc+\xb8\x87\xc8\xf6\x80\x1a\x02Y"\xb7\xccE\xd3\xb6\xc9\x9fR\x82O1"\xaez\x17+|\xfbin4S\x9d\xdaf\x14\r\xaaDB\xfd\xd9\xd2\x8dR\xba!\x0e\xf1\x81\xdf\x1bd\x02\x03\x10\xf7\xbdV\xf6\xa1\x97\x02\xc7\xd8\xf8\xeco\x94k!\xc7\xf7l\xc5\x1d\x8f\x95\xb45\xc8\x88P\x1f\x98\r\xef\x93b\xe3\xe9\xfc\x8c\xf9\xb9\xb2\xbdy\xf3c\x07W\xe6\xd2j\xf5]\n2v\x01\xbfF\x04Z"\xe1/ R@\x827y_\xe0X\xa0(db\xc7\xa5\xb7\xab\'\xf2\x10\x00\xfe\x80&P\x7fb\x86\xe6<f=3\xcdt%q\nr#\xdd\x97(\xfe\x8b\x95\x9a\xfb\x15-\xa0\xafc\xf4r\\\xf1\xad\x9f\xecg\xf4}J9\x17\xb53\xb2P\xf5\xa9O\xe2\xb8%\xa9\x83\xf40\xda\xad\x93}\x85\x97\xfd\xb8s\xd4\xe4 \xb4"b\xc5k\x1e\xe1\xa9\xca\xe0T8\xf5\xca2\x10\xb6\xed\x0e+\x93\x97\x84\x8aO\x9f\x18\xb6\xf3\x14\xa5\xbeO\x0e7\xef{%\x02-:\xd0G\xa4K%\x1dW\xc4R\xf3l\x94\x01\xba\xc5\t\xe8c}\xc6;\xb0C\xa9\xce,\x7f\x9c\xeb\r\xb7Q\x9ch? \xe6[\'H\xdf3\x9d\xf3\x99Z\\\\\x91\xd5\x1f\xadZ\x98\x81i\r|v\xed\'\x95c\xf1,\xff\xf8\x14\xcb\xf1`\x1e1H\xd2r\xact\xde\xd8\xb6VA\x03\x14\x04_Zl\x8a\'b^i\x06*\xa8\xc0\x82\x88\x955\xb0y\xb5<\x8c\xe2l\xd6\xdd\xd8\xf5\x80\x8a\x99\xc2\xb3nC\xaf\xfd9\x1c_l{#\xda\x0b\x01j\x1f\x01B4\x12\x9c\xa7\xa2BqV\xfa$\x9d\xb8C\x0fY\x8ea\xe7*K\xafK#v-!\xb7\x99,\xfdI\xb18\x83!\xe7\xea.a\xcb\'=$\xccc=l\x1a\xc2;K@\xaco\xd5\xed\xe9\xb5\xdfA\xd9pw\xc1)\xd1b;\xb5\xdb\x99\x99\x0b\x88+\xa6[Y\xb4]c\\\xc4\xfb\x05\r\xf9\xba\x94\xeb\xca?\x16\x90V\x8aS\xfe\xa6\x8c\xc8k\xb3}\xa3\xbc\x8f\x95*<p\xf3\x0fo0\r\xe5Xj\x9d\x07\xd5\xec\x12\x1f\xf6\x1d5p\x82\xcc\x83\xd2_\x05\xb9\xe1gBwV\xcd>Ii\t\xaa/(\'e9\xc1\xf9\xfb\xe7\x05\x04v\x0ewt\t\x86\xf6\xe4\x8a\r\xe5\xd1\xccd\xd23Sd\xd5\'\xb9:\xb6h\xe4\x90\xf1\x15\xe1\x19\xbds\xcd{\r6\x9a\x7f<\xb8\xffz\x86n\xb0i*\x823<\xb8\xb8\xaf1\x05:A;S\xd6\x14\n\x17\x0e\xd2\xe3\xb1\x89`\x02\xb3\xc9\xe3\x9a\x12\x19\xba5\xc9\xb2\xed\x84\x1c1\xa7\xa7\xea\xe6\xe1q$\xea\x01\xb4c\xcb\xcf\xe5\xfb\x0e>\xa1yX\x93t\xd7\x1f\xca\xc8\xa0\x9e89\xd9\xb3\xcf\xc2\xf9\xbbl^\x08\xed\xd9\xefq?I1$`\xd0\xe1r\xcet\x91\x19\x08_\xc9H\xb3\xa5\x89
\xc8\xba\x86\x87\\#32v\xd4\xde\xde\x9c\x818\xdf\xe5\\b\xdf(k\xb8\n &\x1a\xaa\xc33\x9d\xdb\x9a(F\xdf`\xb6\'\x93\xa4_\x92\xfarh\xc8\x90\xb7\x17\xa5XL\xbet\x0b9v\xca\xfa{P\xdb4\x81\r\xbf\x84%\xf2Hn\xed\xe9v\xcf\x80\x12\xc9y\x88\n\xc6^$\x0f<\x93#\xce\xad\xbc7\xa7j\x0e8\x97\x82\xfcsux2\x14<\x82\xf5\xaft\xd1\x8b{\xdd\xf5\xb2b\x8c\x15\x19\xcdf\xa9#\xfe\xa9\xbe\xcctb\xaa\xab\x006\x87&Yf\xfd\xd6R1^\xc9\xd7\x15\xddr\x97\xc6\xcb\xa6N\xc78\x1f\xdf\x8a0Q\xcc\xea\xf4\xbb\r\xab)?\xc3ue\xf1\x00\xe1\xf5\xa7\xb9(\x917\xc2\xaad\xdc\\\x95MK\xc3\xf2`\xbd\xbdY\x1a\xfa>\xab\x1eP\x9eg\x021\'\xd9-yv\xbb\xd7\xcd%w\x0fA\xd0oO\xca\x085\x98\x0f\xf1\xed\x82|\xf7\xe4xC\xef\x10\xe4t\x03a!\xe3~\xc0\xceh$\x95\xa5)RW\xe2\xa9b\xa5\x98\x0e\x01\xcbd\x9f,+\xbbY\xd0t\x12\' \x80k\xe1\x06N\x04\x10\x80\x803\x1f\x03_\xea9\xd1@>\xdd\x88\xa5\xa9\xd9A{\xda\xca-\xbc\xfa\xfc\x0e\xa7;:\xd6\x1e8\xbc\x93#\'p\xaa3\xbb\xd3\x96\xa7\xc4\x9e\x0e\xeb\xb10\xd4\xf6\xeb\xa1\xcc\xa0\xd5\xa7w\x04"y\x9b\xf3d\xb9?\x9b\xac\xaf\x8aY$\xe0\x91\xda\x1a\xbc\x91\x15\xaf\x87\x98\xa0\x13\xdf<\xd1z(j\x83<\xffK\xa0u@\x811o\x0e\r\xc0\xda\x83:\'*pO/\x94\xae\xbeW\x01\xcaQa\xe7&<\xcf\xf3;\x85\x16_\x1f\xd2\xecq\xe5\xa0\x8f(G\x9ae$\xd8\x0e\x8c\x08\xdc!\xd5\xd5x\xbe\xcf 
YF\xda$\xcaT\xfeqe\x88K#\xc8\xafH>\x9e/x>\x00?\xa0\xf2E\xfah\x11\xe9\xe54\x17\xfcC)n\x86\x1a\xcf\xf3\xa7\xb0\xe9\x94\x0eP^\x1f\'}\x1d\xf1\xbb0\x03@\x8e\xb9\x7fo\xe7\xfa\x15\xe0\xd2L\xb2\x1c\xa2G|\x05\xc2\x0e\xe6\x8f\x95c\xd5+DP\x0c\x14\x8f\xecm\xf9\xdc\xaf\xfa\xcdu\x80\xad\x86Qc\xd7\xc9O\x14\xf46s>Y\xd0\x9d\xdb\xe3\xa0\xf8p_Apsw\x86Z\xad\xb9+h\xcc\xa6\xc7\xb3\xa0\x9e\xdf\xed\xa3\t\x1a\x90v\x97\x94\xf0SQF\xa2\x97{n`\x86\xcf\xe0\t\xcaK(r\xa4l\x86\x8dd4\\\xddbA(\x92\xad\x95R\xf669q\xa7\xde%\xcd2Zk\xadt\x00\xb6\xb8\xfc\xe7\x92Z\xdf\xa1\xee\xd7\xec\xda\x92\xef\x92mm\xee\xc3\x8b\xe8!O\xd3F\t\x9e:\xc1*H\xc1]\xd8\x19\xcb\x0c6\xf5\xc3\xa5\xbav\xf2\x90v\x00\xb3\x93\xd1\x88T\xd4\xee\xc6\xb1\xd6\xaf!p\x11Sa\x9a\xa2\x8d\xf1\xe3T#\xb9%\x0b\x86\x90\x16\xa2\xfbo\xa3\x9cn\xd74\xbbq\t\xb7<wp\xc3\x92\x1d\x1b\x8b\xd1\x9c^\x87\xb5\xbc\xd1]\x16\xfe\x08V\xf9\x9d\xa2@\xe6\xd7;Djh\xf1\x80\xe1\'y\xb7&\x05^\x92\xc8\xd0g\x88\xb6\x86\xf7\xcc\x87r\x13\xb9\x84:\xa0\xf5\x88^\x85\xfci\x0c\xd3@\x9d\xf34\xbd\xd6\x10G\x18ER\xef\xda\x0e\xbe6\x1d\x16\xaek&\x82\xddr\xa3\xf2t\x91(\xec\'\x1b"\xcd\xb7N*c\xc2\x8b@\xce\x1f\xe1\xc5wLT\x91\xe3\x8c\x0bg\xcb4\xb8\xed\x82\xb5\xb0\xa5\xd3#\xad\x89\xc6\xaeT\xf7P~m2\xcd\x9c\x9c`\x17w\x16\xce\x8d\xf7\xe7\xcd\xd8\x0c\xd0\xe8\xcd>\xdee\xd7\xaa\x86\xf59I$\r\x1a\xecRH\x16\x87\xe7L\x81\xferOtI\x96\x9d9\xf4\xc4*\x08ts3^\x95\x11\xce\x89U\x0c\x82\xc4\xf1\x9a\xd1\xea\n\xda\x11\xb5\x9e\xdd\x17T\xe8\xeaV\xd1\x8bF={\x80Q)\xb9\xc7\x94^\xbe\x90S+D\x88\xf9u[\x89|hC\xbb1\xd1\x8dX\xadU`\xb8\xd8dpN\xb1=\x86\x1d\xfc\xfd\x14rY\xfdi\xb8\xf5\\\xcex\xc1H\x86\xa8\xd3;\xff\x88K\xb5]\x95\xe0\xc3z\x14\x9c\xb3\xcdR\xc2\x843\xe9\xa5\xa9\xf5\x07\x84a\xf4\xe5\xee\xd0\xb9.\xec\xd8\x18GE0\x1dfu\xa8~X\xf3\xd1\x89\xe1W\xa7\xa3\x84o\xcdR\x18\x0f\xe7\x8e\'\x90\x16\xc5\xba\x07$r\x15}b\xaa\x8b\xc0\xa8\xd5&\n\x86\xbeS\xe5Y\xdc\x18\xcd\x15\x87\x00\xc60\n,\xf9\xf4\xd2\x8d\x8b\x84\x0eG\x87\xd6\x9fw\xe5\xea\x98\x12\xaen\x91\xc5\xc2\xc8\xb6\xa6g\'\xe8}0\xfe\x06\x9f\x8171\x01\x84\xff\x11\xd0\x1c\xf0F\xc6\xe3\xee\
xb9\t\x08\xc18\x8c\x17X\xa8\xac\xdfR\xa3\xb2\xa0\x1b\x9e\xf8\xdf\x9d\xa8\xd8\xd5\xc1[v\x81\xc9\x90\xb9\x86\x88\x1e\xde\xf4ka\x04\x07\x85\x07\x1a3E\x12\xc3\nr\x92\xb8\xa3\xf5\xa5\x91\x80SIet\xa9\x13\x92Bl\x1f{\xe7\xe3\xf6\xd7\x9d)\x02\x86\x01p*f-@\x92\x0c\xf1"\xd5\x92)>t\xb1\xf4\xd2\xbc\xd4\xe2!\xd1O\xb9\xc7\x063\xd9\xaf\xdd\xdd\x8aY\xe8F\xa9\xbcN\xc7\xbe~=\xcb\x03\xa1@d\x03w\x84\xe5b\xd0q=\xe0\xf8A\x9cD\xa3\xf2\x079\xd2!\x85,@*\x1a\xdf_c\xd3\xdaJ\x9d\xae9\x83?r\xb7\xb82\xab\x9b\x84n\xe4+\x90Yu\x9a\xca\xf5\x9d\xc3\xe9Cy|Q\x85!Qr\xc6\xbd+s\xe2\xf2\x98\x1b\x90\\\xc6\xcb\x178\x17\x9bh\xaa\x9b\xab\xe0\x1by\x1c!\x00\x9fWt\n>(/O)=o\xf5\xbe]=[\xfcq\x8f\xc7\xa8\xc7\xfc\xf4\x9c\x9fr\x17WN\xed\xf2\xff~\\\xf2\x1c\xcfH\x1ejU\xfd:\x08\xba\x91?<\xa5\xe8$\xdc"\x944\xc5\xbf\xd5\xc6k\xb3\xabB=\xc23\xa3\xe1_\\\t V\x8du\xe4\x05\xdck\xd4\xcb\r\xd0\x1c\xaaG2\xb0|\xf8\t6\x11\x1c\xd9\xe8\'\xd0\x94\xa1\xe6\x9d\x19\x96\xf0\xa4\xe3&j\x11\xb7B\xa3\xe1\xb1\x0b\xa2\r\x8f\xce\xaa\x1e\xa6%x\xace\xb1\x0b\x1a`\xa5\xedTb\x00Wh\xdb[\xf0\xe5\x12\nyp\xee\xa2\x08\xbbtD\x19\xb0\xeca+0\x95Lo\x00\x14\xea\xf4O\xa7\xa6\x1b\xfd\xd2@\xce\x10\xa6\xd3\xde\xf7\xcc\xd1\x14b/[\xb8\x1a[\xbd\x8e\x9a<\xba=\xb7\xc3\xb9\x95#)!\xcf\xf4\xfaZI!C\xbe;D]\x88fD\xe4\r\\\xbd\xb3\xc5|\x81\x1b\x9f\xfa\'\x08\x93\xa9\x18\xa3\x8e\xfb\xf8\xb5k\x8b\xb6\x89F\xd2\x98\x88*<\xb1\x9el\x19\x99twcZ\xf8)/\x83N\xef#Ik\x9c\xf7B\xe0\xc6U\xa9d\xaa\x03B\x9a\xbap\xf5\x0b\xa6\\3\xf2\'\xb4\x1am\xdfO\xde\xc1\x13\x94\xc7\xd9 \xa8\x07\xd8\xb9w\xcb\xc0\x1c\xef\x15X\xad\xde\x12\xa7iqB\xc5\x02\x16\xa4\xder\xfd\xd7\x12\xc8\x85p\x8c?\x982Q\xd0m\xe7\\\xbc\t2{\xbf\x00\xe1\xb11\xc7\x9f\x94\xa2\xc3_\xb8\xa9gj\xcc\xd9\x83z\xe2\x0e_\x98F\x98\xa1\xe8\'I\x7f\x00\x8f[\xbd\xc1\x1b\x04\xf0\xb2\x1d\xf6\x83\xbd"\xdc\xa4w\x8c\xf3C\x97\xbc\x9c\xfe!\xbcyn_\x87_\'\xdf\x16E\xa5F\x1f\xc6|0I\x0ciS\xfd\x18\xe5\xa6\x99\x94\xc4\xe8\x17\xd6\x8e\xcf.N\x83\xcf\xb4\x11\x11u\x1d\x18U\xfc7\xae\xd5\x9a 
q\xa0\x88\xfa\x14\x97\x84`J\xb7\xb2\x0b\xda\xddmv\xc2#\xd9\x99M\xcc\xd0\x04V\xd2A\xec\xean\xfe\xe4p`\xbf\xb7\xaf\xaf\xab\xfb\xa2\xf0\xd4\x063J\x1f\xbc|oc\x02\xea\xb0Q\xdfg\x16Y\xd5\xd9\xfb\x8a\x98S\xdbh\x02]\x0f\xd2\x1a\xe4I\xa6\xed|\xa9\rz\x9ad\x18\xbf\xb8\x15\xfc\xb8\xc7\xaf\x1e\xcf_R;~(g\x0c\x94e\xd47\x91\x10\'J\x0fl\xbel8\x8d\x11J\xb3,\x1f\xa1\xe4\x07\xa4\xda\x81|jvfK\xa3\xac:\x04\xc6k\xa6P"K)\xfa&\x18\x15O{\xb6\x08\x89\x04\x0fZ\x018\xf3\xe4\x11\x11\xf3\xadh\xc85\xe5Z\x9b&\xed\x8d\xf7\xea4\x94\xa7\xf8\xd3r\x9a\x92\xf1(\xb4\x1d\x9a6\xe7\x9as&\xe9\xd4A\xb7*Q\xdcK>\xb3\xa4\xa1\xf8\x8b\xc1Vu\xbd4\x11v$\x18n\x06\xbc\xbf\xed\xb8\xe59@\x16\xd7\xa4\x17\xa6{\xf7\x006\x90\xa3\xfdV\xc3CA\x89\xc0{\xf7SX\x05\n\xbd\x9e\xfd\x06\xe1+\xb9\x16J\x1d@\xe9=\x85!\n\x04\x1a\xf9\x86\x01\x8f,p\x05hl\x81\xfd\xc1\xccc"$\x1aO\x18&\\\x0c\xa5\x80\x97\xf6e\xd6v\x1c\x97\x9e\x16\x7f\xf3\xc6#Z\xe9\xd5w\xe2\x1f\x90\x0c|L?X\xcf\x97\xed\xe8\x93:R\xc7F`\x97>\xaaG<=\xe4\x9e}H\x14\xf8\x1c\x7f\xfd\x83\xe1{M\xaai([\xa1%\xc6\xd5\xab\x8de\xb1\xd0\xa7X\x002\xdd\xb4\x8f}\xa2\xf5\xc6\x97\xba\x84#k\x7f\x8f\xd6\xa4\xde\x86>r]\t\xaf\x97v\xfcv5\xf1\x19\x84\x81\xca\xe8\xfb`X\x9d\xbfS\xa3\t\x91u1\xbd\t\xbc\xe4\xa1\x0e\x84.\xe7\xaa_Y\xc8\x06\x0e\xfa\x8dO\x1e\xa9\xb3\xce\x1f7U\xa3m\xf9\xee"7\x15u\x18$At\xfa\x82\x8b<q\xb3\x06\xde\xc8k\xf7\x8c\xba\xed\x8a\xe2\xaaX^\x07J\x9f\xd3k(\xfbB/*\x1dTs\xdcn\xf0m7\x13\xacK\x8a\x1e\x0e\xea\xd7\xe9\xccH\xb1\xb1\xda\xcc\xa88\xe5^\xd9\x12Zb\x90\x9f\n\x96\x0b\xaeB\x9eXy[\x08\xb3\xe3\xbd\xdb\x9fn\x0b\x92\xf1\xc9\x95\xb3b#\xfd\'c\xfc\x19\x8fmR\xe4PG\xe6xq\x8b\xe8j\x1b\xa5,\xa0\xdb\x02Aw6\xf7\xcb\t\xbe\x0f\xa4T\xdb\x196\x96\xed\x08J14\x85_y\x1dK\x05\xbc(KHu\xabpr\xfdS\xa6\xbf#\xedpP\xecH\xb2\x0eaeu\x17M\x97S\xbe7\xde\x8e\t\x82\x93\xb7\xa5\xcbc\x98\xb3\x8e\xef\x92\x99\xc3.\x8b]\x08\x1fM\xc3b\xe4tP\xf1\xd1 
\x9b\x18L\x06\xe1\xe3\xc9\x12\x05fP@\xf7U\xc6\x98`?A\xf8\xce\x87\x9f\x93\xa3\xe2\xf4\x00B\xaa\x8a\x9e\xde\xd2\x15&L\xce=\\i\xb6\x96\xa4\xc4\xfe*\xb2\x83\xc2b\xbb\x8b\xa1J\xd2\xb9#\x17\xdb\xd9\x0b\xa7\x0c\x84\xea\x9a\xb8\xd6\xa4\xc5\x1c\xb1hIc\xd7\xc6\x9aw\x06\x9a.n\x8cGb{\x1b\xb1.nX&\x07\xdb\xe8\xd4\xe5\xb1c7\xcd\tO\xd3.\xd54`\xc0\x96\r1\x8e\xac\xcf\t3\xe4\xc3\xfa\x1cg\xa8\xa4j\x1b%\xeb$\x8d\xfd!\xe9\xc3\xad$\x9d\x800"4\x9e\xd8+\xab%S\xf4PTh\r\xd1\x1c\xeb\xfb\rg\xeb3\x1e\xe39\xe8\x12\x1d\x04O\xc4\x95\xdb[,J\x97:\x98,\xc3]\xd7N\xa6\xf9\xc3\x1c\xd0\x04\x1f?L\xd7\xca\x9c\xd4\x073\xd54\xfd7\x08+O\xfd\xd3\x02\x9b\x9d_\x08\x8ew\x94%\x87\xa4\xd4\xd4\xc0\xb9\x1d;\xeda\xf7"\xd8\xe1\x9b7~\xa5BY(\xc0\xed\xd8\xfc\'\x0e\xe9\xfa\x888a\xe6xh\xbfSD=\xc6\x87\xa4\xd8\xd8#\x00\xac^\xf4\xfb\x0ba\x17\xc1(z\xed\xa0\xfb\xbcn\xbf\x88\xfa\xc8\xdcU\x7f\x84S\xf0\xb7K\xd6O*o\xb6\xfat\xa3\x17\x7f\xd1\xb9\xd2"IL\xc61\x0f\xe1,\x0f4ZC\x06\x05\x91\xe7\xcb>{\x8b\xd6o\x02`\xc6\x94z\xbbQ\xc0\xdb\xd3\x80\xd3\x92\xb4J\xd8\x9d\xcbv\x7fo\xd2\x9e\xb1~\xfe\x1b\x18<,(\xa6~B\xa2\xed(l\xad\xf7\x14%\xfb\xe6\xea\n\x10e\xa1\xc3hl_\x8d)\xb5y5\xab\xee[0\xdf\xde\xe6\x8a\xc3@\xd8\xd6\x0ep\xed\xdd)tz\x1c}\x8d\x10\xb0\x03|\x0c\x98\x9a[\xab\\\xf3E\xcf\xcb\xf4\x1f^\xb2y\r\x98\xed\x1dDa\x07I\x11\x1f\x1b\xd7\xe5\x9e<\nR\xc3\x18\xa1\xeb\xca\x05\x17\xac\xdf\xc4]\x0e0\x98\xa4\x8d{q\xd8J\x14\x93\xff\x9d\x15\xe8a\x16\xb0~ 
\xf28\x84\xbd\xef\x01\xd9J\xfe\xf0\xaa\xff\xba\xcb\x13Nk\x18\xf5\x01\xdd\x10\xb9\xdbF:9\xf2\x9c@\x1f\xfbT\x91$\xcb\xc6\xe4\x02\xa8\xbc*\xcce\xbb\xe1d]U%h\x05\xe4\x0e\xaaS\x8dF\x88|V\xff~\xeb\x86\xad\xea3y\x15\xf5\xf8~\xd5\xc1X[d\x99\xc8\x01XoS@\xe5ny\x1b\x8c\x89c\xbc\x0ew^\x1a\xe5\x12\xe1I\x88nK\x8a`\xff=\xa0\x99II\xa7\xbfV\xe2\x91\xfc\xb97F\xbb\x8d\x1d\xb8\x06H\x1b\x19E\xd8/\x86\xb6\xe8\x8c\xe9\xdbJ|oo\x17\x9f\x9f\x86\xfb\x0f\xcay!V\xb0\xf4R\x17\x8c\x92\x8f\xec\x99q\xa5\x9c"K\xa8\x02\xf5\xfa8\xb3\xab\xc6\'^\x9eo\x13{\xa0\xfc\x1b\xeaz\x14\xb9\xac\x07\x03\x88\xe4\x80KT\xd7V\xec$\x1cFbD\xa2\n\r\x93/\xf30\xa1*J\xab\x04q\xc88T\x11g\xb95\xbbh\xdb\xa1\xac\x9b\x96\xfb\x83\x8b\xd61\xd0\xd0\x0b\xe6\xd6,4\xd7\xb6\xae\xf2P\x984\x81A\xc4p\xa7\x14\x00p2\xe6\xeb\xc2\x84\x04\x9b\x137MJ\xc2\x0c\xe1b\x0bhl\x7f>\xe8\xa8w^\xb1\x06wt|i\xe2\xc8C\xba\xa1\x1c\xba\xf6\xcd\xdc\x85\xa3\xeb\xd6X\t)n\x80NA%\xb9\x1f9\xc5c\x9a\x00_}\x81\x83\x9f\x87\x94nf}\x08\x1c\xacV\xb2@\x07\xf3\x0b\xe5I\x05\xada\x88\xd21\xca\xb1\xce\x0e#\xea\xfd\t{]\xd5\xd63\x81\xe0\xa2\xb4m\xdd\x9dNh;\x0c/r\xe5K\xee$\xc2r\xa2CKI\xf9FO+\x9e\xc3\xda\xaa\xc4@\xb9\xb4N\x0f7\x1cB?$\\\xc4\xc9\xba\x08\x9e\x9a\xed=a\x14>\x9aN\x19<\x0eWq\r\xa3\x92*J\xdf\xd6\x8b\xcbB\x88*\xb5\x0e\x15&\xc4\xfe\xc0\xe9\x01\xb1\xca[\x04C\xbb\xcfH\x00\x99\xd5\xab\x10\x92_0\xe5\x8a,O\xb3\x89\xa0\x04u\xd9\xa2\x97`\x90\xe5\xa7\x0c\x1a\x92{\xd2\x04_mZ\xedsnX\x83w\xe0\xb3\xfck\xd0\x14\x97\xdb\x0c6\x14\xe7\x94m\xdd1\xb6\xdd\x9a\xc8`\x0c\x896$\xd6>\x15\xac\xf5+Ft5&\x97Z\x94\xca\x83\x07\x01\\\xe2\xb0\xc7\xf7\xb1\xe8\x98\x0e\xe2\x93\xe9\xd2@\xf5\xfdc\x99\xf6p:\xee\xfdvw\xa0\xbf\xfd\xcb\xb0$nBb\\.\xa1\xfa\'\xa6\xf2\xc0\x80\xc0\x83cpV\x02\xb5\xc7\x80V8\t\x8ai}\xa6l^0\xab#\xa1m+\x05T\x84\xedMA\x89=s\xf5\x9b\xba\x89<\xe7\xd1\x92\\\xf34#\x89Ot\xd6K.u\x9er\xee#\xdb\xe8\x0e\xe4pK{f:\x128\xe0l\xb9u\x1dH\x1e\\\x93\x9f\x84\x0f\xfe\xe3}\xae\x03\xe5\xda\x9fX \xea)\x92\xb4\xec\x16~\xca\x84\xd3\x80\xb0\x89M\xad\xe4% 
z\xcf\x18\x92.z%\x9f\x8c-\xbc\n\x8a\xe0^OY\x08\xdf\xec\xe0\xa9\xcf6-\x06?\xaf\xd3t\xcc\xb2\xb3\xf03\x01\x84\t\xe8k\xa5\xd7\\\x92\xc2|\r[,WkU\n:\xba\x19~c\xe7\xcf\x08\x99\xd8(Z\xf9\xbd\xff\xec\xad\xd0X\x82\xfc\xd4\x86\x81\xd8{\x9b\x996\xf3\xc1Ul_\xb5"\xef\x85\xd8_+\x835\xcb\x9e@*\xc3\xeff|\xb8\xb8\xae\xf2=\x8b\x7f\xf6<SY\xc0\xf1\xac\x11z\xec\xf5F&Q\xc7R(Gu\xee\xf9\x1a\x8cP\x94\x07;V\x16La}`\xcfN\x8e\xe81m\xd7\xeb4\xa2\xc1\x97\xee{\\X\x8a\xb5\x95\xc4g\xe4bh\x12\xd6Vd8\x8a\x97Vx\xbb\x8e{\xa8ca\xef\xd4A\xe4\xfaZm\x9e~\x0517\xc3\xd7\xe2\xac):\xcf\x97\xf5\xbf\xe4\xe5\'\x97\x1d\x93\x96\xdd\x8dgVZ\xec\xf3\xb5y\x93\xe0B\xdf\xb8\xed\x81\xbcRk3b<\x03\xb4\x14V\xe4\xdbg\xc7-\xa1\t\\{\xc0\x95\xeb\xeb\xc1\xfe\xd2U\xbe\x1b+\xa2I\x15\xbb\xf5\xeb`g\x14?$~K\x80ab\x86\xe8X\xfb&\x95}v\xd0\xf6\x96C\x15\x18w?\xdb\xc5\xc8t\xe1\xf8\x9a+\x03\xda\xe9\xf3\xfc9)\x94\xa0\x9a\xca3R\xdd\xf5l+\xb5\xccG\xa0\xbc\x8e\xa46*\xa1\xb1\xb9B\xc46\x00\xce|\xe4c-\x07:$\xd6\x96\xa8\xdf\xc2Bf\x86>8M6q\x13m\xe0y\xaf\xfe\xae\x07\xfb\x8e\xad\xdf\n\x1f\xe53\xb2g\xd8\x97e\x7f\xbc\r\xf7\xe4\x14Eb\xf2-wb\x07"\x85\xc8\x96\xd9\xe0=\x83\xad$mh\x00"\x02[\x15g\x00Ipo\xc5\x9d\x9bf\x1b\\\x13Zn\xf1\xd9\xc4\xb0[\x1c\xa1\x93\xaf\x0c\x9c\x93\xdf\x16:\x9d\x89\xfe\xc19\xdam\xe2\xb7\x08\xf2\xfd\x07\x83\xb2\xb5\xc0\x0f\xa4\x0f\x93\xe2\x18\'\xe3\x1e5kYNimT>\x0e\xf3\xccgiS\x05\x08\xc4\x7fkb\xa2 >1u\xb7\x9a\x14\xc5O\x90\xca\xa7 
E\xe7\xe4\xf7\xdc\xc4\xf6\xe3s\xbd\xd6\xc0\x8dhfh\x16\x95$z|\xd6\xfc\xed\xc8\xda\x98\xf0W,f\xf9\xc9+3\x8b/vd\xde\r\'\xc6\xee\xa3\xb1\xfe\t\x13/\xcd\xac\x9e\xc6\xd4\x17|L\xe9\x980\xe3\'1\x06C\xe2.\x85\xdc\xf9\xf7w^\xdc^c>^B1H\xcc\xa4\xe1\xecy]1\xd3\xe5\xb4m\x01\xc6\x02\xc8k4\xc6x#\xa1\xabv\xd5X#\'{1\xfdf\xf4*Xs\xc4\xfa\xcf;\xa8\x80\x14a_\x80R\x87e\xd2\x16f\x83K\x8b\x9e\xe2\x88y\x10\xfaj\xdb\x0b\x10>7%]?\x04\xd5\xf7\xd2\x03\xf8\xf4\'v\n\xab\xa9\xd6xR\x83\xf6\x7ferBeH\xe9\xda\xbf",\xf9\r\xb73\x9f\xc1%7p\xb4I\xc5\x83\xedr\x17_\xc5n\xaaXW\xa7=\x9c|\x19\xe1\xf78Lci\x03\x19[\xe3\x87\xb5\xd5N}1\xac\x12Z\x1eo[\x87\x9674\x93\x96IN\x9a\xf4\x1dH%\xee)\xd4\xc2t \xb8\xdd\x80\xe2V\xd6\xb6\x87\xa1\x15c.uPC \xf0\x12\x9d\xf6\xa2\x9e\xf3\x19Pcq\x04%\x81-6&r\xec\x9b\xb2\xb2V\x95g#1\r\xa6\x86\x89\x8cZ\xcc#\xac\x03\x00kS8oUc\x8a\xd6\xdbHp\x83\x9d\xe7E\x90\xab\x1aXpl\x82;\x16\x04o\xe0\xccE\x7fG\xe0\xbd`\x9f\xe1P\xf3/\x17\xb8\x80v\xf2^\xdeMk^\x80\xcd2\x03\xacGxo\'"ki\xf7\xac\xc3\xe9O\xbe\xe6\x88\xc0_\xcbE\x81\xd0j\x17\xde\xe2\x96\xe9\x0e\x85!\x8dm\xcc\xd2\xbd)2\xe1\xb27\n\x18\xd7\x1b8\xa0S0^E\x0e,\x05\xec ,\xe7\x0f|\x7fJN\xbe0"u,\x14\xe0$5S\x9cJ\xe6 \xe6\x8c\x9c\xfb;\xb1\xf3\x1a\xd2\xdcP\xe0Tw\xc6\xdb\xbek\xaa\x8f\xc4W1r\xb3\x1cB*\xf5\\Iy!\x0c-M+\x83z\x1e\x9d\xc8\xb4/\x1d\xbf\xc6A\xd6^e\x83\xf2~\xab1\x89\x94\x0f\x0e\xedQ\x1c\x1db\x1c\x97\xee\xc4t6\xef\x9b!X\xfc%\xa3\xbb\x85\xb0\x96n"\x80\x91\xa3\xbfR\xac\xcd\xd4\xa1\x0b\x19*m \x03\x83a\xba\xc7\xb6\xc4\xa2\xb3X\xf6\xcf\n\x9a/\xc2.=\x86>"O\xb0P\xc4,;\xfa\xa4.\x04A2u]FV\x04M\xda0\xac0a\t\x04\x07vz\xe4+\xb5\xddU&u\xfai\xf5\x94\x13\xc5oYAE\xd9\xd4D\x84\x01\xbbu\xcf\xa9\xe3NfU\x92\xa4\xabY\x97\x99S\x1au\x81\xf2u*\xb1\x85\xc9\x93\xb0\xe2\xef\x9de\xa2\xc9\xaf1o\xf7\x1e\x8c\xc5\xa4\xc2}\x1f\x8bX\xd3\xb0\xedKDzp\xe4p\x7f\x1dH\x0e\xd1\xea\xc4J\\\xa3Eh\n\xe6K\x96.\xef\x071\x1d\x03\x00xH\xbf\x85\xd4\xf5\n\x9d\xc0 
|\x94-\xdf_\x94\xd9\xec\xce\xa6{d\xab\xed\x97\x17\x85\xe5\xa4\xd17<\x1d[\x98\xa4\xa5]\x87\x11\x92\x14\x80\x99\xbfZ\xe0\xbe\x825\xb3n\xeaR\x12$\x9bI\xd8\xad\xff\xf6\x94\xa3\r\x83\x0f\xda\x05)\'gr8\xb31{\x14H\xfc\xef?9\x01\xd6\xc0St&\x07x\x0f\x15B\x9e\t\x9c0RK:l\x97\x0e\xe2\x14\x0fIg\x92a\xb71\xb8OFM\xa2\x019\xe8^(\x97\x9d\xca\xd7,\xa1\xe8\x99E\x12\xc8\x88\r\xb4\x03\xa1\x9b\xe4\x11E\xc8\x03\xde\x94 \x8dL\x8efOH\xe1\x96G\x8e\x7f\x05\x91V(\x04|/\xf4\xba\xcc\xa4\x85\xca\xcb\xfej\xdb\xa9\xdcR,\x06m4\'\x08+\xe1\xd2\x87\xdc\x8b\x924\x0bx\x9b\xf8\x00<\x10\x9bF5\xff\x16\x05b\xd4\xac`\xf0[\x9d\xd6\xee\xbf\x18$\xe2\xb5\x17\x8b\xc2yu\x87\x0f*\x9eC\xa6\xeejN@\x94i\xe4\x8f\xa9\xbd\x1a\xb9\x14(\xe7\xc3\xae\xed\xfb\x81\xf6Pf\x1c\x01\x85=G\xc4\x06\xdc\xb6W\xe0aUL\xcf\xc7r}\xf0EK|\x7f\x00c\tS\x89\x8bi}i\xe8\xda\xcd\xd6/so\xe9\xa1\xb2\x0c\x81\xb2(\xfa\x87mh\x83,\x8ci^v\xe2\x8c6\x83\xee\x866\x01\xd2b\x00O\xb3\r\x81\x05\xd8\x95!d\xfb\x1a8\xdb)1\x88\x80\xe7\x01\xba\xcb0\x8d6\x8a\xc38\x9f\xbca\x16\xaa\\\xc8x[\xc6\xa2\x00\xc8\xc0\x95;\x81\xcdu#\x9f 
\xa6\xc1\n\xe3\rA\x83\xbb&\xa0\x82fq\xee\xbb\xa1\xf1\xfeD\x08v\xe8\x01\xb4\xc7\x99pw\xcc\xaa\xd6\xd8?\x7f\xd5G6\xd0}\xd1$\x1c\xe9\xec3\x80\\gmrJ\xdc\x85\x02X\xa4\xc1\xad\x91\xcdJ\xcb\x8bJ\xeeL\x1cL\x02\xe9\xa9\x1f?\xd7\x83\x8c\xb5\xb6\x10h\xc5\x9a<j\'\xdb\x7f\xe0\x97\xb3k\x85\x07\xd9\xb3\xdc<E0\xb1[\xa1\xb1\x07Z\xbb\x18QU\x18u\xab\xc5\x18\x1c\xfcX\x9d9\xd9\xa5\x0b\x1a$7~W\x18\xbcl\xb6\xd6\xce\xa6\x12\x90\x0e\xbf\xfd\x99\x9f\x12\xaf\x9d\xb0\xad\xc9W\x91L\x93\xb9\xe4-\xc5\xb7\xae\xbd\xb0\xb5\xb7nEe\r\x07\xffT}\xe5I`\xdfa\xe9\xf3?\x00\xb81\x997\xc6\x1aMC\x8f\x94\xb2l-V\xd06\xe3\x10q\xaf\xd2V\x9f\xa7\xacs\x97{\xc9\x13\xed\x96\xdae\tl\xbe\xb4\xe4\x82!\xb5\x95\xd3[%\x0c\xd4\xf6\xb3\xed\xda\xd9\x9c\xecf\xb29\xd9\x98\\;sr\xedl\xdb\xb6=\xd9\xb6]\x13&\xd7\xce\xfe\x9e\xf7\xe0\xbb\xde\xbf\xe0=\xb9\xd7o\xdd\xeb`\x9d\xdc\xebZAP\xb9\xefw\xbf0\x9e\x066\x190\x1b\x98\xd6\xf3lw\x1e\xaf\xb2s\xe4\x16j!\x93(\x80;\xceCk\xea\xe3\x02\x8c[\x80\x1249\x7fp\xd0\xc7>\x0e\xe0N\x08\x7f\x95Y\x9eP\xbbi;\xf4\x91\xdf\xd3\xe4o&\xe4\xd7*\x03\xcb\xc2\xabnbK\xd8\xad\x05\xf3O\n\xd4\xc9\xe5\x17L3jR\x98\xfaL\xccK\xdaG-\x99%\xdej\xc3Y\x8b\r\x12\xbc\xc7=_\x95mN\x8a 
Z\x84\xf4\xf6\x15n1\x92h\xde\xa2P@\xa0\x8c*#f\x96\x1f\xdc\x9a\xaa\x10\xa5\x06\xe8\'\x86J"\xb3\xbbL\x8b\xbb\x9a\x1e\rJ\xbe\x9b;g\x92\xddf\xeb\xf4xkL\x8em\xe3\x7fDg\x0b\x1e\xca\xb1S\xa85\xb8\xecy\xd2\xf7\x99}\x81=\xb1\xb8\xfd\xb5\xfdMC\xed\xb6\xad\xb1\x06}&\x8c\xa68\x1b}\xdb0\xf1\x0cC\xc7\xaa\xd15\xcbb\x8a\xfcJ\x97\xda\xbc\x1a\x15N\xdb\x13%\x01\x14\'w\x07b\x1f`\xbd\x01\xac\x84\x91Mv\xafp\x0f\xf5\x86x\xba\xe6\xcd\x91\x15\x87S\n\x1f\xc9/\xdb\xf4t\x97\xa6K\x90W}iD~=\xb1CP(\x084\x03\xd4u\xb3\xc6\x8c\xb2\x8c\x82>ks\xe8\x99\n\xe2\x94\xe3\xb2\xfc\x1f\xa6\xd5a\x14+j\xc5Mz\x14p\xf2\xe5\xc8}\x85\xc2\xb3w\x05\xd3\xf9\xe3\xdd\xb5\xf1\x05\'o"\x11_\xad"\xf3\xdaPg\x8a\xe9\xd7\x8e\xbe^\xa1\x07og\x0f\xa7cn\xe9u\x11>\x9bPc\x11\xcc\xa7\x18\x92\x01\x9f\x01\xb6\xbb\n\x04\x14\xec\x83\xbd\xf3\x16\xe7\x153\x88\x90\xb1\xae\x9e\xb4\x04\xe2\xa8EE\xdb\xe7e\xec\xcbm<"a\xa6\xac{\xeb\xbf\xb0\xeb\x8bo\xef:\xed#\xb3BGS_\xbd*Myv\xe7w\x14\xf7\xc0Ml\x8f\xc3\xf2\x9e\xe5z\x96;\xabG\xf6U\x05\x06\xa7\xdbt ]\xc8\xf7\xae\x1bL\x8c\t\x87)yM\xebc\x92U\x02\xfaYP:\xf6\xe51\x15\xe8\xb7\x0cW\x05\xba\x88\xca\xa8\x94\xc7Z\xdb\xe6UD\x14T\xc5\xf6\xaft5\xf2\x99\xd9\xb42\x87B\x0f\xe26\xbd\xf4\xfe2\xd2q\x8f\xbf\xb8lM\x0e\x8d\x02\x89G)\xcc\x1b\xb6\x0f\x1f\xe7\xa1\xad8\xd0\xba\xdd\xbeO\x06\x9d^\x89]\xbaj\xce\xb8J\x8b=\xdb&\xef\xe4\x1d\xe4\xc4v\x17\xd8\x92<\xc2\xe4\x92\x1c\xddl\x81\xf8S\xd7d\xac\x97\xa2\x04\xbd\xaf8\x85^m\x00\xd5\x08)c\x87\xfd\x1b\x82"\xfb`.w\xdbME\x8c\x0f\x03\xc6\x08R \x8d\x0b\x02UQ\xe7\xb6g\x9f\xf0m\xf2\xa7\x06d\xd7\xbb\x15k\'\xaa\xc8\xc1\xads:\xfe*\xd7\'\xa7\xf5L&s\x9e\xf5~u\xa3\x9b\xbf|\x88\xae\x1d\x01=O&\xf9#k\xef\x84\x85%\xaa"\xc95\xaaP\x99}v\\s\x16\xbc\xe4\xbb\xdf\x16*\xd3\x92\x93x\xbcp\x1b\xf7\xa8;\xfe\xba\xdbG\xe4\xbcl\r\x0c\'\xa7\\#\n\xc9X\xf4q\xf1\xaa\xfe:\xee\xe8\xef\x9e\x08Y\x9a\xbeg\xc0\x01\xb5\x0c\x82\x04N|\xb8\x90/F\xd9\xaf\x89\t\x1c\xb9\xc5\xda"\xc5\xab\xe7\xd8P\xa3B\xdc6\x93\x8d2\xccq\xa5o\x0e\x12\xcb\xd9\x98\xfd^\xc8\x8a\xa9\x0eVKl\t\xe5+\xcdeL\x88 
\xad\xfe\xf7{x\xe4\x01\x92\xf7\\U\x8c\x8d\xe4\xa9\x8d\xe12\xb2\nQRk\x16\xe0k#H\xd78B^\xc2\xe9\x06\x1d\xb9s\xd0ahVP`\xb6f\xd2cb\xca\xef\xbawPT\xd1P\x9b_\x85\nP7\xaeg\xf8<Q9\xcaX\xca\xfbn\xfbS\xd2\xa4\x81\x11\xdf\xa1\xda\xe6Yk5r\x01\x1c/\xcc\'\xfevr\xd6\x9f\xad\x1e\x8cb$T\xe4\xb3L\xefZ\xde\xe2\xd4\xd1\x82\xd0\x99\x9dI@rXB\x1c[H\x84\x9fw\xf3O\x9d\xd7\x0bw\x0f\x06\xd6\x1a5X\xa49\xe7\x80\xe0\x150X\xe4qi\x9e?<K\xac5\xb5)\xc3Li\x03\xfd%\xda\xbc\xb3\x1e\xe9\xe2Aa\xbb\xbc\x15TU,y\xdc*\xady\xce"L\xc1\xe8W4\x96 \n\xfd\xf8\xfb\xdeyr+(\xf1\xea\xe9\xcc\xa1\xa1!\x16\xear\xbdE\x9dq#\x80\xb0\x06f\xa2\xf6dt0lQ\xd7\x16N\x1a\xf7\xfeb\x1f\xa0\xac\x88\xf4\xf7\x15\x92w\xa2\xdd\x8a\x86+\xb0_B\x19{\x84\x7fE\xe5\x03\x97\x06\x94!\x82\xfe\xeb\r\xe1W:\xa0\x02\x7f\xa9?\xef\x14)\x87}\xe0\x86\xac\xcb\x9a\x83N$"\xf5\x91\x19lv\' U\xf7P\xd1?\x1d\xfe\x0e7\xe8<\xf1\x87\xa9z>)\xea\x82M+\xf8\x90\xccqr;\xf53\xf7_\x85z*\xf4\x13U:(\x00P\xef\xedy\x7f<\xfa\x99Bl\xc4\xa3\xfa>_\xfe\xa7o\\#C^\x16\x9c\xddn\xad\xeb\xb1\xe4iL.g\xf3pL\x0c\x9f\xfb\x07\xfd\x07\x8a\xb6T\xac7\xd2\xd4\x124\x91\xae\x00#/\xd8b\x82\xc5\x86K!T\xf3srR:l\xbd\xc5\x99\xc8\xe9\xac\xa9\xa8\xae\x08\xe0X/\xed\x1d\xbc5A\xe4\xa7\x93\x97\x96$x8\x93/\xc1<\xa1EQo]\xfe\xe9\x8a\x97u\xd1\xb9\x10\xe9B\xd7\xed)\x93\xcc&\xadqj\x05\x89p\x80\xe6\xb8\xb8F\x1d\xe1p\x1d\x87\xeb\xc5ib\x08\x87\x92\xc2!\xd7o\xcfU\xca9\xc9\x97Y\x1e\x89\x851\xf7\xd2\xa0\x0e^\xe7z)\x873\xfe\x9b`\xec\x9e\x8a\x84\x07\xc4\xae\x02#\xfb,\xb8\xda(rD\x86c\xa4\xfc\x08Aq\x0f\xcb\x93-Y\xad\x95\xbd\xd3k\xa9\xa3\xb5z\x8a&\x8cl\xf5\xe9\xa2\xbb\xd2`A\xad\xfa\x90\xab\x8a\xa4d\x9e\xfe\xfd\xe1[\x95\xc8\xb3\xba-\x80\xfdfBNcO\xd4\x17C\x19\xe2\x96S\xcd\x03Q\xe8{D\xc8\xb4\xf3\x10X\x99\xb8\xf3`4\x1c\xbe\xdeG\x9c\x89!\x0cx\x19\x9dBT\xdc\xaa\xdd"\x1eJ\xc4\xbb\xd0u\x01to?\x07\x0f,\xff\x8b\xbb\x8bn\xbf\x9bk\xf9\xfb\xf5/\xe1\x8a\xc7\x8b\x9a\xcb=\xab\xc0\x1f\xcb\xe2\xa2\x9d\xd12 
\xb1Y\xd4$\x07\xee\x0f\x8a=m"Z\x12\r\xcd.\xccj\x8d\x1e\x96\xc0\xf9\xb0@\xdeqg\x9a\xa9pj\x13i\x87\x8fn\x1b\xf6y\x1a\x01\xad\xa3\xb4w\xa6;\x7f\x1e\xa1\xdf\x99\x1e{c*\x9f\x87\\\xa3\xc40\xdf0\x17t\x1d\xc5\x19\x92\xcf\xd5\xd56{L\xb0-\x9b+\xda\xf3\xa6\x8a\xa14\xd6\xf55=fD6\x81\x8b\xe7!\xad\xf8\xb9\x18\x96\xfb\'\x05S\x9e\xe7\x8c2K\x8f\xcd^\nXa\x89\xdck\xdbrK\xca\n\x82v\xef%\xb5\x82\xf6\x84\xe2s=_\x0e\xd1\x04\xa5d}\xed\xe8\x9baI\xa7\x16w\xba\xbaM\xb43\r\x01\xde\xe8r*5?\x0c\xba\x82\xff[\xee\xbc\x96]^,\x82D,0\xfd\xb1n\x81\x85\x055\x9b\xc7\xad\x8d\xbb\xc5c.\xed\xbb\xfb\x11\xbcB\xf6\x0c\xbf\'Fo\xc8\xc1\xee\r\xe1\x87d\xde)\xc4\xd0\x07_W\xf4x\xc0\xe7\x15j\x9c5\x8dZ^\xd1y\x82\xef\xb3<\x91\xba\xd1\x97xf\xee\x19\x1a\x08&W/\xcd\xfcq1S>S\xa2\x9bf\xd9\xb4\x0f\xd5\xce\xd9\x96\x17\xf4L\x8fD\x8b\x8du\x0b\xd50\xb0\xaa\xea\x87\xbds\x9d\xbf\x1c\x10E\xa44\xbd\x85sVX(r\x82y\xf8\xbd\x0e(V\x13\xbd\xee\x14\x9a\xa9\x08bRU\x92M\xff(\x1c\xbd\x0f\xdf\xec\x8b\xb5\x13\xfb\x8d\x8b9B\x96\x9c\xe45L\x89j\xd1\xe4O\xd2\xa5\x9e\xca\x93\xc0\xd6\xfbz\xb70c\xb4&2\xaa\x1f\xd7\x9c#\x89\xb0\xb3\x94\xa8\x17\x97\xbe\xbdhh\xfd\xb1\xa2\xe7_\xbcv\xad\xa1\xad\xc4%\xdbky\xf1\xce(\xf9\x87\x99\xf0%F\xe7/\xb5]\r\xcf\x8d\xd8]\xbd\xebX\xc3^\xd6\xcb\x87L9\xd6\xd4\xfe\x7f\xe7\xebe\x91O>3ub\xe7\xecz\xf98\x1fL\xae\xd5\x91\x0b\x91\x16\xff%U\x05\xeb\xccOSu\x82\xe4\xfc\xc1\x91H\x9e\x07\xa5o\xafj)d\xc2\xf48#\xe5,\xcas\xe5\x93\x13\xe9u\x82\xf8S,C\xce?\xd3\xfd\xc0&\n\xe7\xc1\x95{\xfct\xda\xf4\x83\xca\xbc\xf18\xbe\\\xe6Y\xc1\x08[\xc1\xcfIN\x8e\xe4R\xe7\xad\x88\x119A3\xcf\xa0\x05\xce/W>\xd1\xc4\xd4r\xd6\\n0\xe82\x15\xd4\x7f\xe7\xb7b\xe4!\x1b\x86\x0b\xb7\xff\x86)\x8c=\x1e\xa4\xdd@\xe9\xd0J\xf58;50\xa4\n\xde\xderVU\x83#v\xfe\x84J\xdcdz\x10VY\xe0Q\xd9\xa2\xa3\x8d,cm\xd0\x81~\xb9O\x9c\xa9!Ai(\xc9\x9643j\x04\xfd\xbc\xad\x1av}\x89\x06\xe4j\x00f,p\x1a\xd1\xaf\xbc\x8a\xbd\xd2\xd0\xf3t\n\xa7:s\x9e;\x80\xdb\xce\x18!\x13\x80\xca(1~\xa6y\x12h\x1c\x18\xac^\xeb\xe1\x18\xca\x1a\xc7&\xeeEp\x8b\t!f\xa6\xff\x11\x17\x91\xf1\xa9\x9d\' 
\xccet\x99\x9d\x8c\xba\xda\xe0\xdf\xa4\xee\xcc\xb0is,\x03\x86fI\t\x07\xf2M\'Q\x04<\x87\xaf{\xaf6\x10S\x1d\xfa\x96#OQ\xd3\x86\xbfW\x87L\xe2\xbbH\xc7\x18\x01??|\xbb.\xde\x88\xfe\xe9~i\xf3?\x8dm\xf1\x97\xc2\xa8\xa1\x9a\xe9\xc3]\x1c\x93\x87O\xbe\xfb\xa0I\xdb]Y8=\xaeNe\xef\x9e\xde|\xc3\xfbI\xb3\xd64\xff\x95C\xa4\x93l\\t\x01T\xf0\xd4`K\x07T\xba\x04s\xc7\x0e\xd4\x938\xb2%\xa9\xc6\xc5\xa6F\xa8\x80\x1a\x99\xbbh\xff\xf6\xdc\xb1xs\xed\x94\xd8\x82\x16(li\xfex\x95\xbf\x0b\xa07\xd5<\xe7\'\xb9>-\xc0\xaa\xc5\x95I}FI=\xb8~\x19\xeb6\xda\xf2\xdc\xc0\xea\xd4\xc7@\x96\xe1\xf4:\xe5\xd4\xe2F\xe2\xdeB\xb6{?\xc3Ge\xc9\xd7(\xfa\xb0@\x9fJ\xbev\xf6o)\\\xc5:\xe9\xd0\x1a\xdd\xb1Rl6>\xc6-\x94\xfdDOV\xa2beP\xf1\xd8\x19\x1dj\xfe\x17\xe2\xc50\xb9\xfd\x84J\xf8\xad\xb5\xa0\xa2t\xeb\xba\xce\xdbE4H\xd7o4\x8e\xec\x04\xde\x14\x12\xc6\x064\x08E\xaa\xf8\xe9\xd49\xd2\xde\xf3\xd2\xdd\xa6\x8d\xa3#\xf7\xe8\xa9\xf0\xb4\x1f\xd9/k}\xec\x8e\xf6V\xcc{;\x8c\xea\x9d\xe8\x81\x15[\x80h\xdb4\xe6\xc15h\xff\x884\x0f\xdaA\xf9\xd1\xc4\xf3s(\x96\xc1\xdd\x8b\xeae\xab\'O/\x96A\x9c\xd1\x05rf\xedK\xdf\xf4\xc71%\xca\x07\x7f)\xd1Wn\xd6\xb0\xedY\xa9}\x04\xac\xbf\xfc5.\x95$op\x81\xdd\xcd;\xda\xd7\xe1\x8a}\xee\xb1\x00\xac\xa1\xbc)\xd7\xe9Cr\xa5\xb9 \x8d\xb0\xe4A\x0fd\x1eV!\xee?\xe9t\x91\xf34\xb2\xb8\xae\x04{\x8a\xa5JVe>\xc1\xf9_\xe6\xf3y\xca;1!\x1b\x7f\xc1\x18\xd0!i)\x18RNi7D482\xe6\x11\t\x15\x7f\xc9\x9f\xb4z\xd6W\xfcN\x944^n\xbf\x83\x18Sy\xb8"\x07j\xc3u\xe7\x1aL-\x1b8J\xf3\x9a\xa6C\xe9%\x1ei\x89\x13\x85t\xe93[\xc5\x95\x08\xdb\xe2\xd9\x81\xf0>\xf9M\x19&q\x98.\xabmwzC\x1e2\r2\xbc\x9aR\'\xf4tH_|h\xaa\x1d\x01\xfd\xe8 
\x1a\xfe*I\xedx(\x99\x18\x8b\xbeH1\xbe\x02\x90\xd5Xt\x00\xcc\xe9\xb2\xbc\xa71m.\x0e\xe0\xb8d\x0e\xde\xb2x\xf4\xc6\xda\xae\xc2\xeb\x07+\'\x99\x1b\x9b\xd3\xadk\x9cu\x7fU\xf6\x9aw\xb7\xa33\x00b\x9f\xcb&\xbbC\x02d\n\x80\xa7\xef\xfak\x14\x96\x96\\#\x8fUzg\xa8\xb4\x9c\x95\xf8E\x9a\x9e\x1c\x15f\x93\x15\x1ce\xa1H\xd0\x8a\xe8/d\xef\xa7GW\xc7\xbfA\x96\xcc?R\xc4\xben\x96\x8b\xc4\xcc\xfa19\xed\xffA3\xb9\xef\xb9\xeb\x07r\xd4\xf9A\xdc\xa8\xb72\x0e\xa4Y^\xb8E\xa9\xcb\x868Ks\x07\x7f\xac\x11/\x19\x92\x8c\x91\xa2B\x0f"\xfc\xe07M{\xec\x95\xa4\xed\x0b\xc3$AZe\x04\x15\xcdo\xfc\xfe\x86\x08\xbft\xe9\xa3\xaa\x0b\xc1\x98ke_\xd3\xae\x95\xd3J\x07\xfe\xd4#\xeeCS\x11K\xa5\xe5\xa5\xd0\xc6~fr\xf9\x0c\xdcg5xK\x0f\xbe\xfbQ\xeat\xbc#Dc\xfc\xae:oa\xc8\x8c\xefTo\xec\xc4J\x0c\x95\xca\x96q\xeb}_\xb7qa\xcb\xbf\xfd\xc3\xbccFk\xe6Y\x93\xfe\xea\x93;I+\xcd\t\xce\xd6n\xae\xbd\x1f\x11\x9eC\xf1n\xf7\x8dZ\xf4z<\x9e\xa7\xaf\x90}\xae\xf8Q\xb2[\x83\x1e\xc8\xcf\xf0\xc9\xf1\xe4\xd8iR\x9d\xec\xf0il\x13\xa6\x18\xac\'\xbc"\xf29\x0f@\x1fr\xeb\x06/~\xa2\xb0L\xd4\x88q\x84he\x9e\x0e-\x0e\x0bu\x12\xe2p/\x152,\xc5\xe9s\xd4\x01\xd3\xd4\x1b\xf7%\x82\xfe<M,w1G\xfe0\xd7\x0bJ[\x95W\x83\x9dO\x1aR\xc6SR\x9b\xe2\xf9\xd4\x93SE3\xa8n.^t\x87@(\xda\x8e\x1bn[\xbf\x9e\xc1\xdaPRl=\x9c_\xd873|J\xcb\xf9M\xd4\xd29\xd4\xc3\xa5\x11\x7fOo\xdc1i\xc7q.\xda\xcd\xa1\xfa\xc5eL\xf6=\xd9\xe5K\x94\xfd\x0cA\x97\xb9\x1a\xa7r\xa5\x1d;{\xe5E\xb1[F\xfe\xa4\xacE\xc0\xeb\xc7b\xbf\xb09\xb3DQpA;\xf8\x8d\xa2\xb4\xba.\xc4\xc8_YSA\xf7+\xb9\xceT/\x8b\xbb\xf9"b\x86\xc3\xa5\xaf\xadw\xa7o\xff\xb1]\xd32k\x98fX\xca\x06*e\x9c\x04XV\xdf#\x85\xc8j\xd9\xbaS\xc71\xbc\xcb\xf4\x90\xc7\xaa\xbaQ\x165\xbfwB\xae=\x97\xe7u`\xbc\xff\x9a>\x96-!\xa2\xdc\xfd\xe3X\xd0]\xb3\xdb\xcf\xff\x86\xcb\xb9\xc9\xb1^h\xb9J\xf8\x8a\x88\xe1\x14\x17\xc6\x97\xcc\x85\xed|\xcb\x84\xe0\xdc\x92Z\x0b\x9c\x81\xa4S\xb2,\x13w`7b\xe5\xd6\xda\x10\x13\x93\xec\x19%\x1f\x08k\x08\x13\xefy\x11\xafQ\x90\xbdM\x08\xf8\xd7P\x97\xa6\xa1\xc7\x1b)\xae\x1f\xfej\xab])\xe9\x88z\xc9\xa8F\x01m\x04\xeaHLMX\xdc$^\xc0+/[\x1c\x12N$4\x
8eF\x90v\x88\x19\xbb\x92\x86\xbc\xc0h,\xf4)i \x1f\x07\xf8\xe6y\xe8L\xaf\x1e\xbd#U\xd4\x9eL\xda\xf9\xdd\xbc\xf8\xb6w5\xa5\xb3\x9fN\xcc\xd9\x8b%\xd9)\xd2=\x0fh56\xaa\x9aer\x93\x9dK\xe5\xf6<\x94l2\xbf\xed85\xbb\xf2\xc7\xb9\x0e\xf5Q\x17\x13u\nBba\xb1\xcd\xd9d\xadE\xc5\x89\x9b\x03X\xe6K\t\xb0C\x18\xc8\x98\xe2wD\x007t\x7fn\x85~d\xc6\xd0\xcc\xe6\xfa(\xf1\x1f\xad\x85\xf3\x80\xb0U\x88d\x80^\xc8\x96A\x8f\xa2:\x87dbC\xf1\x9a\x93\x1e\xba5>2(s(\x0e\x85\xbc)C\x8d\x995\x8f\xaa\xdc-\xeef\x84/\x7f\xda\x86\xee\xb7\xc7\xb5\xa6\xcc\r\\\x93?\xden\xa4z\x83\x9c\xfb\x1a\xf2_\xe1t\xa5\xf4\xf4\xab\xd5\xe1\x9c\xca\xda8\xfb<V\x17\xa5\xa8\x8fg\x84\x0e\xca,\x08wd\x17\xec\xb0?b\xb9+\x96MRl\x17\x13tDe\xa0\xdf\x16\x94\x97kb\xc29l\xe2),Jv\xad\xf6\xbd\xa8\xbf\xdd2\xa0v\xfb^\xa7g|\x08\xbc\x85\xc3\xfe\x95\xce*\xfc#\xb8\x90\xf0\x97\t\xebA\xe6\xaa\xb0\x85\xed*\xb4B\x1e7\xfc\xf4\xdb\xfd\xf9\xce*T\xd2$\xfe\xc6\tO0\xca\x984"lU\x8a\'\xbd_\x14\xcb\xc7AO|\x8f\x93\xbfI\xdb\xdf+\xc9f\x1c6\xf3\x87\xe0Co(\x8d\x9a\xd2\xe1{\xab+\xae\xa7\xb6\x81\x04\xcc\x8auX\xb6\x94\x17\x91\xed\xb7Q7\xfe\x92\xd0^\xe4\x17`\xb8\x91\xcb+\xfa\xcd\x94\xa0\x1eZgcep\xd2\x88\xdc\x06w\x1c\x11H\xbdKT;&\x92\x90\xd0Fk\xe2\xe3\xa3\x1aN,\xa9#_\xa72\xa8\x12\'\x0c\x03T\xf7\xd8^A\xe1\xb6/\xfd\xf3\xc7\xe0\xbeS\xed\x16\xdf\xdf\x0e\x0c\x8c\x9c\xbf\xeeF>\x7fh\xdc_Me(\xb0\x1f\x8e\xf4\xfb\x00\x8b(\xb0\xcc\x90o\xfd\xa5B\xdd7\xa4\x1e\xbf\xd7#\x9f\xb1\xc0\xea\xf1\xa7(\x07\xbd`\xd6$\x03pM9\x1fq\xd1\x8c\x84\x0f\xc1\xa83w[)\xe0&{\xac\x03\xbcfey\xd8t\xa3\xa8\x15m:+>\xb8\xd5\x82/\x81KR\xef0O\x00c9v\x9cM\xf5\xc0\x90\x96\x93\x8c\xa9\x7f\x8d\xc4\x80\x88\x1ai\x95\x96\xc4$\x8e#\xfb-\x0c\x1e\xe46Vj\x00gC\xe3K\xb8\x8c4\xc5\x84\xbf\xf0\x80\x01-k\xcd8\x90g6Qh\x93\x16\xa4\x92\xdc\xf4GClz\xcf\x12\x8f\xacz<VU\xfd:\x8c\x7f\x9ee\xb4A\x1c\xf4\xfe\xcd\xed\xea\xda\xffFX\xf3\xd30\xe0d\xac\xe1\xe6#e\xfcg=\xa6\xad\xa0\xabQ\x19W\xb3!%\xde\xc6(\x81\x9cm\xe1\xc7Q\x08\x04\x90\xda\x89\x1a\xa8Q\xbf\xaa]P\xbf\xbc\xcf\xee\xbe\xc6\xbb\x90I\xfa\xcc\x02!M[\xb8\xf2g\xca\xb6y*(b\xb9\xa3\x1f\x1c\
x8d\xca\x8112nHp\xbd\xc8q\xe8!\xd4#j\xe2"Z\xea\xfd\xf2\x18\xef!\x19<\xab\xc4\xa8\xf0\xec\'IS7Du>\xfb\xe0\xd1\x1e\x8a/\xd0H\x1b\xfb]\x9d\x81\xcd\xfb\xf6\xe8\x1b!\xbf\xbd\x03DQ\xc7\x12\xe6\xcf\xb5%|\x15I\xf7\xca\x7f\xfa\xfe\xb2MB\x0b\x85\xb0y>\x0e\xe4@\x11\xd4\xd1T\x98\x10jQH\xd6oE\x0e\xb8V\x02\xb6Z\x8dQ$/t\n\xe9\xc2\xb6\xb2\xd1\xdd\xf8\xbc>i\xe5\xab\x1d\xfexaykK\x96J\xf2\x87w\xc1\xcb,p\xcdF\x81Q\'a-\x1f\x9b\xd0\x11\xefl"\x1c\x98\xdd\x0fv\x88f\xc9a\xd1\x08\x90`\xc9\x9b\xdd\x87\xad\x1d*\xf13\xa1\xb1 U\xd3G\xcbs;\x89\xdf\xd6\xa9 \xbb\xa6\xcb\xbbN@c\x0eO,\xf3\xba\x0bqx1\xf4A"\x93\x94\x9f\xde\xa7k7B\x8a\xb2lSd\x8bRIC\xb7e\xce:\x98\xbcF\xd039\xf3\xd7\x9d\x96\x1f\xca\x81\x9c\x8fL\xd5b\x7f\xf3\xb0\xe5\x91\x834\xdf\x91\x8a\xed\x14S$\xadW\x85\xec\x8e\xab\xde\xe9|\xd8\\\xa7>*6h\x18v\xf9\x80\x9a\xef2\xb4H\xdazo\xf5\x0b\xd14\xa6\x12F\x9b\x19\x19\xdf\xda\xd4;\xd2\x93\xed|\xad\x7f\xdbagr=:\x91]\xe1\xec5\xfe\xbas9\xf2}\\\xa4\x95\x98\xfaJ\xb7\xfc\x85Q\xde\x90H\xca\xe1\xa0\x89\x97Mh\xfdN\xb7\xdf\xaf6\xc1"\xa2#k\x81)\x99\xcd\xf9|\x83+\xb3]I\xf5=3\xc9u\x86\xf3\xbf\x1cAZ\x08\x99\xdc \x1a\x08@5@\x00-}\x98A"\xa4\xc2N\xbaG2\x98\xaf\x14\x94C\x8d#\x1f~K\x8c!;\x8a\xfdx\xfc+e\x8d\xff\xea.\x8f \xa2\x19\xbd\x17\xa3\xc1i\xc8\x9a\xd9\x1dK\xdf\xe9\xf4\x1ef\xed]\xe5\xd7\xc51e\x86\xb2\x9e|u\xe0\x9cv\x0f\'3\xc9&D4yOD\xda7:\xafpBd-\x9a\xd5p\xaay\x99\xcdD\xefg\x99\x1e\x93\'\x81\xc8< \x99\xd8\xd6\x17\xba\xf2Z\xe0\xbcc\xbbgo\x952\xe5\xcf?c7\x93%\x8b\x96\xda\xe0\xa8\xbfr\x97u\xe0\xdc~\x06S_\xe2\xf3\x15\xab\x9e-6I\x1fbCt\xcd\xd8l\x0e-\x1d\x01^R\xde\xaeu\xe6|9OBo\x08Ko\x17\xbf 
\xf3\x12\xaa\xd29N|\x02j\x81\xadv\x93\x08\xa7\xdat7\xe9>{\xbf\xb7\xfa_\x03\xf3w/\xf4\xcd74$\xcbG\xf8,eg\x83\xe7\x8d\x0b\xd4\x99E\x1f\xfc,U\x94K\x81\xa4\xc1P_\xd9pE\xab\xd1-\x00\xc8$\xcan\x82\xc3(4\x10\xe7\xdboN\xbb(\xe4U\x07\x96\x0brD\xde\xbdD\xb2(\xae\xb3\xe9\xe3\xac5\x81\xc8e\xfd*\xfew\xe3\xbel4\x02\x06\x1az\xf6\xca\xb4\xc2\xf4\xe7\x8f\xc5d\x7f\xa50X\xd2\xd9\xeeB\xdf\x9f\xfbk7\x99\xf0\xec\x91\xecIo\xd8Yq\xdd\xa1O\x08\xde\xb9K\t\xbb\x84O\x1f\xb5J\xa5\xc7\xf2\x10D\xc2F\xfa\x9e\xb1*\x95~\xc0i\xb2\x95\xe3Y\xda\xb8O\x80\xd1{\x9a.\x11\x17\x1a~^\xde\x99MH,B\xe6|_\xdf\x83f\xc9\xf2\xbe"]\xe3X\x13\xdb\x9b\xa4\xcc\xcf\xa7b\xa7|\xcd\x1e\xbc\xb1\x0b&\xb4\x08\xc8y#\x1b\t\x9d\n=\xaa\xecfi\xdc\xc2\xc8\x076\xd1\xd9J!\x02u\x9bm!q\xea\xc7y\xff\xbfo\xba\xba\x81\x87\xca\n\xc3\xf9\xf7mu"@\x87\xf6\xa5x\x12_T\xe9\xe9\xf7(\xb3R\x1bRz\xc1\xba\xa7-0Z(\x1f\xd1\x06\xf6\xf7\xe9g\x1d\x02\xfc\xea\xfb\x06H\x06\x84\xab\xb7\xdb\xf05U\xb9]u\x177\x9cN\x91|\xff\xd4\xc4\xab\xb1\x17K\xbd\xabM\xb7\xce\xec\xe8\x1b\x1c\xa1vd\x13\xa8\x1as\xa2b\xff=\x05\x15\xe8\xeb\x87\xdf\xdae\xcc\xeci\xd1dum\x98*;\xe3\xbb,\x89W\x8cbF\x8f\xb7\x94\xca)5\xf1\xd8^\x94\x14V\x85\xdd\x86\xad\x01l\xc73\x86\xfb\x0fQy\x02A\r&M\xbe\x93ch\'\x8fj\rt\x84\r\xb7\x00G\xa4\xe9z\x08"\xd5@\x04\xaf\xb7L\xf1\xa4Z\x88\x06\'cf\xd6G\x99\x80\xab\xf6\xe6g\xc5\xd7&\x84\xa5\x9add&\xba\x079\x8d\x7f6\xe5\x16\xd7"A\xd9T\xa9-\xbeA\x9d\x0f\x8e\x86$\xbf\x1dy7N\xf6\xda\xf4F\x0b\n\xa1\x95\xab9>\xd5BV\x06\xf5WImI\x1cp\xcc\xaf]\x96\x0b\xb7}\xcb7\xb8-\xf6\x9e\x9e\xa13]\xe1\xfd\x1b\xb1\x80j\xd5\x0c\xd1\xa2\x03\x81\x1aw\xff\xae\xa3o|5\x13\x0e\xa2Ry\x8a\xec\xa9O\x05\x9a\x037\xde\xc6\x19o\xf4\x9c\xc4\x94\xf4S\x1a\xacyY\xb1iN/d\xfe\xe2%\xb9.\xe8HyM\xa7.}3\x8bt\xae\xf6\x8e]\xa0x\x85XT\x85\xb8o\n\x97k\xea}\xc7!\x82\x93f\xbd\xca\x15\xe9~L\xff\xf7\x8b\x12\xa5\xa1\xb1\x0e\x8dl\x07}\xf4\xf7cF\x96n?eR\xcb{WB\xfd\x04\x0b\x9b=\xfd\xb7\x9f\xbc5z\xb3\x95\\\xe3h\x00\xee\xc9\xb2kb+\xb1d\xa7\xbf\xe5\xb0=\xfd\xf6\x13\xc8\xbf\xcb\xbfO\x18\xf4Y_M\x9d\x01\xcf\xf1G\xaf&\x13\xb9\xb1\xb2
<t\xfd\xc9n\xafcG\xdd;\xaeJ\x11uD\x02\xac\x1b\xc6\xb5Z\xb4\xe0\x0e\xa8y +\x8b\x05\x02\xc3\xa8\xa3\xc9MU\xf4\xc7H\xe7\x8e\x97\xbb\xfd\xa8,R\xb6\xb3\xfd\xfb\xac\xfa\x18;x\xdb8`U\xd9\xa4\x88\x85\xaf\xa6\x8bd\xcf>\x18f\x05\xb5\xd6\xf7]xK\xc3\xa1t\xc2@\x9b\xaf\xd3\xe70\xd7\x90\x8c%\xc1\xae\x99\xb7\xdd\xcc\x92h-\x94s\x9c\x85\xa9\xb9\xfd\x03`\x9a\x1fIi\xf5\x14\xc4\x8c\xacc\x13r&\x9a:\x9f\x834\xc5e_\x06\xce\xe6G\x19\x9d\xb1\x12Y\x89*\xa2\xaa\xc4\xf9\x8e\xae{\xb2\xf4V\xd2>\xf85\xa7J\x95\xd4s\xe6(\xd8=7\xdb\x7f\x0cw)\xd0dq|\xce\x92\xe2\xc7N\x94\x12\xac\x1d\x87\xe6/\xc7\x84Y\xa8\xab\x11\x87)\xbb\xafq\xbdjz\xa9\xfc\xb1\xff\x01+\xe2,<\xb7\xdd\x81]\x0b\x15Kj`\x12=\x8e\xf7\x0c\xee\xfao8\x91\xbbz\xca\x06X4\xa9\xb1\xd8N#\xaa\xec3~\xdf\xfe%\xe3v\xc84\x8bT\xf7n))\xd1\x88\xf8\x8b\xa9/Ih9y\xfb\x8ev\xa3\'\x05\x9aH\xab\xd89Z\\h\xf6\xbdso\xf7\xa4\xd7qn\x98q\x1e\x16\x0f\x89\xd3\xdd\x96.[\xd3)I\x9c\x9f\xc1\x19\x0f\xd3\x7f\xb6\xa1\x0cG\xc0\x07`\xe0\xf7?\x06\xf9V\x8c\x18\xac\x17\xc5\x1dhZID[\xfa\\\x0f\xe2\x87\x97K\xc8\xafn!\x08\x19\xbf\x86\x11\xe8\x98\x07_\x92\xba\x1e\\c\xa7\xeb.\x85\x9b\xba\xd7\xab\xb0`=h&\xc6{\xf2B\x1e\t\xed\xe1p\xf0\x12R\\\x19\xa4\xd6\xfb\xd4!\xbd\xa2mH\xa7\xfb\x8e\xdf\xdfJ2$\x93\xcc\xb6b)>\xa7aO\x06[\xc4\xcf\x17\xef\xf6\xd2\x0c\x98\x12\x1aq\x02`\x9d\xae\'\xae$\x15\x18\x1c\x83\x9b\xd7F\x97\x7f\x15\xff\xfe\'B\xf5\xc3\xe4\xa7\xa0\x85\x8d\x06\xd0\x12\xe88\xda\x0f\x82\xbc7l\x89,\x12\xa6Nm\xd4\xde\x0e\x07p\xd3\xfcCYH\xb3\xcaBq\x9e\xc8\xa9\xff\xcd*\xd3\x9a\xb4\x96W\xe0\x9d\xf8x\xac\xc8\xb0\xb85x\xe3\xf7Z\x12Q\xf4\x16\xc2?\xbe.\xcfo!\x9dr\x16\x13z\x13\xcc/+\xc5\x98\x1b\xa6+\xcb\xc5E\xe2\xd0\xf4\x03E#\x97,\xa9\n[0\x0eE\xc0\xff\xaa2\xd9uC\xa4\x83m\xb5\xeb\x97\xaf\x9c\xac\xe2R$\xa4\xf3\xf6\xbb\x1d\xea+\xc8\xdb\xc6n\'\x9b\xa0\xcbs\x8a\xcbL\x17x\xe1:/H\xba\x96(o\xa5\xb4p\xb5\r\x967\xc540\r\xeb\xd5\xc4U\xb5F\xc4[1&\x11wM\xa0\x15\x862\xc3\xfd\xcdB\xde:\x82\xc2\x12\x1b74\x91\x8bT\x7f\x1d\x9b\xcc\x9do\x9e\xaa\x17\x06\xfccB\x93\x01Oz\x04#\xde\xf8\xd7\x89\x08\xdfr~|\xc1bq8\xb2hc\x8bg8\xbd\xa
d\x92\x8b\xaaE~(f<\xbb\x9e\xbfQ\x83\xe1t\xa4\xa9\xab\x8b\xe9c\xc2}\xb3\x1bA\xa8\xef \xa3VjX\xd5\xe4\x94>\xf0\xdb\xf4\xbd\x95\x10\xbb^\x1a\x1b"\x90\xa2\x0c1\xa4@.\xa9\x05O\xb8p\x10;\xe9 \xc4\xdb\xb6\x9a\xd1\xe5/\xa9#\x96\x12>\x03="\x8d\x90h\x86\xad\xd4{+1\xac\xe0\x02\xc3*"v\xd7\x1e]\x13d%\xf3\x1b\xef\x1a\x7fd\xdf\xfaH\xc8o\xf8\x89\x97f\\-]\xaf\xcd\x99+\xb0Wz\xd7A\x89\x88\xc8\xd7\xbb\xf5\xc5\xe3\xe8\xc3\xc4:\xb7\xec\x06\xa7\x97lpA\xaa\x02\x0f\r\xd1t\xaa\xae\xde(\xa4hV\x18\x87\xa2c\x0b<\xc2\xc7#1,\x1c}\x84L\xf8\xce\xce\xa4h@\xe1\x9e\xb9\xe7w1\x02\t\xbb\xd6\xa2F\x1b\xcc\xea\xd7\x92\xa8\x1e\x04\xbcY@G\x9c\x8e\x97+\xef\x94Tk\x83j3\x8f\x8cz<\x99J\x06aj\xf6\xea\xcdI\xe0\xe7\xd3\xf0o\xf71\xb4\xd1u\x07FX\xd9Bg]\'\xe10\xeb\x03Q\xd0\xa3\x81-\xd3\x82\xb8U\xbb\x16\\8\xc6V\xd2\xb1\'\xb8\xc0\x1c\xe8\xba>.\xaa\xca\x00%\xbf\xf5!\xccd\t@\xact\x85\x1e%\xb1y\x01g\xd6\x08\x9d\xa9\x89U3\xdb[\xcb\xadM\xbd\x9c\xc85\t\x91\xcb)9\xb6\xa5\xcc\xdc1\x84\xe2\xafy\xdb_\xe2,S\'\xf0\x9c\x9ft\'\x05Esm\xa1\x9e\x1a\xec\xd8\x8fF|}\x80\xbe\x9e@X\x92#\t\x0bs\x14\xd53$UI\xc1\xb9?\xcd[\xc5>\xb7jV]\x95\x99\xc0\xd2\x05\x7f:X\xb9\x06\xf5\x95Q\x1cm\x19%p\x812\xbb\x19j\x8e\x10\xddC\x1e\x8c\x19ff9\xbde\xbb\xf3\x901#%\xcf\x14?\xe5H\xaa\x84\xd1\x85)sG\xb1\x88\xd2\xf8^\xfd\xaa\xd7D\x91\x15\x84\x85 
}\x1b6\xcdQ:\xbe+90z0\xb3\n\xa1=\xc9_\x9a\xef\x8b0r\xbb}\x8b,?\xf3IwC\xf0\x8a$\xd7\x03\x8f\xcfT\xa4\xc7\xa9\xb68m\xc3\x1a\\\xffj\x91\x88\x0ew\x19\xda5\xe3\xff\xc6\x03\xbf\xdbh.%\x8d\x89w\xe9\xf3]\xd9z\xde\x99\x94\x05\xfd\x19\xdfCV\xd2K\x00w\xd2\xe1\x07\xb9q\xa5h:j\xdc\'b\xb3\xf9*\xed\xde6nV\x0f\xe50z\xce\xe5N\xef\x13u\x86;\xf7\xddG\xc6\x0e\xe6"\xbd\t\x02^\x0f\xed]N<VMo/k\xc1\x80i2\x10\t]t\x11i\x01\xfb\xf5\xcf\xcb|\xad\x8b\x86\xcd\xe5\x87\xd5\xa8\x93.\xa2!J\xcf\xc8Q\xa6\xb18\xe2\xc9\x15L\xf507R\xc8\xde\x10\x84\x7f\xd5\xf0\x83\x1a\x03\x89W"\xe3k\xc8\x1e\xc6\x8b\x81D.W\x1e1\x95\x87\x88f\x15\xf0\x03\xd4wp\xee>\xdc\'\x13\xcb\xfa\xfe\xa0\xc5\xa6\n\x9221\xd8d\xdb\xaa\nB\xdfqIWW\xb8\x87n\xcb;\xfb\xdd\x86\xa7\xcaq\x9f_\xf3\xc8/\xa2\xe2\xb8\x7f\x87j*}/\xd9\x98\xb3\xb2\xc3\xc2w{m\x8d?\xcf\xdax\x08}\xf3\t\xc3\x1e8R\xb0\xe2\xe6@\xe4k\x1e\xf0*\xf6\x85\xce \x85\xb9\x9d\x81\xb3*\xe9\xafC\xe6\xb9X\xa3q\xa8EE.\xfe\xb5\xc2\xac\xeaS~s\x10Ll\xa6\x8d\xc3g\xda\x9d\x916\x1f\r@\x1fi_!J\xde\xdcl\xd1\xb6\x9bf\xf6\xccDQA^\xf8ujh\xe6\xfd\xe2\xa2\xaf\xda\x1d\xde<\x9b+\xaf\xdf\xd5\xe9\x1a{\x9b4\xc7*\x16\x90\xb2=\\\xda\xbe\xd7\x98\xdb\x10><7b=\x01\xbb\xd0\xce\x8eko&\x84\x82\x17\x93\x7f\xe3U\x17\xd7\xef 
\x8f\x99\xc2\x1d9\xd8\x1d\x9c\xadn*\x0e\x05\x1d\ne(\xb7z\x0c\xef15\xdb\xb4\xef\xb0\n\xf9\x002\xb8/\xdf\x8e\xcf\xbc1\xba\xf4dGM1\x03,\xbc\xc8\x822\xa0F\xc6\x16\x0c\x89_G=\xd0>\x9bL\x18\x00\xde\x9c\xfa\xdb\xefpC\x1a\xc8\xf4F\xa3\x1c\xc8\xa4<Yr\x1f-\x89^l{\xa0c\x9c\x90\x86wb|\xfd\xde\x9f\xe4\xd4\xb3jMA\x18\xb8\x8b0~3\xa5\x05\x8a{y|\x81%\x97\xc7\x81e\x9d\xe4\x85\xde\x86\x8f\x970\xd5\xd6\xb5\x06;-\x86\xb8\xcdw\xf3G~\xc8\x82\x97\xcfWb\xd4\x14y\x1b\xfa\x97\xdb\xff\x1a\xd3Q\n\x08H\x90\xf6\xb5@0\x93\x1c\xd6o\x97\x10\xfe\x88\t\x1a76\xb6r\xe4\xb6\xa2\xccT0\xe3\xef\xeb:\x06r\xbf%p\xb6\x7f\xd2=\xb1"\x07\x87\x90\xb4V\x85\x12\xa2\xe5\xe9ZNy\x9b0-[\xe7\x9c\xcc\x1c\':\x9fL\xcf\xd7\n\xa2Y=L\x99Sr^\x0f\x9b\x06\xb4\x1b\xbc\x1a?\xfdT\xd8>\xd7\xd4\xf1=\xaf^\xe2jC\x14\xd0\xd3\xbc\x8b\x0c\xf1?/85\r#\x8e\xa8\x07\xb9\xacH\x0e\xec\xa1\xfeP\xf2\x95\xb4f\xad\xfa\xfe\xe2\xbd\xfc\xdc\x8ba~%[yt\xb1\x9e+\xcc/\xa8v\xb2\x06\x05G\xa3\xcbX4\xfdos\xb4\x10\x9c\x83#\xde\xcbG\x17\xae\xe1\xc2\xc9>X\x10\x94d\xc6\xff<I\xabmn\x18\x89_\xd0\xd0m\xbf\x0c\xf8>\xd0\xffV\xe24\x8e\xf8\xab\x82\xccS\x16\xfb\x15\x0c\xba\xfc\x97\xaef\x17\xb9\x9c\xddb\x08)\xac]\x94\xba\xd5P\xd0\x03\xefkt>\x99\x9c<(\xc4\x99\xf2j\x9f\xc4R\x83\xb6t\x0e\x81l\xbb\xa6\x13\x8d\xf6\xd9\xf77\xb0^\xc1\x0b\xe0\xf2\xeew;\x9bX\x13\xe6\x98\xa9-\xf2k5\x8e\xeb\x19\x0bg\xfd\xe2$J)\xb1\x8e\xad]\xf2\x82I\x1e\xb3n\xa0\xad\xb1\x1f$f\x12\n\xd4\x9a\x94\xaa*\x86\x0em\x1a\x9e\xf9\xb9(\xc7\xe6\xb4\x98\x00z\xb3\x99\x16z\xe1&\xd4H`\x9e|\x98j\x95\xff\xd6\xf3\xa8\xaafn!R\xfd\xac\xd2\xe5,\xb7\xf4\xca\xb6&\xa2\xadn`@_CT\xc1\xfbR\x1ep\\jTB\x13\x91\x8e>\x8e\xa2)\xd6Yg\xcf\xb0\xbbu+\xb8\x17\xea\x9b:\xcb&*\x93\x19\x88\x15&\x05\xcd\x87\xfc]\xbc<\x16\xee\xbf\x9f;\x8f\x02\xbc\xa5\x1e,J\x10\x95v\x8b\xf9R\x00\xbc\xfcs)Z\x97\xfdv\xf2\x05\xe6\x14<\x9d\xe6\xe3\x03\xed\x8b\xa1\xb5\x8c\xff\xca\xda\xe1\xd7\x14{\xa8\xcb\xf5\xc3\x8e\x9bV\x81-\xce\x8e\x1d\xae\xb8\xf4:d\xd6\x9b\xb7\xeey\xed\xd9\xf9\xc5\xeb\x82\xcc4\xa6\t\xdevC\x8d\xf5\xa1"g\x1f\xd17\xe1K\xc6\xc2\x8f\x96\x15\xa8\x18\xe0R\x
0ep\n\x10\xc7\x83\xe5G\x18\xb9\x1b\x82T\xe0<\xf0R\xae\x19\xf9@n*F-\x04v\\\xa7\t\x8fp\xcavWGP\x0c\x1e#HQ\x11\x97^\x0f\xc1E\xd6}\x7f\xb2\xeb$\xfd\xf3\x02\r\xb6@F\x9e7\xad\xc6\xe0A\x9ak\xdb\x16\xeb\x95|\xbfD\xee\x9c\xf8&\xf1\xe9\xf8\xe5\x8c\x98\x13\xd0\xc5\r\xef\xd7;\x193\xc7%,\xa6W\xfb\xe2]\xb5\xa0\xf6\x87S\x81\xc1\x82;\x91\xf8\x0f\xb2\x07[\x8c\xd7|\xab\x81\xa6F\xa4FE\xdf\xdb>\xd1z\xff\x01\xb3\xd3\xd47\xbd\xee.?\xcf\x88\x86~\xc6\x0cHs\x03\xd9\x04\xae\x9d\x98r\xda\xe9\x05\x88)\xcet\xd10\xe5\x99\xe6tFP\x1a\xdc\xea\x96\xa0\xd3#\xb3\x10Z\xfc\xd8:\xa6J#\\\xb1iS\'\xf5\xc6\t\x9c\xe3\x80O\x18\xdfn\xa0PJ\xb5\xb5\xde\x03\\\xc9\xb00\x91\x9a_\xf9H\xb8\xb8Ox\x086\xbe\xb4\xd5o\xd1\x1a\x95\xa8"]\xac:K\x95\xce\n\xa8\xe7\xdc\xdb\xd5\xa2\xa8\xf5\xba_\x14\xb5\xae\xf4\xaa\xa2Z\xf6\x8e\xcd\x9c\x1d\rg\x8e\x87\xd3\x98Edl\x9b;S\x17\xec\xf60q\xbe&w\xc3\x01\x809Yj\x88\x995\xcb\xfc\xd2\xb3\xf7\xa2\xf8\x8c\xc0\xec\xabO\xe7\xb5\x0cx\xcbO\xd4i\x84\x12R)3\x12&\xf3ww\x18\xf4Y\x90\xa9B\xd8\x8c\x1b\xf5pK\xbb\xaa\xc5\xf0\xe5\xbdj\xf1\xa1\x95\x90\xc8\x8c\xd9+]ANL\xb6\xac\x01\xdfd\xe7\x0e\xbc\t\xfd^\xe0\xf1J\xf1V\x9e\xd7\x83\x1d\x8f\x85\xad\x7fQ\xe6\x90\x12\x9e\x1b\xaa\x8b\x03\xab\x94\xba\x0e\xbfF\x18\xf1\xee\xd3\xc8\xa6\xd2l\xac\x0f\xe5\xfa\x0b~\xed\xb1\x9f\xb4f\x00\x95I\x9d\x8e\xa3\x14\xddV\xa8\x8b:6m\xfeH\xed\x14\xd6(\xaf?\x81\x12*Uw\xd4\xdb\xadl\xdc\x14\xeab]Y\'\xaf79\xa2\x99\xf0\xfet<\x17RK\xf8\x0e\xfe\xa7\xfe\xbav\xea\xcc\xdb73_\x89\x99l\xa9Y\x84Ahh\x17Y\x98&B\x1b\nQ\xc4bJ\xc1\xd4\xcd\xe9\x1c\xc4\xeb=,^+\xe3\xf7\xa8\xa0\xd4c&L\x10\xba\x16H\xedmpm\x9e\x86\x00\xd2\xc0!f=\xdbJ2\xefz\xb6\xa5j\x80!\x90sj\x94\xe3\x13?_\xc0\xd3\xa7v\xaf1Q\x81\xe9\x0c\x9c\x9d|\x0f\xac\xcc\x8a\x90\xa3\x96\x12\xdb\xd3]pz\xc2<\x85;\xd0C\xd0\x86\xca\xe0y\xfd\xc9\x10\x95V\xd9c\xa3\xba\xe32|\xed\xbb\xdb\x7f{H,\x99\x93H 
+\x9c\xbcVv\xc9\x91;\xca\x8a\xd1\xb7!\x07\x0e\x1c\xb3\x14c\x82I\xb2n\x87%+\xa7\xe1=\xa1\xd8L\xc4\x10\xee\x94\xbf\x99\xae]\xe6\x8f\x88O\x88Gx\xb3\xf5\x05Wz\xcek\xf8\xe1\xc5xz\x0bU\xcb\xc0e\xcf\xe4R\x922PM0]\xa9\xd9\x8b\x84\xfa\xf6\xcbU\x9e?\x97tn\xa2Xa\xa5\xde\x9aTx\t\x08x=\xca\xa0\xab\xe4\xc1\x97q\xba%\xa8\x1d\xb3\xd4\xacl\xbd\x00\xbb\xa3\xd6\xc3\x85E7:$\xbd\x99^\xc0\x8b\xef0\x03\xc1\x17\xf0p\xfd*)7\xe7\xa46\x0f\xfdR\x0e\xf0\x91L\x1d\xde&\xf5h\x11\x11\x11/\xf1A\xfb\xd8\x1c\xd9\xf6\xee\x19A\x11\xbdSF\x05\xb5\xfeoh~\xe3\xef\x99\x93k\xdbX\xaa\xb0Y\xd7+\xcb\'\x98\x84\xf6\xcbv\x0e\x96 \xffJ?\x8a\xddz\xbf\x12\xfb\xe7D?w9\xacc\xf4g\x91\xe5\xb8\xcb@xu\xe3\xb3?[k\xc2A\xd1W\xbd\xe4`$G\x1e+\x13rrr\xa1\xbd/hi5^\x0f\x80\x1a|\xdc\x17\xa2\x0f\tpZ\xc8\xf8\x0b\x1bm\xa6\x14\xef\x8aT\xe6\x8b\x01Y\xde\xc5R\x1a\x9fW\xde\xc3c\xe3\x8bx\xe8+Hp\xe9\xaby\x85\x14\xe1+!\xbf\xa8)\xa4\x0cT\xc26\xf55\xebDQ\xe3\x0b\xc7\xf09+\xfdI\xf6<\xdf\xbfL\x0f\x03\x01\x01\xe1\n\xff\x9f\xd8\x998\xbbX\x99\x00]\xff\xc7\xf0\x02Z\xffr\x85\xfd\x0f\x80\x0e&f.\xaeH\xff\x91\x99\xb9\xa9\x83\x9d\xa3\xb3\xb9\x8b\x0b\xfd\x7f\x1d\xc4\xff\x8a\x0b\xd4\xffL-]\x11\xfe+Bv\x0efn@\xf3\x1f0\xff\xdf\'\x80\\@\x91\xa4\xfc?_\xf2\xff\x00\xc7\x98r\xeb')))
| 42,919.6
| 214,472
| 0.734685
| 49,370
| 214,598
| 3.187401
| 0.156836
| 0.000445
| 0.000572
| 0.000534
| 0.000394
| 0.000394
| 0.000394
| 0.000394
| 0.000394
| 0
| 0
| 0.230854
| 0.00137
| 214,598
| 5
| 214,472
| 42,919.6
| 0.503439
| 0.000461
| 0
| 0
| 1
| 89
| 0.663105
| 0.661277
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
977a17ea20f82c95851653b451acffe6e999767f
| 50,080
|
py
|
Python
|
pyzk/test.py
|
amanat-juwel/laravel-student-attendance-system
|
3d39f6d04e7a676b908d7ad79d1213db0411e980
|
[
"MIT"
] | null | null | null |
pyzk/test.py
|
amanat-juwel/laravel-student-attendance-system
|
3d39f6d04e7a676b908d7ad79d1213db0411e980
|
[
"MIT"
] | null | null | null |
pyzk/test.py
|
amanat-juwel/laravel-student-attendance-system
|
3d39f6d04e7a676b908d7ad79d1213db0411e980
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import sys
import os
import unittest
import codecs
from mock import patch, Mock, MagicMock
mock_socket = MagicMock(name='zk.socket')
sys.modules['zk.socket'] = mock_socket
from zk import ZK, const
from zk.base import ZK_helper
from zk.user import User
from zk.finger import Finger
from zk.attendance import Attendance
from zk.exception import ZKErrorResponse, ZKNetworkError
# Compatibility shim: Python 2's unittest only provides assertRaisesRegexp;
# alias it to the Python 3 name (assertRaisesRegex) that the tests below use.
try:
    unittest.TestCase.assertRaisesRegex
except AttributeError:
    unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
def dump(obj, nested_level=0, output=sys.stdout):
    """Recursively pretty-print a nested dict/list structure (debug helper).

    Args:
        obj: object to print; dicts and lists are expanded recursively,
            anything else is rendered with ``%s``.
        nested_level: current indentation depth (used internally on recursion).
        output: writable text stream; defaults to ``sys.stdout``.
    """
    spacing = ' '
    indent = nested_level * spacing
    # Fixed: the original used the Python-2-only ``print >> output`` statement,
    # which is a SyntaxError on Python 3 even though this test module
    # explicitly branches on ``sys.version_info >= (3, 0)``.  ``output.write``
    # works identically on both interpreters.
    if isinstance(obj, dict):
        output.write('%s{\n' % indent)
        for key, value in obj.items():
            if hasattr(value, '__iter__'):
                output.write('%s%s:\n' % (indent + spacing, key))
                dump(value, nested_level + 1, output)
            else:
                output.write('%s%s: %s\n' % (indent + spacing, key, value))
        output.write('%s}\n' % indent)
    elif isinstance(obj, list):
        output.write('%s[\n' % indent)
        for value in obj:
            if hasattr(value, '__iter__'):
                dump(value, nested_level + 1, output)
            else:
                output.write('%s%s\n' % (indent + spacing, value))
        output.write('%s]\n' % indent)
    else:
        output.write('%s%s\n' % (indent, obj))
class PYZKTest(unittest.TestCase):
def setup(self):
pass
    def tearDown(self):
        """Per-test cleanup hook; nothing to release, tests only use mocks."""
        pass
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_no_ping(self,helper, socket):
""" what if ping doesn't response """
helper.return_value.test_ping.return_value = False #no ping simulated
#begin
zk = ZK('192.168.1.201')
helper.assert_called_with('192.168.1.201', 4370) # called correctly
self.assertRaisesRegex(ZKNetworkError, "can't reach device", zk.connect)
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_correct_ping(self,helper, socket):
""" what if ping is ok """
helper.return_value.test_ping.return_value = True # ping simulated
helper.return_value.test_tcp.return_value = 1 # helper tcp ok
socket.return_value.recv.return_value = b''
#begin
zk = ZK('192.168.1.201')
helper.assert_called_with('192.168.1.201', 4370) # called correctly
self.assertRaisesRegex(ZKNetworkError, "unpack requires", zk.connect) # no data...?
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_invalid(self, helper, socket):
""" Basic tcp invalid """
helper.return_value.test_ping.return_value = True # ping simulated
helper.return_value.test_tcp.return_value = 0 # helper tcp ok
socket.return_value.recv.return_value = b'Invalid tcp data'
#begin
zk = ZK('192.168.1.201')
helper.assert_called_with('192.168.1.201', 4370) # called correctly
self.assertRaisesRegex(ZKNetworkError, "TCP packet invalid", zk.connect)
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_connect(self, helper, socket):
""" Basic connection test """
helper.return_value.test_ping.return_value = True # ping simulated
helper.return_value.test_tcp.return_value = 0 # helper tcp ok
socket.return_value.recv.return_value = codecs.decode('5050827d08000000d007fffc2ffb0000','hex') # tcp CMD_ACK_OK
#begin
zk = ZK('192.168.1.201') # already tested
conn = zk.connect()
socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
conn.disconnect()
socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e903e6002ffb0100', 'hex'))
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_force_udp_connect(self, helper, socket):
""" Force UDP connection test """
helper.return_value.test_ping.return_value = True # ping simulated
helper.return_value.test_tcp.return_value = 0 # helper tcp ok
socket.return_value.recv.return_value = codecs.decode('d007fffc2ffb0000','hex') # tcp CMD_ACK_OK
#begin
zk = ZK('192.168.1.201', force_udp=True)
conn = zk.connect()
socket.return_value.sendto.assert_called_with(codecs.decode('e80317fc00000000', 'hex'), ('192.168.1.201', 4370))
conn.disconnect()
socket.return_value.sendto.assert_called_with(codecs.decode('e903e6002ffb0100', 'hex'), ('192.168.1.201', 4370))
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_udp_connect(self, helper, socket):
""" Basic auto UDP connection test """
helper.return_value.test_ping.return_value = True # ping simulated
helper.return_value.test_tcp.return_value = 1 # helper tcp nope
socket.return_value.recv.return_value = codecs.decode('d007fffc2ffb0000','hex') # tcp CMD_ACK_OK
#begin
zk = ZK('192.168.1.201')
conn = zk.connect()
socket.return_value.sendto.assert_called_with(codecs.decode('e80317fc00000000', 'hex'), ('192.168.1.201', 4370))
conn.disconnect()
socket.return_value.sendto.assert_called_with(codecs.decode('e903e6002ffb0100', 'hex'), ('192.168.1.201', 4370))
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_unauth(self, helper, socket):
""" Basic unauth test """
helper.return_value.test_ping.return_value = True # ping simulated
helper.return_value.test_tcp.return_value = 0 # helper tcp ok
socket.return_value.recv.side_effect = [
codecs.decode('5050827d08000000d5075bb2cf450000', 'hex'), # tcp CMD_UNAUTH
codecs.decode('5050827d08000000d5075ab2cf450100', 'hex') # tcp CMD_UNAUTH
]
#begin
zk = ZK('192.168.1.201', password=12)
self.assertRaisesRegex(ZKErrorResponse, "Unauthenticated", zk.connect)
socket.return_value.send.assert_called_with(codecs.decode('5050827d0c0000004e044e2ccf450100614d323c', 'hex')) # try with password 12
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_auth(self, helper, socket):
""" Basic auth test """
helper.return_value.test_ping.return_value = True # ping simulated
helper.return_value.test_tcp.return_value = 0 # helper tcp ok
socket.return_value.recv.side_effect = [
codecs.decode('5050827d08000000d5075bb2cf450000', 'hex'), # tcp CMD_UNAUTH
codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
codecs.decode('5050827d08000000d00745b2cf451b00', 'hex') # tcp random CMD_ACK_OK TODO: generate proper sequenced response
]
#begin
zk = ZK('192.168.1.201', password=45)
conn = zk.connect()
socket.return_value.send.assert_called_with(codecs.decode('5050827d0c0000004e044db0cf45010061c9323c', 'hex')) #auth with pass 45
conn.disconnect()
socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e90345b6cf450200', 'hex')) #exit
    @patch('zk.base.socket')
    @patch('zk.base.ZK_helper')
    def test_tcp_get_size(self, helper, socket):
        """ can read sizes? """
        # Verifies that read_sizes() parses user/finger/record counts and their
        # capacities out of the device's sizes reply payload.
        helper.return_value.test_ping.return_value = True # ping simulated
        helper.return_value.test_tcp.return_value = 0 # helper tcp ok
        # Replies are consumed in order: connect ACK, sizes payload, final ACK.
        socket.return_value.recv.side_effect = [
            codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
            codecs.decode('5050827d64000000d007a3159663130000000000000000000000000000000000070000000000000006000000000000005d020000000000000f0c0000000000000100000000000000b80b000010270000a0860100b20b00000927000043840100000000000000', 'hex'), #sizes
            codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
        ]
        #begin
        zk = ZK('192.168.1.201') # already tested
        conn = zk.connect()
        socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
        conn.read_sizes()
        # The sizes request packet must have been the last one sent.
        socket.return_value.send.assert_called_with(codecs.decode('5050827d080000003200fcb9cf450200', 'hex'))
        conn.disconnect()
        # Counts and capacities decoded from the sizes payload above.
        self.assertEqual(conn.users, 7, "missed user data %s" % conn.users)
        self.assertEqual(conn.fingers, 6, "missed finger data %s" % conn.fingers)
        self.assertEqual(conn.records, 605, "missed record data %s" % conn.records)
        self.assertEqual(conn.users_cap, 10000, "missed user cap %s" % conn.users_cap)
        self.assertEqual(conn.fingers_cap, 3000, "missed finger cap %s" % conn.fingers_cap)
        self.assertEqual(conn.rec_cap, 100000, "missed record cap %s" % conn.rec_cap)
    @patch('zk.base.socket')
    @patch('zk.base.ZK_helper')
    def test_tcp_get_users_small_data(self, helper, socket):
        """ can get empty? """
        # Exercises get_users() when the device returns the user table as a
        # single DATA packet (no buffered transfer); checks parsed User fields.
        helper.return_value.test_ping.return_value = True # ping simulated
        helper.return_value.test_tcp.return_value = 0 # helper tcp ok
        # Replies consumed in order: connect ACK, sizes, user DATA, final ACK.
        socket.return_value.recv.side_effect = [
            codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
            codecs.decode('5050827d64000000d007a3159663130000000000000000000000000000000000070000000000000006000000000000005d020000000000000f0c0000000000000100000000000000b80b000010270000a0860100b20b00000927000043840100000000000000', 'hex'), #sizes
            codecs.decode('5050827d04020000dd05942c96631500f801000001000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003830380000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003832310000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833350000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833310000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833320000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003836000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000383432000000000000000000000000000000000000000000','hex'), #DATA directly(not ok)
            codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
            #codecs.decode('5050827d08000000d00745b2cf451b00', 'hex') # tcp random CMD_ACK_OK TODO: generate proper sequenced response
        ]
        #begin
        zk = ZK('192.168.1.201' )
        conn = zk.connect()
        socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
        users = conn.get_users()
        socket.return_value.send.assert_called_with(codecs.decode('5050827d13000000df053ca6cf4514000109000500000000000000', 'hex')) #get users
        # Seven user records are encoded in the DATA payload above.
        self.assertEqual(len(users), 7, "incorrect size %s" % len(users))
        #assert one user
        usu = users[3]
        self.assertIsInstance(usu.uid, int, "uid should be int() %s" % type(usu.uid))
        # user_id is textual; the acceptable types differ between Py2 and Py3.
        if sys.version_info >= (3, 0):
            self.assertIsInstance(usu.user_id, (str, bytes), "user_id should be str() or bytes() %s" % type(usu.user_id))
        else:
            self.assertIsInstance(usu.user_id, (str, unicode), "user_id should be str() or unicode() %s" % type(usu.user_id))
        self.assertEqual(usu.uid, 4, "incorrect uid %s" % usu.uid)
        self.assertEqual(usu.user_id, "831", "incorrect user_id %s" % usu.user_id)
        self.assertEqual(usu.name, "NN-831", "incorrect uid %s" % usu.name) # generated
        conn.disconnect()
    @patch('zk.base.socket')
    @patch('zk.base.ZK_helper')
    def test_tcp_get_users_broken_data(self, helper, socket):
        """ test case for K20 """
        # Regression test: the K20 device splits the user DATA payload across
        # two recv() calls; get_users() must reassemble and parse both users.
        helper.return_value.test_ping.return_value = True # ping simulated
        helper.return_value.test_tcp.return_value = 0 # helper tcp ok
        # Replies consumed in order: ACK, sizes, partial DATA, remainder, ACKs.
        socket.return_value.recv.side_effect = [
            codecs.decode('5050827d08000000d007d7d758200000','hex'), #ACK Ok
            codecs.decode('5050827d58000000d0074c49582013000000000000000000000000000000000002000000000000000000000000000000000000000000000007000000000000000000000000000000f4010000f401000050c30000f4010000f201000050c30000','hex'),#Sizes
            codecs.decode('5050827d9c000000dd053c87582015009000000001000000000000000000006366756c616e6f0000000000000000000000000000000000000000000000000000000000003130303030316c70000000000000000000000000000000000200000000000000000000726d656e67616e6f0000000000000000000000000000000000','hex'),#DATA112
            codecs.decode('000000000000000000000000323232323232636200000000000000000000000000000000','hex'), #extra data 36
            #codecs.decode('','hex'), #
            codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # CMD_ACK_OK for get_users TODO: generate proper sequenced response
            codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # CMD_ACK_OK for free_data TODO: generate proper sequenced response
            codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # CMD_ACK_OK for exit TODO: generate proper sequenced response
        ]
        #begin
        zk = ZK('192.168.1.201') #, verbose=True)
        conn = zk.connect()
        socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
        users = conn.get_users()
        #print (users) #debug
        socket.return_value.send.assert_called_with(codecs.decode('5050827d13000000df05b3cb582014000109000500000000000000', 'hex')) #get users
        # Two user records span the split payload above.
        self.assertEqual(len(users), 2, "incorrect size %s" % len(users))
        #assert one user
        usu = users[1]
        self.assertIsInstance(usu.uid, int, "uid should be int() %s" % type(usu.uid))
        # user_id is textual; the acceptable types differ between Py2 and Py3.
        if sys.version_info >= (3, 0):
            self.assertIsInstance(usu.user_id, (str, bytes), "user_id should be str() or bytes() %s" % type(usu.user_id))
        else:
            self.assertIsInstance(usu.user_id, (str, unicode), "user_id should be str() or unicode() %s" % type(usu.user_id))
        self.assertEqual(usu.uid, 2, "incorrect uid %s" % usu.uid)
        # The second user's id crosses the packet boundary ("222222cb").
        self.assertEqual(usu.user_id, "222222cb", "incorrect user_id %s" % usu.user_id)
        self.assertEqual(usu.name, "rmengano", "incorrect uid %s" % usu.name) # check test case
        conn.disconnect()
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_get_users_broken_tcp(self, helper, socket):
    """Test case for https://github.com/fananimi/pyzk/pull/18#issuecomment-406250746

    Replays a captured TCP conversation in which the 70-user payload arrives
    split across many partial recv() chunks, and checks get_users() still
    reassembles every record correctly.
    """
    helper.return_value.test_ping.return_value = True # ping simulated
    helper.return_value.test_tcp.return_value = 0 # helper tcp ok
    # Scripted device replies, in exact protocol order: connect ACK, sizes,
    # PREPARE_BUFFER ACK, read_buffer PREPARE_DATA, then the user table as
    # DATA packets each followed by raw continuation fragments.
    socket.return_value.recv.side_effect = [
        codecs.decode('5050827d09000000d007babb5c3c100009', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d58000000d007292c5c3c13000000000000000000000000000000000046000000000000004600000000000000990c0000000000001a010000000000000600000006000000f4010000f401000050c30000ae010000ae010000b7b60000', 'hex'), #sizes
        codecs.decode('5050827d15000000d007a7625c3c150000b4130000b4130000cdef2300','hex'), #PREPARE_BUFFER -> OK 5044
        codecs.decode('5050827d10000000dc050da65c3c1600b4130000f0030000', 'hex'), # read_buffer -> Prepare_data 5044
        codecs.decode('5050827df8030000dd05d05800001600b013000001000e35313437393833004a6573757353616c646976617200000000000000000000000000000001000000000000000035313437393833000000000000000000000000000000000002000e33343934383636004e69657665734c6f70657a00000000000000000000000000000000000100000000000000003334393438363600000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003337333139333600000000000000000000000000', 'hex'), # DATA 1016 -8 (util 216)
        codecs.decode('0000000100000000000000003734383433330000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003433333939353800000000000000000000000000000000000900000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003333373335313100000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003337373535363100000000000000000000000000000000000b000000', 'hex'), # raw data 256
        codecs.decode('0000000004000e00000000000000000000000000000000000000000000000000000000000000000000000001000000000000000032333338323035000000000000000000000000000000000005000e000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000333632363439300000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000313838343633340000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000000000000', 'hex'), #raw data 256
        codecs.decode('00000000000000000000000000000000000000000000000000000000000000000000000100000000000000003131313336333200000000000000000000000000000000000c00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003130353233383900000000000000000000000000000000000d00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003135333538333600000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000000000000000000100000000', 'hex'), #raw data 256
        codecs.decode('000000003933313637300000000000000000000000000000', 'hex'), #raw data 24
        codecs.decode('5050827df8030000dd0520b601001600000000000f00003334323931343800000000000000000000000000000000000000000000000000000000000100000000000000003334323931343800000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003139303636393700000000000000000000000000000000001100000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003139333831333500000000000000000000000000', 'hex'), # DATA 1016 -8 (util216
        codecs.decode('00000000120000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000393231303537000000000000000000000000000000000000130000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000333634383739340000000000000000000000000000000000140000323831353732000000000000000000000000000000000000000000000000000000000000010000000000000000323831353732000000000000000000000000000000000000150000000000000000000000000000000000000000000000000000000000000000000000', 'hex'), #raw data 256
        codecs.decode('00000001000000000000000031383133323236000000000000000000000000000000000016000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000035393037353800000000000000000000000000000000000017000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000031363933373232000000000000000000000000000000000018000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000033363430323131000000000000000000000000000000000019000000', 'hex'), #raw data 256
        codecs.decode('00000000000000000000000000000000000000000000000000000000000000000000000100000000000000003331303733390000000000000000000000000000000000001a00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003433353430393400000000000000000000000000000000001b00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003338303736333200000000000000000000000000000000001c00000000000000000000000000000000000000000000000000000000000000000000000000000100000000', 'hex'), #raw data 256
        codecs.decode('000000003231333938313700000000000000000000000000', 'hex'), #raw data 24
        codecs.decode('5050827df8030000dd059a2102001600000000001d00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003333383738313900000000000000000000000000000000001e00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003439353634363800000000000000000000000000000000001f00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003832343030300000000000000000000000000000', 'hex'), #DATA 1016 -8 (util 216)
        codecs.decode('00000000200000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000333937373437370000000000000000000000000000000000210000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000343435383038340000000000000000000000000000000000220000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000343430353130390000000000000000000000000000000000230000000000000000000000000000000000000000000000000000000000000000000000', 'hex'), #raw data 256
        codecs.decode('00000001000000000000000033353732363931000000000000000000000000000000000024000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000033363336333832000000000000000000000000000000000025000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000033333232353432000000000000000000000000000000000026000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000039393437303800000000000000000000000000000000000027000000', 'hex'), #raw data 256
        codecs.decode('00000000000000000000000000000000000000000000000000000000000000000000000100000000000000003836333539380000000000000000000000000000000000002800000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003338383736383000000000000000000000000000000000002900000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003739393434350000000000000000000000000000000000002a00000000000000000000000000000000000000000000000000000000000000000000000000000100000000', 'hex'), # raw data 256
        codecs.decode('000000003532313136340000000000000000000000000000', 'hex'), # raw data 24
        codecs.decode('5050827df8030000dd053da903001600000000002b00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003439373033323400000000000000000000000000000000002c0000000000000000000000000000000000000000000000000000000000000000000000', 'hex'), # DATA 1016 -8 (util 112)
        codecs.decode('0000000100000000000000003134363732353100000000000000000000000000000000002d000e32363635373336006d61726368756b0000000000000000000000000000000000000000000100000000000000003236363537333600000000000000000000000000', 'hex'), # raw data 104
        codecs.decode('000000002e00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003136383133353200000000000000000000000000000000002f000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000034393633363732000000000000000000000000000000000030000000', 'hex'), # raw data 152
        codecs.decode('00000000000000000000000000000000000000000000000000000000000000000000000100000000000000003337363137373100000000000000000000000000000000003100000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003231353939353100000000000000000000000000000000003200000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003136393734323700000000000000000000000000000000003300000000000000000000000000000000000000000000000000000000000000000000000000000100000000', 'hex'), # raw data 256
        codecs.decode('0000000033373336323437000000000000000000000000000000000034000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000031323930313635000000000000000000000000000000000035000000000000000000000000000000000000000000000000000000', 'hex'), # raw data 128
        codecs.decode('0000000000000000000000010000000000000000333236333636330000000000000000000000000000000000360000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000393031353036000000000000000000000000000000000000370000000000000000000000', 'hex'), # raw data 128
        codecs.decode('0000000000000000000000000000000000000000000000000000000100000000000000003238313732393300000000000000000000000000000000003800000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003437303630333800000000000000000000000000', 'hex'), # raw data 128
        codecs.decode('5050827df8030000dd05037d04001600000000003900000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003136343731353600000000000000000000000000000000003a0000000000000000000000000000000000000000000000000000000000000000000000', 'hex'), # DATA 1016 -8 (util 112)
        codecs.decode('0000000100000000000000003530313435310000000000000000000000000000000000003b00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003534363236373300000000000000000000000000000000003c00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003533363730310000000000000000000000000000000000003d00000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003435383033303700000000000000000000000000000000003e000000', 'hex'), # raw data 256
        codecs.decode('00000000000000000000000000000000000000000000000000000000000000000000000100000000000000003136333835333200000000000000000000000000000000003f000e3336323634313900000000000000000000000000000000000000000000000000000000000100000000000000003336323634313900000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003233323331383500000000000000000000000000000000004100000000000000000000000000000000000000000000000000000000000000000000000000000100000000', 'hex'), # raw data 256
        codecs.decode('0000000035323930373337000000000000000000000000000000000042000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000033393839303636000000000000000000000000000000000043000000000000000000000000000000000000000000000000000000', 'hex'), # raw data 128
        codecs.decode('0000000000000000000000010000000000000000343033323930390000000000000000000000000000000000440000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000323034363338380000000000000000000000000000000000450000000000000000000000', 'hex'), # raw data 128
        codecs.decode('0000000000000000000000000000000000000000000000000000000100000000000000003733383730330000000000000000000000000000000000004600000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000003239313836333600000000000000000000000000', 'hex'), # raw data 128
        codecs.decode('5050827d0c000000dd0507fa0500160000000000', 'hex'), # DATA 12-8 (util 4 ok) and ACK OK!!!
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # CMD_ACK_OK for get_users TODO: generate proper sequenced response
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # CMD_ACK_OK for free_data TODO: generate proper sequenced response
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # CMD_ACK_OK for exit TODO: generate proper sequenced response
    ]
    #begin
    zk = ZK('192.168.1.201') # , verbose=True)
    conn = zk.connect()
    socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
    users = conn.get_users()
    #print (users) #debug
    socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000de05aebd5c3c1700', 'hex')) #get users
    self.assertEqual(len(users), 70, "incorrect size %s" % len(users))
    # Spot-check the second user record against the captured fixture.
    usu = users[1]
    self.assertIsInstance(usu.uid, int, "uid should be int() %s" % type(usu.uid))
    if sys.version_info >= (3, 0):
        self.assertIsInstance(usu.user_id, (str, bytes), "user_id should be str() or bytes() %s" % type(usu.user_id))
    else:
        self.assertIsInstance(usu.user_id, (str, unicode), "user_id should be str() or unicode() %s" % type(usu.user_id))
    self.assertEqual(usu.uid, 2, "incorrect uid %s" % usu.uid)
    self.assertEqual(usu.user_id, "3494866", "incorrect user_id %s" % usu.user_id)
    # Fixed assertion message: this checks the name, not the uid.
    self.assertEqual(usu.name, "NievesLopez", "incorrect name %s" % usu.name)
    conn.disconnect()
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def _test_tcp_get_template(self, helper, socket):
    """ can get empty? """
    # NOTE(review): disabled (leading underscore) and currently broken if
    # re-enabled: the block below references an undefined name `users`
    # (`usu = users[3]`) and asserts User-style attributes (uid/user_id/name)
    # on what get_templates() returns — looks like an unfinished copy/paste
    # from a get_users test. Confirm intended assertions before enabling.
    helper.return_value.test_ping.return_value = True # ping simulated
    helper.return_value.test_tcp.return_value = 0 # helper tcp ok
    # Scripted device replies: connect ACK, ack-with-size, PREPARE_DATA,
    # then a generic ACK (sequencing still TODO per original comments).
    socket.return_value.recv.side_effect = [
        codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d15000000d007acf93064160000941d0000941d0000b400be00', 'hex'), # ack ok with size 7572
        codecs.decode('5050827d10000000dc05477830641700941d000000000100', 'hex'), #prepare data
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
        #codecs.decode('5050827d08000000d00745b2cf451b00', 'hex') # tcp random CMD_ACK_OK TODO: generate proper sequenced response
    ]
    #begin
    zk = ZK('192.168.1.201', verbose=True)
    conn = zk.connect()
    socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
    templates = conn.get_templates()
    self.assertEqual(len(templates), 6, "incorrect size %s" % len(templates))
    #assert one user
    usu = users[3]  # NOTE(review): `users` is undefined here — NameError if run
    self.assertIsInstance(usu.uid, int, "uid should be int() %s" % type(usu.uid))
    if sys.version_info >= (3, 0):
        self.assertIsInstance(usu.user_id, (str, bytes), "user_id should be str() or bytes() %s" % type(usu.user_id))
    else:
        self.assertIsInstance(usu.user_id, (str, unicode), "user_id should be str() or unicode() %s" % type(usu.user_id))
    self.assertEqual(usu.uid, 4, "incorrect uid %s" % usu.uid)
    self.assertEqual(usu.user_id, "831", "incorrect user_id %s" % usu.user_id)
    self.assertEqual(usu.name, "NN-831", "incorrect uid %s" % usu.name) # generated
    conn.disconnect()
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def _test_tcp_get_template_1(self, helper, socket):
    """ cchekc correct template 1 """
    # NOTE(review): disabled (leading underscore). Unlike the "_1f" (fixed)
    # variant below, this version compares template.mark against a str
    # literal instead of bytes, and one DATA chunk appears split mid-packet
    # — presumably why it was superseded; confirm before re-enabling.
    helper.return_value.test_ping.return_value = True # ping simulated
    helper.return_value.test_tcp.return_value = 0 # helper tcp ok
    # Scripted device replies: connect ACK, PREPARE_DATA (1244 bytes), then
    # the template streamed as DATA + raw fragments, then ACKs.
    socket.return_value.recv.side_effect = [
        codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d10000000dc055558d0983200dc040000f0030000', 'hex'), # tcp PREPARE_DATA 1244
        codecs.decode('5050827df8030000dd0500f4000032004d9853533231000004dbda0408050709ced000001cda69010000008406316adb0c0012062900d000aad221001600390caf001cdbb106240031007e033bdb3b00e9067700850083d42b004300c503f40043dbd6037b005000460ea7db5900910f90009f0012d5e7005c00970a5f006ddb', 'hex'), # DATA (tcp 1016, actual 112?)
        codecs.decode('930fa1009a00560f86db9d00820e86006f007dd3f400ab00a60fcd01b7dbb00b4b00bd0079083adbc00045035d000600c1df7300cc0039049e00dddb380e8c00da00e30dd8dbdc00220e130027004dd9f500e3009d0a6a00e9db26090001ef00ea03c5dbf0002306', 'hex'), #raw data 104
        codecs.decode('d000380028d83400ff00430f6200fbdba70dfb0002016203c5db0201a5044b00c10132d4de0006019f080a000cdab70541000f01fe0f19db1901c902e600dc0198d839002f01360ed80037dabd04d4003301520104da38014f01a100830196d5f5004b015c0411005cdacd03bc67ab8d162b48ad18f7fec7448e448387afa1a3', 'hex'), # raw 128
        codecs.decode('062b37ca3cf9f53c8087f9150926e03335df1b71aedbd0f2', 'hex'), # raw 24
        codecs.decode('b40da90541168df1551f70fc15b51bf26d7d4501bf12915e6485fd966f0ba2072728987dc1018a12ab105ec7aa003508fef08a49b923f3e85e42edf5ea861bd1600d23151787fc78d522f38431883e809f0e4dd2008ecd8ed97670035acf0c763503f27c37ec76d982806986c6016bf952d01e0673820570a87e1a236005ad81', 'hex'), # raw 128
        codecs.decode('7d8734949952bb929d81e5fdbcf99ca0c4886d8c65098c0e9aa6ac81e103c684607951d03b0ce9f0cd785885ad27d4f61bfc5de8bc7411de8d8f5910c518e004e9229304f90f9a891395912680ebc6f4c57fd3fceeb684f7c18ba78107fc2e16073e89f6d6b67fbb', 'hex'), # raw 104
        codecs.decode('fb11e2feb3effd0e5391c61da77176359f7e4d8a0ff3090a01204501c76a19af07002b003ac0042300dbab0113c2fa07c56e02cbc32bc10400a1c31349df0008102d2a04c5120c9b8904008f0810fb0404c20f3a6407006fd709fbecfe0400041529f60304fd1931fb0b006ede0c391bc1c0c0460e00a3210b1a34c2ffffc3fd', 'hex'), # raw 128
        codecs.decode('980f04832806404a5bc1940505da86292d0f0056f600f925', 'hex'), # raw 24
        codecs.decode('5c43c243ff06c5733a5d85c7080040473f3d31dd01774d8983c4c000778982750b009459d551c426c3c0170900929b17fba3fc780800376135fefbe0ff1100396aed3b3146265ac0c1ffff15c5357232fffdc0fdc03f3bc141914514003f85e738fdfa2441ff5cc0ff45951504ec7ee9c0fac1fc053dc424c0554affc103c5f8', 'hex'), # raw 128
        codecs.decode('94f2fd0e00668b06eac1f9b3c3fdc2fd08008388f3ef460a00869e13a56079cf013fb82d22c394c2c619c3c33ac45304c527e19d4d0c008aab1305c0fa1aff6050110083687dc713c396c0c2c1c104c1c6b10f0072b54cc14d83c519c1760e0055b9f8c1f8187486', 'hex'), # raw 104
        codecs.decode('750d00797ff0fdee593bc1090086781657267f11004cc1375050827df4000000dd0548b10100320038ffc024c2fec4c1c18c05c4fad0013ec54051c2879d00cb56521cc2c204c50fc2e62506008eca1a05fec5250d0072d23dc344c2c45cc10a008bd31a3afefa1a92c0080034e68642c45d0d005bdd376707c08da002008ede', 'hex'), # raw 128
        codecs.decode('24ffc100e405213306002de78637c4de011de846ff98c100', 'hex'), # raw 24
        codecs.decode('07283b590300fef3f5f800da10f5494b031000071819061035084365650b14900834c0c1c4c104c1c5a302100e1134c1c01045c83c8806110e2185c22edd11082424fec006ff02cb052834c3c073c910d4eb965b3833ff0bc582cce18d876a051106f337f826c00410013d2b05c200ca003f4cfeff03d56454ccc101', 'hex'), # raw 124
        codecs.decode('5050827d08000000d007fcf701003200', 'hex'), # tcp CMD_ACK_OK
        #codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
    ]
    #begin
    zk = ZK('192.168.1.201', verbose=True)
    conn = zk.connect()
    socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
    template = conn.get_user_template(14, 1)
    self.assertEqual(template.size, 1243, "incorrect size %s" % template.size)
    # NOTE(review): str literal here; the fixed variant compares bytes.
    self.assertEqual(template.mark, "4d98535332310000...feff03d56454ccc1", "incorrect mark %s" % template.mark)
    self.assertEqual(template.uid, 14, "incorrect uid %s" % template.uid)
    conn.disconnect()
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_get_template_1f(self, helper, socket):
    """ cchekc correct template 1 fixed"""
    # Fixed variant of _test_tcp_get_template_1: recv chunks are regrouped to
    # match real packet boundaries, and template.mark is compared as bytes.
    helper.return_value.test_ping.return_value = True # ping simulated
    helper.return_value.test_tcp.return_value = 0 # helper tcp ok
    # Scripted device replies, in exact protocol order: connect ACK,
    # PREPARE_DATA (1244 bytes), the template as DATA + raw fragments, ACKs.
    socket.return_value.recv.side_effect = [
        codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d10000000dc055558d0983200dc040000f0030000', 'hex'), # tcp PREPARE_DATA 1244
        codecs.decode('5050827df8030000dd0500f4000032004d9853533231000004dbda0408050709ced000001cda69010000008406316adb0c0012062900d000aad221001600390caf001cdbb106240031007e033bdb3b00e9067700850083d42b004300c503f40043dbd6037b005000460ea7db5900910f90009f0012d5e7005c00970a5f006ddb930fa1009a00560f86db9d00820e86006f007dd3f400ab00a60fcd01b7dbb00b4b00bd0079083adbc00045035d000600c1df7300cc0039049e00dddb380e8c00da00e30dd8dbdc00220e130027004dd9f500e3009d0a6a00e9db26090001ef00ea03c5dbf0002306', 'hex'), # DATA (tcp 1016, actual 112 +104
        codecs.decode('d000380028d83400ff00430f6200fbdba70dfb0002016203c5db0201a5044b00c10132d4de0006019f080a000cdab70541000f01fe0f19db1901c902e600dc0198d839002f01360ed80037dabd04d4003301520104da38014f01a100830196d5f5004b015c0411005cdacd03bc67ab8d162b48ad18f7fec7448e448387afa1a3062b37ca3cf9f53c8087f9150926e03335df1b71aedbd0f2', 'hex'), # raw 128 + 24
        codecs.decode('b40da90541168df1551f70fc15b51bf26d7d4501bf12915e6485fd966f0ba2072728987dc1018a12ab105ec7aa003508fef08a49b923f3e85e42edf5ea861bd1600d23151787fc78d522f38431883e809f0e4dd2008ecd8ed97670035acf0c763503f27c37ec76d982806986c6016bf952d01e0673820570a87e1a236005ad817d8734949952bb929d81e5fdbcf99ca0c4886d8c65098c0e9aa6ac81e103c684607951d03b0ce9f0cd785885ad27d4f61bfc5de8bc7411de8d8f5910c518e004e9229304f90f9a891395912680ebc6f4c57fd3fceeb684f7c18ba78107fc2e16073e89f6d6b67fbb', 'hex'), # raw 128 +104
        codecs.decode('fb11e2feb3effd0e5391c61da77176359f7e4d8a0ff3090a01204501c76a19af07002b003ac0042300dbab0113c2fa07c56e02cbc32bc10400a1c31349df0008102d2a04c5120c9b8904008f0810fb0404c20f3a6407006fd709fbecfe0400041529f60304fd1931fb0b006ede0c391bc1c0c0460e00a3210b1a34c2ffffc3fd980f04832806404a5bc1940505da86292d0f0056f600f925', 'hex'), # raw 128 +24
        codecs.decode('5c43c243ff06c5733a5d85c7080040473f3d31dd01774d8983c4c000778982750b009459d551c426c3c0170900929b17fba3fc780800376135fefbe0ff1100396aed3b3146265ac0c1ffff15c5357232fffdc0fdc03f3bc141914514003f85e738fdfa2441ff5cc0ff45951504ec7ee9c0fac1fc053dc424c0554affc103c5f894f2fd0e00668b06eac1f9b3c3fdc2fd08008388f3ef460a00869e13a56079cf013fb82d22c394c2c619c3c33ac45304c527e19d4d0c008aab1305c0fa1aff6050110083687dc713c396c0c2c1c104c1c6b10f0072b54cc14d83c519c1760e0055b9f8c1f8187486', 'hex'), # raw 128 +104
        codecs.decode('750d00797ff0fdee593bc1090086781657267f11004cc137', 'hex'), # raw 24?
        codecs.decode('5050827df4000000dd0548b10100320038ffc024c2fec4c1c18c05c4fad0013ec54051c2879d00cb56521cc2c204c50fc2e62506008eca1a05fec5250d0072d23dc344c2c45cc10a008bd31a3afefa1a92c0080034e68642c45d0d005bdd376707c08da002008ede24ffc100e405213306002de78637c4de011de846ff98c100', 'hex'), # raw 128-24 (104) +24
        codecs.decode('07283b590300fef3f5f800da10f5494b031000071819061035084365650b14900834c0c1c4c104c1c5a302100e1134c1c01045c83c8806110e2185c22edd11082424fec006ff02cb052834c3c073c910d4eb965b3833ff0bc582cce18d876a051106f337f826c00410013d2b05c200ca003f4cfeff03d56454ccc101', 'hex'), # raw 124
        codecs.decode('5050827d08000000d007fcf701003200', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
    ]
    #begin
    zk = ZK('192.168.1.201') #, verbose=True)
    conn = zk.connect()
    socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
    template = conn.get_user_template(14, 1)
    self.assertEqual(template.size, 1243, "incorrect size %s" % template.size)
    self.assertEqual(template.mark, b"4d98535332310000...feff03d56454ccc1", "incorrect mark %s" % template.mark)
    self.assertEqual(template.uid, 14, "incorrect uid %s" % template.uid)
    conn.disconnect()
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_get_template_2f(self, helper, socket):
    """ cchekc correct template 2 fixed"""
    # Second fixed template-download scenario: a 1011-byte template whose
    # final bytes arrive in a short trailing packet.
    helper.return_value.test_ping.return_value = True # ping simulated
    helper.return_value.test_tcp.return_value = 0 # helper tcp ok
    # Scripted device replies, in exact protocol order: connect ACK,
    # PREPARE_DATA (1011 bytes), DATA + raw fragments, short tail, ACKs.
    socket.return_value.recv.side_effect = [
        codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d10000000dc053b59d0983500f3030000f0030000', 'hex'), # tcp PREPARE_DATA 1011
        codecs.decode('5050827df8030000dd056855000035004ab153533231000003f2f10408050709ced000001bf36901000000831f256cf23e00740f4c008900f2f879005500fe0fe3005bf2d30a60005c00a00f32f26600580a2700ad00e3fd98007500800f000082f21a0f68008300300e5bf28d00570930004b00dafd4c009a00dd090900a8f2270f8600ad008a0b1ff2b000480f4400730040fc5400b800430f4400c6f2370ab100ca00f30ecbf2cb002f0f4a001300c7fdaa00e400b50c4300e6f2b706bf00ea00f90668f2f2002e0dad003000b7f7cf00f600350cbe0008f31f0dd0000c017101cbf20f019c01', 'hex'), # DATA (tcp 1016, actual 112 +104
        codecs.decode('5e00d4012dfdda001301a408e00019f3400c12002201fc0c4ff2570193096d0092018dfc3c7a62107e85688f818ff39a358ef99acb0fee06d47da2e2116a7c77f102a57bd1890a6a598b5ee2db0a0f64a384b28da105f29ca7eff9a137194560847d1565aa827ffc69705ffa8189f19f1f9ca10abbf2160f791a6e0dd8af0f723e062b6e84000a997780c100f6684b8016188780d7f44d0a', 'hex'), # raw 128 + 24
        codecs.decode('5083790fd0fa1a089ef44b807572db9b0900d9795083397a8780ca0161091489ae7b7c134278a6004c00b68bcf80e9f98982509a0e01dbf02e6a441a21138a70ddeaf1f9b16a8f1025f2ceef74f369094b70b2fb3a176bb339f9860f6459f304bb679757b3fca891ba733c4c6444c72032f303131c9705004b3079bc0600a03a89c405fdc03205004b456254c6006fb276c20a00a94343c2fc30779505001b4f862804f27d51faff31c2cd007fa50141c12f1800085a9431c181c4fe83c10674c33275300600245c89fcc0ad07005b5c6b88040503a96267c1830700e9695d30c1c2510a0031ae57', 'hex'), # raw 128 +104
        codecs.decode('5fa47a04007c7574510f039e80f0fd3bfefe9d55c3fa01c7841746ff06fa1ff2ee8ea07e787e0689c133c1c3c0c2ffc004c1fcae07005990578c040d03dc9350c0c4376a3a8623f2f29ea2c17c67b0928330726b6a83ff08c582afa8c5c3c3c1c3fec300895f0efdfd2809000bae21be5afd0c001cb68c59c20dc3fefda205004fb8150cfbc1030089bbffc30ef245bc467bc07404c288fd', 'hex'), # raw 128 +24
        codecs.decode('0155bd46786445c3c130c0040091c52938c320f305c8a4c1ff7b05c08a63c3c2c1c2c3c13ac1c132c1ffc2c0c0c205c3c336050084c9306ec100b13f352c0700cacdf56b72f611f61a2d1605d5ef41a4fec0f818004c17c63e0dfef9c0fdfffe3b3649a0fac00c004ada856a6464c20b006cf83145c1c032c23d04109804d57617e28f07a0fe3bff3bfbfe0afc2ac0fdc138c01095f91bc543281101cbb0c19758fe9282c3c26270737997c1c0c2c0c204c70be27f0f2084c5fc070913ad1731c2c1c37b0125130c1ba958c049ff4e9bc6529262c1c290c2076ac2ed11e718a9554b068bc730b196', 'hex'), # raw 128 +104
        codecs.decode('c2c1c2c1077dfc830210074929c1c910c5af81c0c1ffc2fe', 'hex'), # raw 24?
        codecs.decode('5050827d0b000000dd054ba201003500a05701', 'hex'), # raw 43-24 (104)
        codecs.decode('5050827d08000000d007fcf701003200', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
    ]
    #begin
    zk = ZK('192.168.1.201')#, verbose=True)
    conn = zk.connect()
    socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
    template = conn.get_user_template(14, 1)
    self.assertEqual(template.size, 1010, "incorrect size %s" % template.size)
    self.assertEqual(template.mark, b"4ab1535332310000...81c0c1ffc2fea057", "incorrect mark %s" % template.mark)
    self.assertEqual(template.uid, 14, "incorrect uid %s" % template.uid)
    conn.disconnect()
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_live_connect(self, helper, socket):
    """ check live_capture 12 bytes"""
    # Simulates a live-capture session where the attendance event payload
    # arrives in the 12-byte reg_event format.
    helper.return_value.test_ping.return_value = True # ping simulated
    helper.return_value.test_tcp.return_value = 0 # helper tcp ok
    # Scripted device replies: connect ACK, sizes, user table, ACKs, then the
    # registered live event that live_capture() should decode.
    socket.return_value.recv.side_effect = [
        codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d64000000d007a3159663130000000000000000000000000000000000070000000000000006000000000000005d020000000000000f0c0000000000000100000000000000b80b000010270000a0860100b20b00000927000043840100000000000000', 'hex'), #sizes
        codecs.decode('5050827d04020000dd05942c96631500f801000001000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003830380000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003832310000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833350000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833310000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833320000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003836000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000383432000000000000000000000000000000000000000000','hex'), #DATA directly(not ok)
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
        codecs.decode('5050827d10000000dc053b59d0983500f401ae4301000000f19449000000120c07130906', 'hex'), # tcp PREPARE_DATA 1011
        codecs.decode('5050827df8030000f401ae4301000000f19449000000120c07130906', 'hex'), # reg_event!
        codecs.decode('5050827d08000000d007fcf701003200', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
    ]
    #begin
    zk = ZK('192.168.1.201')#, verbose=True)
    conn = zk.connect()
    socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
    # Take exactly one event: setting end_live_capture inside the loop stops
    # the generator after the first yielded attendance record.
    for att in conn.live_capture():
        #print att
        conn.end_live_capture = True
        self.assertEqual(att.user_id, "4822257", "incorrect user_id %s" % att.user_id)
    conn.disconnect()
@patch('zk.base.socket')
@patch('zk.base.ZK_helper')
def test_tcp_live_connect_small(self, helper, socket):
    """ check live_capture 32 bytes"""
    # Same scenario as test_tcp_live_connect but the live event payload uses
    # the larger 32-byte reg_event format (user_id as padded string).
    helper.return_value.test_ping.return_value = True # ping simulated
    helper.return_value.test_tcp.return_value = 0 # helper tcp ok
    # Scripted device replies: connect ACK, sizes, user table, ACKs, then the
    # registered live event that live_capture() should decode.
    socket.return_value.recv.side_effect = [
        codecs.decode('5050827d08000000d0075fb2cf450100', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d64000000d007a3159663130000000000000000000000000000000000070000000000000006000000000000005d020000000000000f0c0000000000000100000000000000b80b000010270000a0860100b20b00000927000043840100000000000000', 'hex'), #sizes
        codecs.decode('5050827d04020000dd05942c96631500f801000001000e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003830380000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003832310000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833350000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833310000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003833320000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003836000000000000000000000000000000000000000000000c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000383432000000000000000000000000000000000000000000','hex'), #DATA directly(not ok)
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
        codecs.decode('5050827d10000000dc053b59d0983500f401ae4301000000f19449000000120c07130906', 'hex'), # tcp PREPARE_DATA 1011
        codecs.decode('5050827df8030000f401ae43010000003131343030363400000000000000000000000000000000000f00120b1d0c3703', 'hex'), # reg_event!
        codecs.decode('5050827d08000000d007fcf701003200', 'hex'), # tcp CMD_ACK_OK
        codecs.decode('5050827d08000000d00745b2cf451b00', 'hex'), # tcp random CMD_ACK_OK TODO: generate proper sequenced response
    ]
    #begin
    zk = ZK('192.168.1.201')#, verbose=True)
    conn = zk.connect()
    socket.return_value.send.assert_called_with(codecs.decode('5050827d08000000e80317fc00000000', 'hex'))
    # Take exactly one event: setting end_live_capture inside the loop stops
    # the generator after the first yielded attendance record.
    for att in conn.live_capture():
        #print att
        conn.end_live_capture = True
        self.assertEqual(att.user_id, "1140064", "incorrect user_id %s" % att.user_id)
    conn.disconnect()
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 98.196078
| 1,107
| 0.800978
| 3,383
| 50,080
| 11.712977
| 0.099616
| 0.044215
| 0.017161
| 0.018549
| 0.479521
| 0.471495
| 0.44906
| 0.436063
| 0.430763
| 0.424757
| 0
| 0.518666
| 0.12492
| 50,080
| 509
| 1,108
| 98.388998
| 0.385519
| 0.090695
| 0
| 0.582547
| 0
| 0
| 0.605839
| 0.548159
| 0
| 1
| 0
| 0.001965
| 0.181604
| 1
| 0.049528
| false
| 0.009434
| 0.025943
| 0
| 0.07783
| 0.018868
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
978012f7001f62997ca4a87c8b56c83fbab37423
| 139
|
py
|
Python
|
Python/Minha_Biblioteca/criar_menu/clientes.py
|
Brun0C/projeto_python
|
ac544bfeb27447e820e4e2b0bd936de00d8b3348
|
[
"MIT"
] | null | null | null |
Python/Minha_Biblioteca/criar_menu/clientes.py
|
Brun0C/projeto_python
|
ac544bfeb27447e820e4e2b0bd936de00d8b3348
|
[
"MIT"
] | null | null | null |
Python/Minha_Biblioteca/criar_menu/clientes.py
|
Brun0C/projeto_python
|
ac544bfeb27447e820e4e2b0bd936de00d8b3348
|
[
"MIT"
] | null | null | null |
def cadastrar_cliente():
    """Register a client (stub): prints a success confirmation."""
    mensagem = 'Operação realizada com sucesso'
    print(mensagem)
def listar_clientes():
    """List clients (stub): prints a success confirmation."""
    mensagem = 'Operação realizada com sucesso'
    print(mensagem)
| 17.375
| 43
| 0.733813
| 16
| 139
| 6.25
| 0.625
| 0.26
| 0.44
| 0.5
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165468
| 139
| 7
| 44
| 19.857143
| 0.862069
| 0
| 0
| 0.5
| 0
| 0
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
c10f66fe953a31fbdb5b635638ba1cf969c9886e
| 349,023
|
py
|
Python
|
pyidf/unitary_equipment.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 19
|
2015-12-08T23:33:51.000Z
|
2022-01-31T04:41:10.000Z
|
pyidf/unitary_equipment.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 2
|
2019-10-04T10:57:00.000Z
|
2021-10-01T06:46:17.000Z
|
pyidf/unitary_equipment.py
|
marcelosalles/pyidf
|
c2f744211572b5e14e29522aac1421ba88addb0e
|
[
"Apache-2.0"
] | 7
|
2015-11-04T02:25:01.000Z
|
2021-12-08T03:14:28.000Z
|
""" Data objects in group "Unitary Equipment"
"""
from collections import OrderedDict
import logging
from pyidf.helper import DataObject
# Shared module-level logger for pyidf; the NullHandler suppresses the
# "no handlers could be found" warning when the host application does not
# configure logging itself.
logger = logging.getLogger("pyidf")
logger.addHandler(logging.NullHandler())
class AirLoopHvacUnitarySystem(DataObject):
""" Corresponds to IDD object `AirLoopHVAC:UnitarySystem`
AirloopHVAC:UnitarySystem is a generic HVAC system type that allows any
configuration of coils and/or fan. This object is a replacement of other
AirloopHVAC objects. This object can be used in outdoor air systems,
outdoor air units, air loops, and as zone equipment if desired.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'control type',
{'name': u'Control Type',
'pyname': u'control_type',
'default': u'Load',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Load',
u'SetPoint'],
'autocalculatable': False,
'type': 'alpha'}),
(u'controlling zone or thermostat location',
{'name': u'Controlling Zone or Thermostat Location',
'pyname': u'controlling_zone_or_thermostat_location',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'dehumidification control type',
{'name': u'Dehumidification Control Type',
'pyname': u'dehumidification_control_type',
'default': u'None',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'Multimode',
u'CoolReheat'],
'autocalculatable': False,
'type': 'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'air inlet node name',
{'name': u'Air Inlet Node Name',
'pyname': u'air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'air outlet node name',
{'name': u'Air Outlet Node Name',
'pyname': u'air_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'supply fan object type',
{'name': u'Supply Fan Object Type',
'pyname': u'supply_fan_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Fan:OnOff',
u'Fan:ConstantVolume',
u'Fan:VariableVolume',
u'Fan:ComponentModel'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply fan name',
{'name': u'Supply Fan Name',
'pyname': u'supply_fan_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'fan placement',
{'name': u'Fan Placement',
'pyname': u'fan_placement',
'required-field': False,
'autosizable': False,
'accepted-values': [u'BlowThrough',
u'DrawThrough'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan operating mode schedule name',
{'name': u'Supply Air Fan Operating Mode Schedule Name',
'pyname': u'supply_air_fan_operating_mode_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heating coil object type',
{'name': u'Heating Coil Object Type',
'pyname': u'heating_coil_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:DX:SingleSpeed',
u'Coil:Heating:DX:MultiSpeed',
u'Coil:Heating:DX:VariableSpeed',
u'Coil:Heating:WaterToAirHeatPump:ParameterEstimation',
u'Coil:Heating:WaterToAirHeatPump:EquationFit',
u'Coil:Heating:WaterToAirHeatPump:VariableSpeedEquationFit',
u'Coil:Heating:Gas',
u'Coil:Heating:Gas:MultiStage',
u'Coil:Heating:Electric',
u'Coil:Heating:Electric:MultiStage',
u'Coil:Heating:Water',
u'Coil:Heating:Steam',
u'Coil:Heating:Desuperheater',
u'Coil:UserDefined'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil name',
{'name': u'Heating Coil Name',
'pyname': u'heating_coil_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'dx heating coil sizing ratio',
{'name': u'DX Heating Coil Sizing Ratio',
'pyname': u'dx_heating_coil_sizing_ratio',
'default': 1.0,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'cooling coil object type',
{'name': u'Cooling Coil Object Type',
'pyname': u'cooling_coil_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Cooling:DX:SingleSpeed',
u'Coil:Cooling:DX:TwoSpeed',
u'Coil:Cooling:DX:MultiSpeed',
u'Coil:Cooling:DX:VariableSpeed',
u'Coil:Cooling:DX:TwoStageWithHumidityControlMode',
u'Coil:Cooling:DX:SingleSpeed:ThermalStorage',
u'CoilSystem:Cooling:DX:HeatExchangerAssisted',
u'Coil:Cooling:WaterToAirHeatPump:ParameterEstimation',
u'Coil:Cooling:WaterToAirHeatPump:EquationFit',
u'Coil:Cooling:WaterToAirHeatPump:VariableSpeedEquationFit',
u'Coil:Cooling:Water',
u'Coil:Cooling:Water:DetailedGeometry',
u'CoilSystem:Cooling:Water:HeatExchangerAssisted',
u'Coil:UserDefined'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling coil name',
{'name': u'Cooling Coil Name',
'pyname': u'cooling_coil_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'use doas dx cooling coil',
{'name': u'Use DOAS DX Cooling Coil',
'pyname': u'use_doas_dx_cooling_coil',
'default': u'No',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Yes',
u'No'],
'autocalculatable': False,
'type': 'alpha'}),
(u'doas dx cooling coil leaving minimum air temperature',
{'name': u'DOAS DX Cooling Coil Leaving Minimum Air Temperature',
'pyname': u'doas_dx_cooling_coil_leaving_minimum_air_temperature',
'default': 2.0,
'maximum': 7.2,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'latent load control',
{'name': u'Latent Load Control',
'pyname': u'latent_load_control',
'default': u'SensibleOnlyLoadControl',
'required-field': False,
'autosizable': False,
'accepted-values': [u'SensibleOnlyLoadControl',
u'LatentOnlyLoadControl',
u'LatentWithSensibleLoadControl',
u'LatentOrSensibleLoadControl'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supplemental heating coil object type',
{'name': u'Supplemental Heating Coil Object Type',
'pyname': u'supplemental_heating_coil_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Gas',
u'Coil:Heating:Electric',
u'Coil:Heating:Desuperheater',
u'Coil:Heating:Water',
u'Coil:Heating:Steam',
u'Coil:UserDefined'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supplemental heating coil name',
{'name': u'Supplemental Heating Coil Name',
'pyname': u'supplemental_heating_coil_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling supply air flow rate method',
{'name': u'Cooling Supply Air Flow Rate Method',
'pyname': u'cooling_supply_air_flow_rate_method',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'SupplyAirFlowRate',
u'FlowPerFloorArea',
u'FractionOfAutosizedCoolingValue',
u'FlowPerCoolingCapacity'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling supply air flow rate',
{'name': u'Cooling Supply Air Flow Rate',
'pyname': u'cooling_supply_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling supply air flow rate per floor area',
{'name': u'Cooling Supply Air Flow Rate Per Floor Area',
'pyname': u'cooling_supply_air_flow_rate_per_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'cooling fraction of autosized cooling supply air flow rate',
{'name': u'Cooling Fraction of Autosized Cooling Supply Air Flow Rate',
'pyname': u'cooling_fraction_of_autosized_cooling_supply_air_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cooling supply air flow rate per unit of capacity',
{'name': u'Cooling Supply Air Flow Rate Per Unit of Capacity',
'pyname': u'cooling_supply_air_flow_rate_per_unit_of_capacity',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-W'}),
(u'heating supply air flow rate method',
{'name': u'Heating Supply Air Flow Rate Method',
'pyname': u'heating_supply_air_flow_rate_method',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'SupplyAirFlowRate',
u'FlowPerFloorArea',
u'FractionOfAutosizedHeatingValue',
u'FlowPerHeatingCapacity'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating supply air flow rate',
{'name': u'Heating Supply Air Flow Rate',
'pyname': u'heating_supply_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating supply air flow rate per floor area',
{'name': u'Heating Supply Air Flow Rate Per Floor Area',
'pyname': u'heating_supply_air_flow_rate_per_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'heating fraction of autosized heating supply air flow rate',
{'name': u'Heating Fraction of Autosized Heating Supply Air Flow Rate',
'pyname': u'heating_fraction_of_autosized_heating_supply_air_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'heating supply air flow rate per unit of capacity',
{'name': u'Heating Supply Air Flow Rate Per Unit of Capacity',
'pyname': u'heating_supply_air_flow_rate_per_unit_of_capacity',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-W'}),
(u'no load supply air flow rate method',
{'name': u'No Load Supply Air Flow Rate Method',
'pyname': u'no_load_supply_air_flow_rate_method',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'SupplyAirFlowRate',
u'FlowPerFloorArea',
u'FractionOfAutosizedCoolingValue',
u'FractionOfAutosizedHeatingValue',
u'FlowPerCoolingCapacity',
u'FlowPerHeatingCapacity'],
'autocalculatable': False,
'type': 'alpha'}),
(u'no load supply air flow rate',
{'name': u'No Load Supply Air Flow Rate',
'pyname': u'no_load_supply_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'no load supply air flow rate per floor area',
{'name': u'No Load Supply Air Flow Rate Per Floor Area',
'pyname': u'no_load_supply_air_flow_rate_per_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'no load fraction of autosized cooling supply air flow rate',
{'name': u'No Load Fraction of Autosized Cooling Supply Air Flow Rate',
'pyname': u'no_load_fraction_of_autosized_cooling_supply_air_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'no load fraction of autosized heating supply air flow rate',
{'name': u'No Load Fraction of Autosized Heating Supply Air Flow Rate',
'pyname': u'no_load_fraction_of_autosized_heating_supply_air_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'no load supply air flow rate per unit of capacity during cooling operation',
{'name': u'No Load Supply Air Flow Rate Per Unit of Capacity During Cooling Operation',
'pyname': u'no_load_supply_air_flow_rate_per_unit_of_capacity_during_cooling_operation',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-W'}),
(u'no load supply air flow rate per unit of capacity during heating operation',
{'name': u'No Load Supply Air Flow Rate Per Unit of Capacity During Heating Operation',
'pyname': u'no_load_supply_air_flow_rate_per_unit_of_capacity_during_heating_operation',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-W'}),
(u'maximum supply air temperature',
{'name': u'Maximum Supply Air Temperature',
'pyname': u'maximum_supply_air_temperature',
'default': 80.0,
'required-field': False,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum outdoor dry-bulb temperature for supplemental heater operation',
{'name': u'Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation',
'pyname': u'maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation',
'default': 21.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'outdoor dry-bulb temperature sensor node name',
{'name': u'Outdoor Dry-Bulb Temperature Sensor Node Name',
'pyname': u'outdoor_drybulb_temperature_sensor_node_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'maximum cycling rate',
{'name': u'Maximum Cycling Rate',
'pyname': u'maximum_cycling_rate',
'default': 2.5,
'maximum': 5.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'cycles/hr'}),
(u'heat pump time constant',
{'name': u'Heat Pump Time Constant',
'pyname': u'heat_pump_time_constant',
'default': 60.0,
'maximum': 500.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u's'}),
(u'fraction of on-cycle power use',
{'name': u'Fraction of On-Cycle Power Use',
'pyname': u'fraction_of_oncycle_power_use',
'default': 0.01,
'maximum': 0.05,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'heat pump fan delay time',
{'name': u'Heat Pump Fan Delay Time',
'pyname': u'heat_pump_fan_delay_time',
'default': 60.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u's'}),
(u'ancillary on-cycle electric power',
{'name': u'Ancillary On-Cycle Electric Power',
'pyname': u'ancillary_oncycle_electric_power',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'W'}),
(u'ancillary off-cycle electric power',
{'name': u'Ancillary Off-Cycle Electric Power',
'pyname': u'ancillary_offcycle_electric_power',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'W'}),
(u'design heat recovery water flow rate',
{'name': u'Design Heat Recovery Water Flow Rate',
'pyname': u'design_heat_recovery_water_flow_rate',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'maximum temperature for heat recovery',
{'name': u'Maximum Temperature for Heat Recovery',
'pyname': u'maximum_temperature_for_heat_recovery',
'default': 80.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'heat recovery water inlet node name',
{'name': u'Heat Recovery Water Inlet Node Name',
'pyname': u'heat_recovery_water_inlet_node_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'heat recovery water outlet node name',
{'name': u'Heat Recovery Water Outlet Node Name',
'pyname': u'heat_recovery_water_outlet_node_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'design specification multispeed object type',
{'name': u'Design Specification Multispeed Object Type',
'pyname': u'design_specification_multispeed_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'UnitarySystemPerformance:Multispeed'],
'autocalculatable': False,
'type': 'alpha'}),
(u'design specification multispeed object name',
{'name': u'Design Specification Multispeed Object Name',
'pyname': u'design_specification_multispeed_object_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Unitary Equipment',
'min-fields': 14,
'name': u'AirLoopHVAC:UnitarySystem',
'pyname': u'AirLoopHvacUnitarySystem',
'required-object': False,
'unique-object': False}
@property
def name(self):
    """Getter for IDD field `Name` — the unique name of this Unitary System.

    Returns:
        str: stored value of `name`, or None if not set.
    """
    key = "Name"
    return self[key]

@name.setter
def name(self, value=None):
    """Setter for IDD field `Name`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Name"
    self[key] = value
@property
def control_type(self):
    """Getter for IDD field `Control Type`.

    Load control requires a Controlling Zone name; SetPoint control
    requires set points at the coil outlet node. Default value: Load.

    Returns:
        str: stored value of `control_type`, or None if not set.
    """
    key = "Control Type"
    return self[key]

@control_type.setter
def control_type(self, value="Load"):
    """Setter for IDD field `Control Type`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Control Type"
    self[key] = value
@property
def controlling_zone_or_thermostat_location(self):
    """Getter for IDD field `Controlling Zone or Thermostat Location`.

    Name of the zone where the thermostat is located; used only for
    Load based control and required when Control Type = Load.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Controlling Zone or Thermostat Location"
    return self[key]

@controlling_zone_or_thermostat_location.setter
def controlling_zone_or_thermostat_location(self, value=None):
    """Setter for IDD field `Controlling Zone or Thermostat Location`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Controlling Zone or Thermostat Location"
    self[key] = value
@property
def dehumidification_control_type(self):
    """Getter for IDD field `Dehumidification Control Type`.

    None: meet sensible load only. Multimode: activate enhanced
    dehumidification mode as needed and meet the sensible load (valid
    only with cooling coil type CoilSystem:Cooling:DX:HeatExchangerAssisted;
    a ZoneControl:Humidistat object is also required). CoolReheat: cool
    beyond the dry-bulb setpoint as required to meet the humidity
    setpoint (valid with all cooling coil types; when a heat exchanger
    assisted cooling coil is used the heat exchanger is locked on at all
    times; a ZoneControl:Humidistat object is also required).
    Default value: None.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Dehumidification Control Type"
    return self[key]

@dehumidification_control_type.setter
def dehumidification_control_type(self, value="None"):
    """Setter for IDD field `Dehumidification Control Type`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Dehumidification Control Type"
    self[key] = value
@property
def availability_schedule_name(self):
    """Getter for IDD field `Availability Schedule Name`.

    A schedule value greater than zero means the system is available to
    operate; a value less than or equal to zero means the unit must be
    off. If this field is blank, the system is always available.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Availability Schedule Name"
    return self[key]

@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
    """Setter for IDD field `Availability Schedule Name`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Availability Schedule Name"
    self[key] = value
@property
def air_inlet_node_name(self):
    """Getter for IDD field `Air Inlet Node Name`.

    Node used as the inlet air node for the unitary system.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Air Inlet Node Name"
    return self[key]

@air_inlet_node_name.setter
def air_inlet_node_name(self, value=None):
    """Setter for IDD field `Air Inlet Node Name`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Air Inlet Node Name"
    self[key] = value
@property
def air_outlet_node_name(self):
    """Getter for IDD field `Air Outlet Node Name`.

    Node used as the outlet air node for the unitary system.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Air Outlet Node Name"
    return self[key]

@air_outlet_node_name.setter
def air_outlet_node_name(self, value=None):
    """Setter for IDD field `Air Outlet Node Name`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Air Outlet Node Name"
    self[key] = value
@property
def supply_fan_object_type(self):
    """Getter for IDD field `Supply Fan Object Type`.

    Type of supply air fan, if included in the unitary system.
    Fan:ConstantVolume only works with continuous fan operating mode
    (supply air fan operating mode schedule values greater than 0).
    Specify Fan:OnOff when the Supply Air Fan Operating Mode Schedule
    Name field is left blank; Fan:VariableVolume when modeling VAV
    systems using setpoint based control with the fan inside the unitary
    system. Fan:ComponentModel may substitute for ConstantVolume or
    VariableVolume when more detailed fan modeling is required. The fan
    may instead be specified on the branch, in which case this field may
    be blank.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Supply Fan Object Type"
    return self[key]

@supply_fan_object_type.setter
def supply_fan_object_type(self, value=None):
    """Setter for IDD field `Supply Fan Object Type`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Supply Fan Object Type"
    self[key] = value
@property
def supply_fan_name(self):
    """Getter for IDD field `Supply Fan Name`.

    Name of the supply air fan, if included in the unitary system.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Supply Fan Name"
    return self[key]

@supply_fan_name.setter
def supply_fan_name(self, value=None):
    """Setter for IDD field `Supply Fan Name`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Supply Fan Name"
    self[key] = value
@property
def fan_placement(self):
    """Getter for IDD field `Fan Placement`.

    Accepted values per the schema: BlowThrough, DrawThrough.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Fan Placement"
    return self[key]

@fan_placement.setter
def fan_placement(self, value=None):
    """Setter for IDD field `Fan Placement`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Fan Placement"
    self[key] = value
@property
def supply_air_fan_operating_mode_schedule_name(self):
    """Getter for IDD field `Supply Air Fan Operating Mode Schedule Name`.

    A schedule value of 0 indicates cycling fan mode (fan cycles on and
    off in tandem with the cooling or heating coil); any other value
    indicates continuous fan mode. Provide a schedule with non-zero
    values when high humidity control is specified. Leaving this blank
    defaults to constant fan mode for the entire simulation period.
    Not used with set point based control, where the model assumes
    constant fan mode operation.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Supply Air Fan Operating Mode Schedule Name"
    return self[key]

@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
    """Setter for IDD field `Supply Air Fan Operating Mode Schedule Name`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Supply Air Fan Operating Mode Schedule Name"
    self[key] = value
@property
def heating_coil_object_type(self):
    """Getter for IDD field `Heating Coil Object Type`.

    Type of heating coil, if included in the unitary system.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Heating Coil Object Type"
    return self[key]

@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
    """Setter for IDD field `Heating Coil Object Type`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Heating Coil Object Type"
    self[key] = value
@property
def heating_coil_name(self):
    """Getter for IDD field `Heating Coil Name`.

    Name of the heating coil, if included in the unitary system.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Heating Coil Name"
    return self[key]

@heating_coil_name.setter
def heating_coil_name(self, value=None):
    """Setter for IDD field `Heating Coil Name`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Heating Coil Name"
    self[key] = value
@property
def dx_heating_coil_sizing_ratio(self):
    """Getter for IDD field `DX Heating Coil Sizing Ratio`.

    Adjusts heat pump heating capacity with respect to DX cooling
    capacity; used only for heat pump configurations (a DX cooling and
    DX heating coil is used). Default value: 1.0.

    Returns:
        float: stored value, or None if not set.
    """
    key = "DX Heating Coil Sizing Ratio"
    return self[key]

@dx_heating_coil_sizing_ratio.setter
def dx_heating_coil_sizing_ratio(self, value=1.0):
    """Setter for IDD field `DX Heating Coil Sizing Ratio`.

    Args:
        value (float): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "DX Heating Coil Sizing Ratio"
    self[key] = value
@property
def cooling_coil_object_type(self):
    """Getter for IDD field `Cooling Coil Object Type`.

    Type of cooling coil, if included in the unitary system.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Cooling Coil Object Type"
    return self[key]

@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
    """Setter for IDD field `Cooling Coil Object Type`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Cooling Coil Object Type"
    self[key] = value
@property
def cooling_coil_name(self):
    """Getter for IDD field `Cooling Coil Name`.

    Name of the cooling coil, if included in the unitary system.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Cooling Coil Name"
    return self[key]

@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
    """Setter for IDD field `Cooling Coil Name`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Cooling Coil Name"
    self[key] = value
@property
def use_doas_dx_cooling_coil(self):
    """Getter for IDD field `Use DOAS DX Cooling Coil`.

    If Yes, the DX cooling coil runs as a 100% DOAS DX coil; if No, it
    runs as a regular DX coil. If left blank, the default is a regular
    dx coil. Default value: No.

    Returns:
        str: stored value, or None if not set.
    """
    key = "Use DOAS DX Cooling Coil"
    return self[key]

@use_doas_dx_cooling_coil.setter
def use_doas_dx_cooling_coil(self, value="No"):
    """Setter for IDD field `Use DOAS DX Cooling Coil`.

    Args:
        value (str): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "Use DOAS DX Cooling Coil"
    self[key] = value
@property
def doas_dx_cooling_coil_leaving_minimum_air_temperature(self):
    """Getter for IDD field `DOAS DX Cooling Coil Leaving Minimum Air Temperature`.

    Minimum DOAS DX cooling coil leaving air temperature maintained to
    avoid frost formation. Optional; only used together with the
    `Use DOAS DX Cooling Coil` field. Units: C. Default value: 2.0.
    Constraint: value <= 7.2.

    Returns:
        float: stored value, or None if not set.
    """
    key = "DOAS DX Cooling Coil Leaving Minimum Air Temperature"
    return self[key]

@doas_dx_cooling_coil_leaving_minimum_air_temperature.setter
def doas_dx_cooling_coil_leaving_minimum_air_temperature(self, value=2.0):
    """Setter for IDD field `DOAS DX Cooling Coil Leaving Minimum Air Temperature`.

    Args:
        value (float): new value; the schema machinery raises ValueError
            for invalid values.
    """
    key = "DOAS DX Cooling Coil Leaving Minimum Air Temperature"
    self[key] = value
@property
def latent_load_control(self):
    """str: Value of IDD field `Latent Load Control`, or None if not set.

    SensibleOnlyLoadControl: thermostat control is used.
    LatentOnlyLoadControl: humidistat control is used.
    LatentWithSensibleLoadControl: thermostat control is used and
    dehumidification is required only when a sensible load exists.
    LatentOrSensibleLoadControl: thermostat control is used and
    dehumidification is required any time the humidistat set point is
    exceeded.
    Default value: SensibleOnlyLoadControl
    """
    return self["Latent Load Control"]

@latent_load_control.setter
def latent_load_control(self, value="SensibleOnlyLoadControl"):
    """Set IDD field `Latent Load Control`; raises ValueError if invalid."""
    self["Latent Load Control"] = value
@property
def supplemental_heating_coil_object_type(self):
    """str: Value of IDD field `Supplemental Heating Coil Object Type`, or
    None if not set.

    Enter the type of supplemental heating coil if included in the unitary
    system.  Only required if dehumidification control type is "CoolReheat".
    """
    return self["Supplemental Heating Coil Object Type"]

@supplemental_heating_coil_object_type.setter
def supplemental_heating_coil_object_type(self, value=None):
    """Set IDD field `Supplemental Heating Coil Object Type`; raises
    ValueError if invalid."""
    self["Supplemental Heating Coil Object Type"] = value
@property
def supplemental_heating_coil_name(self):
    """str: Value of IDD field `Supplemental Heating Coil Name`, or None if
    not set.

    Enter the name of the supplemental heating coil if included in the
    unitary system.  Only required if dehumidification control type is
    "CoolReheat".
    """
    return self["Supplemental Heating Coil Name"]

@supplemental_heating_coil_name.setter
def supplemental_heating_coil_name(self, value=None):
    """Set IDD field `Supplemental Heating Coil Name`; raises ValueError if
    invalid."""
    self["Supplemental Heating Coil Name"] = value
@property
def cooling_supply_air_flow_rate_method(self):
    """str: Value of IDD field `Cooling Supply Air Flow Rate Method`, or None
    if not set.

    Method used to determine the cooling supply air volume flow rate:
    None - a cooling coil is not included in the unitary system, or this
    field may be blank.
    SupplyAirFlowRate - the magnitude of the supply air volume is used.
    FlowPerFloorArea - the flow rate is based on total floor area served by
    the unitary system.
    FractionOfAutosizedCoolingValue - the supply air volume is a fraction of
    the value determined by the simulation.
    FlowPerCoolingCapacity - the supply air volume is a fraction of the
    cooling capacity as determined by the simulation.
    """
    return self["Cooling Supply Air Flow Rate Method"]

@cooling_supply_air_flow_rate_method.setter
def cooling_supply_air_flow_rate_method(self, value=None):
    """Set IDD field `Cooling Supply Air Flow Rate Method`; raises ValueError
    if invalid."""
    self["Cooling Supply Air Flow Rate Method"] = value
@property
def cooling_supply_air_flow_rate(self):
    """float or "Autosize": Value of IDD field `Cooling Supply Air Flow
    Rate`, or None if not set.

    Magnitude of the supply air volume flow rate during cooling operation.
    Required when Cooling Supply Air Flow Rate Method is SupplyAirFlowRate;
    may be blank if a cooling coil is not included in the unitary system.
    Units: m3/s
    """
    return self["Cooling Supply Air Flow Rate"]

@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Supply Air Flow Rate`; raises ValueError if
    invalid."""
    self["Cooling Supply Air Flow Rate"] = value
@property
def cooling_supply_air_flow_rate_per_floor_area(self):
    """float: Value of IDD field `Cooling Supply Air Flow Rate Per Floor
    Area`, or None if not set.

    Supply air volume flow rate per total floor area fraction.  Required
    when Cooling Supply Air Flow Rate Method is FlowPerFloorArea; may be
    blank if a cooling coil is not included in the unitary system.
    Units: m3/s-m2
    """
    return self["Cooling Supply Air Flow Rate Per Floor Area"]

@cooling_supply_air_flow_rate_per_floor_area.setter
def cooling_supply_air_flow_rate_per_floor_area(self, value=None):
    """Set IDD field `Cooling Supply Air Flow Rate Per Floor Area`; raises
    ValueError if invalid."""
    self["Cooling Supply Air Flow Rate Per Floor Area"] = value
@property
def cooling_fraction_of_autosized_cooling_supply_air_flow_rate(self):
    """float: Value of IDD field `Cooling Fraction of Autosized Cooling
    Supply Air Flow Rate`, or None if not set.

    Supply air volume flow rate as a fraction of the cooling supply air flow
    rate.  Required when Cooling Supply Air Flow Rate Method is
    FractionOfAutosizedCoolingValue; may be blank if a cooling coil is not
    included in the unitary system.
    """
    return self["Cooling Fraction of Autosized Cooling Supply Air Flow Rate"]

@cooling_fraction_of_autosized_cooling_supply_air_flow_rate.setter
def cooling_fraction_of_autosized_cooling_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Fraction of Autosized Cooling Supply Air Flow
    Rate`; raises ValueError if invalid."""
    self["Cooling Fraction of Autosized Cooling Supply Air Flow Rate"] = value
@property
def cooling_supply_air_flow_rate_per_unit_of_capacity(self):
    """float: Value of IDD field `Cooling Supply Air Flow Rate Per Unit of
    Capacity`, or None if not set.

    Supply air volume flow rate as a fraction of the cooling capacity.
    Required when Cooling Supply Air Flow Rate Method is
    FlowPerCoolingCapacity; may be blank if a cooling coil is not included
    in the unitary system.
    Units: m3/s-W
    """
    return self["Cooling Supply Air Flow Rate Per Unit of Capacity"]

@cooling_supply_air_flow_rate_per_unit_of_capacity.setter
def cooling_supply_air_flow_rate_per_unit_of_capacity(self, value=None):
    """Set IDD field `Cooling Supply Air Flow Rate Per Unit of Capacity`;
    raises ValueError if invalid."""
    self["Cooling Supply Air Flow Rate Per Unit of Capacity"] = value
@property
def heating_supply_air_flow_rate_method(self):
    """str: Value of IDD field `Heating Supply Air Flow Rate Method`, or None
    if not set.

    Method used to determine the heating supply air volume flow rate:
    None - a heating coil is not included in the unitary system, or this
    field may be blank.
    SupplyAirFlowRate - the magnitude of the supply air volume is used.
    FlowPerFloorArea - the flow rate is based on total floor area served by
    the unitary system.
    FractionOfAutosizedHeatingValue - the supply air volume is a fraction of
    the value determined by the simulation.
    FlowPerHeatingCapacity - the supply air volume is a fraction of the
    heating capacity as determined by the simulation.
    """
    return self["Heating Supply Air Flow Rate Method"]

@heating_supply_air_flow_rate_method.setter
def heating_supply_air_flow_rate_method(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate Method`; raises ValueError
    if invalid."""
    self["Heating Supply Air Flow Rate Method"] = value
@property
def heating_supply_air_flow_rate(self):
    """float or "Autosize": Value of IDD field `Heating Supply Air Flow
    Rate`, or None if not set.

    Magnitude of the supply air volume flow rate during heating operation.
    Required when Heating Supply Air Flow Rate Method is SupplyAirFlowRate;
    may be blank if a heating coil is not included in the unitary system.
    Units: m3/s
    """
    return self["Heating Supply Air Flow Rate"]

@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate`; raises ValueError if
    invalid."""
    self["Heating Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate_per_floor_area(self):
    """float: Value of IDD field `Heating Supply Air Flow Rate Per Floor
    Area`, or None if not set.

    Supply air volume flow rate per total floor area fraction.  Required
    when Heating Supply Air Flow Rate Method is FlowPerFloorArea; may be
    blank if a heating coil is not included in the unitary system.
    Units: m3/s-m2
    """
    return self["Heating Supply Air Flow Rate Per Floor Area"]

@heating_supply_air_flow_rate_per_floor_area.setter
def heating_supply_air_flow_rate_per_floor_area(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate Per Floor Area`; raises
    ValueError if invalid."""
    self["Heating Supply Air Flow Rate Per Floor Area"] = value
@property
def heating_fraction_of_autosized_heating_supply_air_flow_rate(self):
    """float: Value of IDD field `Heating Fraction of Autosized Heating
    Supply Air Flow Rate`, or None if not set.

    Supply air volume flow rate as a fraction of the heating supply air flow
    rate.  Required when Heating Supply Air Flow Rate Method is
    FractionOfAutosizedHeatingValue; may be blank if a heating coil is not
    included in the unitary system.
    """
    return self["Heating Fraction of Autosized Heating Supply Air Flow Rate"]

@heating_fraction_of_autosized_heating_supply_air_flow_rate.setter
def heating_fraction_of_autosized_heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Fraction of Autosized Heating Supply Air Flow
    Rate`; raises ValueError if invalid."""
    self["Heating Fraction of Autosized Heating Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate_per_unit_of_capacity(self):
    """float: Value of IDD field `Heating Supply Air Flow Rate Per Unit of
    Capacity`, or None if not set.

    Supply air volume flow rate as a fraction of the heating capacity.
    Required when Heating Supply Air Flow Rate Method is
    FlowPerHeatingCapacity; may be blank if a heating coil is not included
    in the unitary system.
    Units: m3/s-W
    """
    return self["Heating Supply Air Flow Rate Per Unit of Capacity"]

@heating_supply_air_flow_rate_per_unit_of_capacity.setter
def heating_supply_air_flow_rate_per_unit_of_capacity(self, value=None):
    """Set IDD field `Heating Supply Air Flow Rate Per Unit of Capacity`;
    raises ValueError if invalid."""
    self["Heating Supply Air Flow Rate Per Unit of Capacity"] = value
@property
def no_load_supply_air_flow_rate_method(self):
    """str: Value of IDD field `No Load Supply Air Flow Rate Method`, or None
    if not set.

    Method used to determine the supply air volume flow rate when no cooling
    or heating is required:
    None - a cooling and heating coil is not included in the unitary system,
    or this field may be blank.
    SupplyAirFlowRate - the magnitude of the supply air volume is used.
    FlowPerFloorArea - the flow rate is based on total floor area served by
    the unitary system.
    FractionOfAutosizedCoolingValue / FractionOfAutosizedHeatingValue - the
    supply air volume is a fraction of the cooling / heating value
    determined by the simulation.
    FlowPerCoolingCapacity / FlowPerHeatingCapacity - the supply air volume
    is a fraction of the cooling / heating capacity as determined by the
    simulation.
    """
    return self["No Load Supply Air Flow Rate Method"]

@no_load_supply_air_flow_rate_method.setter
def no_load_supply_air_flow_rate_method(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate Method`; raises ValueError
    if invalid."""
    self["No Load Supply Air Flow Rate Method"] = value
@property
def no_load_supply_air_flow_rate(self):
    """float or "Autosize": Value of IDD field `No Load Supply Air Flow
    Rate`, or None if not set.

    Magnitude of the supply air volume flow rate during when no cooling or
    heating is required.  Required when No Load Supply Air Flow Rate Method
    is SupplyAirFlowRate.
    Units: m3/s
    """
    return self["No Load Supply Air Flow Rate"]

@no_load_supply_air_flow_rate.setter
def no_load_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate`; raises ValueError if
    invalid."""
    self["No Load Supply Air Flow Rate"] = value
@property
def no_load_supply_air_flow_rate_per_floor_area(self):
    """float: Value of IDD field `No Load Supply Air Flow Rate Per Floor
    Area`, or None if not set.

    Supply air volume flow rate per total floor area fraction.  Required
    when No Load Supply Air Flow Rate Method is FlowPerFloorArea.
    Units: m3/s-m2
    """
    return self["No Load Supply Air Flow Rate Per Floor Area"]

@no_load_supply_air_flow_rate_per_floor_area.setter
def no_load_supply_air_flow_rate_per_floor_area(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate Per Floor Area`; raises
    ValueError if invalid."""
    self["No Load Supply Air Flow Rate Per Floor Area"] = value
@property
def no_load_fraction_of_autosized_cooling_supply_air_flow_rate(self):
    """float: Value of IDD field `No Load Fraction of Autosized Cooling
    Supply Air Flow Rate`, or None if not set.

    Supply air volume flow rate as a fraction of the cooling supply air flow
    rate.  Required when No Load Supply Air Flow Rate Method is
    FractionOfAutosizedCoolingValue.
    """
    return self["No Load Fraction of Autosized Cooling Supply Air Flow Rate"]

@no_load_fraction_of_autosized_cooling_supply_air_flow_rate.setter
def no_load_fraction_of_autosized_cooling_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Fraction of Autosized Cooling Supply Air Flow
    Rate`; raises ValueError if invalid."""
    self["No Load Fraction of Autosized Cooling Supply Air Flow Rate"] = value
@property
def no_load_fraction_of_autosized_heating_supply_air_flow_rate(self):
    """float: Value of IDD field `No Load Fraction of Autosized Heating
    Supply Air Flow Rate`, or None if not set.

    Supply air volume flow rate as a fraction of the heating supply air flow
    rate.  Required when No Load Supply Air Flow Rate Method is
    FractionOfAutosizedHeatingValue.
    """
    return self["No Load Fraction of Autosized Heating Supply Air Flow Rate"]

@no_load_fraction_of_autosized_heating_supply_air_flow_rate.setter
def no_load_fraction_of_autosized_heating_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Fraction of Autosized Heating Supply Air Flow
    Rate`; raises ValueError if invalid."""
    self["No Load Fraction of Autosized Heating Supply Air Flow Rate"] = value
@property
def no_load_supply_air_flow_rate_per_unit_of_capacity_during_cooling_operation(self):
    """float: Value of IDD field `No Load Supply Air Flow Rate Per Unit of
    Capacity During Cooling Operation`, or None if not set.

    Supply air volume flow rate as a fraction of the cooling capacity.
    Required when No Load Supply Air Flow Rate Method is
    FlowPerCoolingCapacity.
    Units: m3/s-W
    """
    return self[
        "No Load Supply Air Flow Rate Per Unit of Capacity During Cooling Operation"]

@no_load_supply_air_flow_rate_per_unit_of_capacity_during_cooling_operation.setter
def no_load_supply_air_flow_rate_per_unit_of_capacity_during_cooling_operation(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate Per Unit of Capacity
    During Cooling Operation`; raises ValueError if invalid."""
    self[
        "No Load Supply Air Flow Rate Per Unit of Capacity During Cooling Operation"] = value
@property
def no_load_supply_air_flow_rate_per_unit_of_capacity_during_heating_operation(self):
    """float: Value of IDD field `No Load Supply Air Flow Rate Per Unit of
    Capacity During Heating Operation`, or None if not set.

    Supply air volume flow rate as a fraction of the heating capacity.
    Required when No Load Supply Air Flow Rate Method is
    FlowPerHeatingCapacity.
    Units: m3/s-W
    """
    return self[
        "No Load Supply Air Flow Rate Per Unit of Capacity During Heating Operation"]

@no_load_supply_air_flow_rate_per_unit_of_capacity_during_heating_operation.setter
def no_load_supply_air_flow_rate_per_unit_of_capacity_during_heating_operation(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate Per Unit of Capacity
    During Heating Operation`; raises ValueError if invalid."""
    self[
        "No Load Supply Air Flow Rate Per Unit of Capacity During Heating Operation"] = value
@property
def maximum_supply_air_temperature(self):
    """float or "Autosize": Value of IDD field `Maximum Supply Air
    Temperature`, or None if not set.

    Maximum supply air temperature leaving the heating coil.
    Units: C.  Default value: 80.0.
    """
    return self["Maximum Supply Air Temperature"]

@maximum_supply_air_temperature.setter
def maximum_supply_air_temperature(self, value=80.0):
    """Set IDD field `Maximum Supply Air Temperature`; raises ValueError if
    invalid."""
    self["Maximum Supply Air Temperature"] = value
@property
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(self):
    """float: Value of IDD field `Maximum Outdoor Dry-Bulb Temperature for
    Supplemental Heater Operation`, or None if not set.

    Maximum outdoor dry-bulb temperature for supplemental heater operation.
    Units: C.  Default value: 21.0.
    """
    return self[
        "Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"]

@maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation.setter
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(self, value=21.0):
    """Set IDD field `Maximum Outdoor Dry-Bulb Temperature for Supplemental
    Heater Operation`; raises ValueError if invalid."""
    self[
        "Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"] = value
@property
def outdoor_drybulb_temperature_sensor_node_name(self):
    """str: Value of IDD field `Outdoor Dry-Bulb Temperature Sensor Node
    Name`, or None if not set.

    If this field is blank, outdoor temperature from the weather file is
    used.  Otherwise, the named node determines the outdoor temperature
    used for controlling supplemental heater operation.
    """
    return self["Outdoor Dry-Bulb Temperature Sensor Node Name"]

@outdoor_drybulb_temperature_sensor_node_name.setter
def outdoor_drybulb_temperature_sensor_node_name(self, value=None):
    """Set IDD field `Outdoor Dry-Bulb Temperature Sensor Node Name`; raises
    ValueError if invalid."""
    self["Outdoor Dry-Bulb Temperature Sensor Node Name"] = value
@property
def maximum_cycling_rate(self):
    """float: Value of IDD field `Maximum Cycling Rate`, or None if not set.

    Used only for water source heat pump.  The maximum on-off cycling rate
    for the compressor; suggested value is 2.5 for a typical heat pump.
    Units: cycles/hr.  Default value: 2.5.  Constraint: value <= 5.0.
    """
    return self["Maximum Cycling Rate"]

@maximum_cycling_rate.setter
def maximum_cycling_rate(self, value=2.5):
    """Set IDD field `Maximum Cycling Rate`; raises ValueError if invalid."""
    self["Maximum Cycling Rate"] = value
@property
def heat_pump_time_constant(self):
    """float: Value of IDD field `Heat Pump Time Constant`, or None if not
    set.

    Used only for water source heat pump.  Time constant for the cooling
    coil's capacity to reach steady state after startup; suggested value is
    60 for a typical heat pump.
    Units: s.  Default value: 60.0.  Constraint: value <= 500.0.
    """
    return self["Heat Pump Time Constant"]

@heat_pump_time_constant.setter
def heat_pump_time_constant(self, value=60.0):
    """Set IDD field `Heat Pump Time Constant`; raises ValueError if
    invalid."""
    self["Heat Pump Time Constant"] = value
@property
def fraction_of_oncycle_power_use(self):
    """float: Value of IDD field `Fraction of On-Cycle Power Use`, or None if
    not set.

    Used only for water source heat pump.  The fraction of on-cycle power
    use to adjust the part load fraction based on the off-cycle power
    consumption due to crankcase heaters, controls, fans, and etc.
    Suggested value is 0.01 for a typical heat pump.
    Default value: 0.01.  Constraint: value <= 0.05.
    """
    return self["Fraction of On-Cycle Power Use"]

@fraction_of_oncycle_power_use.setter
def fraction_of_oncycle_power_use(self, value=0.01):
    """Set IDD field `Fraction of On-Cycle Power Use`; raises ValueError if
    invalid."""
    self["Fraction of On-Cycle Power Use"] = value
@property
def heat_pump_fan_delay_time(self):
    """float: Value of IDD field `Heat Pump Fan Delay Time`, or None if not
    set.

    Used only for water source heat pump.  Programmed time delay for heat
    pump fan to shut off after compressor cycle off.  Only required when fan
    operating mode is cycling; enter 0 when fan operating mode is
    continuous.
    Units: s.  Default value: 60.0.
    """
    return self["Heat Pump Fan Delay Time"]

@heat_pump_fan_delay_time.setter
def heat_pump_fan_delay_time(self, value=60.0):
    """Set IDD field `Heat Pump Fan Delay Time`; raises ValueError if
    invalid."""
    self["Heat Pump Fan Delay Time"] = value
@property
def ancillary_oncycle_electric_power(self):
    """float: Value of IDD field `Ancillary On-Cycle Electric Power`, or None
    if not set.

    Ancillary electric power for controls or other devices consumed during
    the on cycle.
    Units: W
    """
    return self["Ancillary On-Cycle Electric Power"]

@ancillary_oncycle_electric_power.setter
def ancillary_oncycle_electric_power(self, value=None):
    """Set IDD field `Ancillary On-Cycle Electric Power`; raises ValueError
    if invalid."""
    self["Ancillary On-Cycle Electric Power"] = value
@property
def ancillary_offcycle_electric_power(self):
    """float: Value of IDD field `Ancillary Off-Cycle Electric Power`, or
    None if not set.

    Ancillary electric power for controls or other devices consumed during
    the off cycle.
    Units: W
    """
    return self["Ancillary Off-Cycle Electric Power"]

@ancillary_offcycle_electric_power.setter
def ancillary_offcycle_electric_power(self, value=None):
    """Set IDD field `Ancillary Off-Cycle Electric Power`; raises ValueError
    if invalid."""
    self["Ancillary Off-Cycle Electric Power"] = value
@property
def design_heat_recovery_water_flow_rate(self):
    """float: Value of IDD field `Design Heat Recovery Water Flow Rate`, or
    None if not set.

    If non-zero, then the heat recovery inlet and outlet node names must be
    entered.  Used for heat recovery to an EnergyPlus plant loop.
    Units: m3/s
    """
    return self["Design Heat Recovery Water Flow Rate"]

@design_heat_recovery_water_flow_rate.setter
def design_heat_recovery_water_flow_rate(self, value=None):
    """Set IDD field `Design Heat Recovery Water Flow Rate`; raises
    ValueError if invalid."""
    self["Design Heat Recovery Water Flow Rate"] = value
@property
def maximum_temperature_for_heat_recovery(self):
    """float: Value of IDD field `Maximum Temperature for Heat Recovery`, or
    None if not set.

    Maximum heat recovery inlet temperature allowed for heat recovery.
    Units: C.  Default value: 80.0.  Constraint: value <= 100.0.
    """
    return self["Maximum Temperature for Heat Recovery"]

@maximum_temperature_for_heat_recovery.setter
def maximum_temperature_for_heat_recovery(self, value=80.0):
    """Set IDD field `Maximum Temperature for Heat Recovery`; raises
    ValueError if invalid."""
    self["Maximum Temperature for Heat Recovery"] = value
@property
def heat_recovery_water_inlet_node_name(self):
    """str: Value of IDD field `Heat Recovery Water Inlet Node Name`, or None
    if not set.

    Name of the heat recovery water inlet node if plant water loop
    connections are present.
    """
    return self["Heat Recovery Water Inlet Node Name"]

@heat_recovery_water_inlet_node_name.setter
def heat_recovery_water_inlet_node_name(self, value=None):
    """Set IDD field `Heat Recovery Water Inlet Node Name`; raises ValueError
    if invalid."""
    self["Heat Recovery Water Inlet Node Name"] = value
@property
def heat_recovery_water_outlet_node_name(self):
    """str: Value of IDD field `Heat Recovery Water Outlet Node Name`, or
    None if not set.

    Name of the heat recovery water outlet node if plant water loop
    connections are present.
    """
    return self["Heat Recovery Water Outlet Node Name"]

@heat_recovery_water_outlet_node_name.setter
def heat_recovery_water_outlet_node_name(self, value=None):
    """Set IDD field `Heat Recovery Water Outlet Node Name`; raises
    ValueError if invalid."""
    self["Heat Recovery Water Outlet Node Name"] = value
@property
def design_specification_multispeed_object_type(self):
    """str: Value of IDD field `Design Specification Multispeed Object Type`,
    or None if not set.

    Type of performance specification object used to describe the
    multispeed coil.
    """
    return self["Design Specification Multispeed Object Type"]

@design_specification_multispeed_object_type.setter
def design_specification_multispeed_object_type(self, value=None):
    """Set IDD field `Design Specification Multispeed Object Type`; raises
    ValueError if invalid."""
    self["Design Specification Multispeed Object Type"] = value
@property
def design_specification_multispeed_object_name(self):
    """str: Value of IDD field `Design Specification Multispeed Object Name`,
    or None if not set.

    Name of the performance specification object used to describe the
    multispeed coil.
    """
    return self["Design Specification Multispeed Object Name"]

@design_specification_multispeed_object_name.setter
def design_specification_multispeed_object_name(self, value=None):
    """Set IDD field `Design Specification Multispeed Object Name`; raises
    ValueError if invalid."""
    self["Design Specification Multispeed Object Name"] = value
class UnitarySystemPerformanceMultispeed(DataObject):

    """ Corresponds to IDD object `UnitarySystemPerformance:Multispeed`
        The UnitarySystemPerformance object is used to specify the air flow ratio at each
        operating speed. This object is primarily used for multispeed DX and water coils to allow
        operation at alternate flow rates different from those specified in the coil object.
    """
    # Auto-generated IDD schema consumed by the DataObject base machinery.
    # 'fields' lists the fixed leading fields in IDD order; 'extensible-fields'
    # describes one repeatable group (a heating/cooling supply-air-flow-ratio
    # pair per speed) appended via add_extensible().
    # NOTE: the key 'minimum>' denotes an *exclusive* lower bound (value > 0.0),
    # as opposed to the inclusive 'minimum' key.
    _schema = {'extensible-fields': OrderedDict([(u'heating speed 1 supply air flow ratio',
                                                  {'name': u'Heating Speed 1 Supply Air Flow Ratio',
                                                   'pyname': u'heating_speed_1_supply_air_flow_ratio',
                                                   'minimum>': 0.0,
                                                   'required-field': True,
                                                   'autosizable': True,
                                                   'autocalculatable': False,
                                                   'type': u'real'}),
                                                 (u'cooling speed 1 supply air flow ratio',
                                                  {'name': u'Cooling Speed 1 Supply Air Flow Ratio',
                                                   'pyname': u'cooling_speed_1_supply_air_flow_ratio',
                                                   'minimum>': 0.0,
                                                   'required-field': True,
                                                   'autosizable': True,
                                                   'autocalculatable': False,
                                                   'type': u'real'})]),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'number of speeds for heating',
                                       {'name': u'Number of Speeds for Heating',
                                        'pyname': u'number_of_speeds_for_heating',
                                        'maximum': 10,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0,
                                        'autocalculatable': False,
                                        'type': u'integer'}),
                                      (u'number of speeds for cooling',
                                       {'name': u'Number of Speeds for Cooling',
                                        'pyname': u'number_of_speeds_for_cooling',
                                        'maximum': 10,
                                        'required-field': True,
                                        'autosizable': False,
                                        'minimum': 0,
                                        'autocalculatable': False,
                                        'type': u'integer'})]),
               'format': None,
               'group': u'Unitary Equipment',
               'min-fields': 0,
               'name': u'UnitarySystemPerformance:Multispeed',
               'pyname': u'UnitarySystemPerformanceMultispeed',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def number_of_speeds_for_heating(self):
        """field `Number of Speeds for Heating`

        | Used only for Multi speed coils
        | Enter the number of the following sets of data for air flow rates.
        | value <= 10

        Args:
            value (int): value for IDD Field `Number of Speeds for Heating`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            int: the value of `number_of_speeds_for_heating` or None if not set
        """
        return self["Number of Speeds for Heating"]

    @number_of_speeds_for_heating.setter
    def number_of_speeds_for_heating(self, value=None):
        """Corresponds to IDD field `Number of Speeds for Heating`"""
        self["Number of Speeds for Heating"] = value

    @property
    def number_of_speeds_for_cooling(self):
        """field `Number of Speeds for Cooling`

        | Used only for Multi speed coils
        | Enter the number of the following sets of data for air flow rates.
        | value <= 10

        Args:
            value (int): value for IDD Field `Number of Speeds for Cooling`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            int: the value of `number_of_speeds_for_cooling` or None if not set
        """
        return self["Number of Speeds for Cooling"]

    @number_of_speeds_for_cooling.setter
    def number_of_speeds_for_cooling(self, value=None):
        """Corresponds to IDD field `Number of Speeds for Cooling`"""
        self["Number of Speeds for Cooling"] = value

    def add_extensible(self,
                       heating_speed_1_supply_air_flow_ratio=None,
                       cooling_speed_1_supply_air_flow_ratio=None,
                       ):
        """Add values for extensible fields.

        Appends one (heating ratio, cooling ratio) group for an additional
        operating speed.

        Args:
            heating_speed_1_supply_air_flow_ratio (float or "Autosize"): value for IDD Field `Heating Speed 1 Supply Air Flow Ratio`
                if `value` is None it will not be checked against the
                specification and is assumed to be a missing value

            cooling_speed_1_supply_air_flow_ratio (float or "Autosize"): value for IDD Field `Cooling Speed 1 Supply Air Flow Ratio`
                if `value` is None it will not be checked against the
                specification and is assumed to be a missing value
        """
        vals = []
        # check_value validates each value against the extensible-field schema
        # (exclusive minimum, autosizable flag) before it is stored.
        heating_speed_1_supply_air_flow_ratio = self.check_value(
            "Heating Speed 1 Supply Air Flow Ratio",
            heating_speed_1_supply_air_flow_ratio)
        vals.append(heating_speed_1_supply_air_flow_ratio)
        cooling_speed_1_supply_air_flow_ratio = self.check_value(
            "Cooling Speed 1 Supply Air Flow Ratio",
            cooling_speed_1_supply_air_flow_ratio)
        vals.append(cooling_speed_1_supply_air_flow_ratio)
        self._extdata.append(vals)

    @property
    def extensibles(self):
        """Get list of all extensibles."""
        # Each entry is one [heating ratio, cooling ratio] group.
        return self._extdata

    @extensibles.setter
    def extensibles(self, extensibles):
        """Replaces extensible fields with `extensibles`

        Args:
            extensibles (list): nested list of extensible values
        """
        # Rebuild via add_extensible so every value is re-validated.
        self._extdata = []
        for ext in extensibles:
            self.add_extensible(*ext)
class AirLoopHvacUnitaryFurnaceHeatOnly(DataObject):

    """ Corresponds to IDD object `AirLoopHVAC:Unitary:Furnace:HeatOnly`
        Unitary system, heating-only with constant volume supply fan (continuous or cycling)
        and heating coil (gas, electric, hot water, or steam). Identical to
        AirLoopHVAC:UnitaryHeatOnly.
    """
    # Auto-generated IDD schema consumed by the DataObject base machinery.
    # This object has no extensible (repeatable) field groups; 'fields' lists
    # the 13 fixed fields in IDD order.
    # NOTE: the key 'minimum>' denotes an *exclusive* lower bound (value > 0.0).
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'availability schedule name',
                                       {'name': u'Availability Schedule Name',
                                        'pyname': u'availability_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'furnace air inlet node name',
                                       {'name': u'Furnace Air Inlet Node Name',
                                        'pyname': u'furnace_air_inlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'furnace air outlet node name',
                                       {'name': u'Furnace Air Outlet Node Name',
                                        'pyname': u'furnace_air_outlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'supply air fan operating mode schedule name',
                                       {'name': u'Supply Air Fan Operating Mode Schedule Name',
                                        'pyname': u'supply_air_fan_operating_mode_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'maximum supply air temperature',
                                       {'name': u'Maximum Supply Air Temperature',
                                        'pyname': u'maximum_supply_air_temperature',
                                        'default': 80.0,
                                        'required-field': False,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'heating supply air flow rate',
                                       {'name': u'Heating Supply Air Flow Rate',
                                        'pyname': u'heating_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'controlling zone or thermostat location',
                                       {'name': u'Controlling Zone or Thermostat Location',
                                        'pyname': u'controlling_zone_or_thermostat_location',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'supply fan object type',
                                       {'name': u'Supply Fan Object Type',
                                        'pyname': u'supply_fan_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Fan:OnOff',
                                                            u'Fan:ConstantVolume'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply fan name',
                                       {'name': u'Supply Fan Name',
                                        'pyname': u'supply_fan_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'fan placement',
                                       {'name': u'Fan Placement',
                                        'pyname': u'fan_placement',
                                        'default': u'BlowThrough',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'BlowThrough',
                                                            u'DrawThrough'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil object type',
                                       {'name': u'Heating Coil Object Type',
                                        'pyname': u'heating_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Heating:Gas',
                                                            u'Coil:Heating:Electric',
                                                            u'Coil:Heating:Water',
                                                            u'Coil:Heating:Steam'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil name',
                                       {'name': u'Heating Coil Name',
                                        'pyname': u'heating_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'Unitary Equipment',
               'min-fields': 13,
               'name': u'AirLoopHVAC:Unitary:Furnace:HeatOnly',
               'pyname': u'AirLoopHvacUnitaryFurnaceHeatOnly',
               'required-object': False,
               'unique-object': False}

    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def availability_schedule_name(self):
        """field `Availability Schedule Name`

        | Availability schedule name for this system. Schedule value > 0 means the system is available.
        | If this field is blank, the system is always available.

        Args:
            value (str): value for IDD Field `Availability Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `availability_schedule_name` or None if not set
        """
        return self["Availability Schedule Name"]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Availability Schedule Name`"""
        self["Availability Schedule Name"] = value

    @property
    def furnace_air_inlet_node_name(self):
        """field `Furnace Air Inlet Node Name`

        Args:
            value (str): value for IDD Field `Furnace Air Inlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `furnace_air_inlet_node_name` or None if not set
        """
        return self["Furnace Air Inlet Node Name"]

    @furnace_air_inlet_node_name.setter
    def furnace_air_inlet_node_name(self, value=None):
        """Corresponds to IDD field `Furnace Air Inlet Node Name`"""
        self["Furnace Air Inlet Node Name"] = value

    @property
    def furnace_air_outlet_node_name(self):
        """field `Furnace Air Outlet Node Name`

        Args:
            value (str): value for IDD Field `Furnace Air Outlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `furnace_air_outlet_node_name` or None if not set
        """
        return self["Furnace Air Outlet Node Name"]

    @furnace_air_outlet_node_name.setter
    def furnace_air_outlet_node_name(self, value=None):
        """Corresponds to IDD field `Furnace Air Outlet Node Name`"""
        self["Furnace Air Outlet Node Name"] = value

    @property
    def supply_air_fan_operating_mode_schedule_name(self):
        """field `Supply Air Fan Operating Mode Schedule Name`

        | A fan operating mode schedule value of 0 indicates cycling fan mode (supply air
        | fan cycles on and off in tandem with the heating coil).
        | Any other schedule value indicates continuous fan mode (supply air fan operates
        | continuously regardless of heating coil operation).
        | Leaving this schedule name blank will default to cycling fan mode for the
        | entire simulation period.

        Args:
            value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
        """
        return self["Supply Air Fan Operating Mode Schedule Name"]

    @supply_air_fan_operating_mode_schedule_name.setter
    def supply_air_fan_operating_mode_schedule_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
        Name`"""
        self["Supply Air Fan Operating Mode Schedule Name"] = value

    @property
    def maximum_supply_air_temperature(self):
        """field `Maximum Supply Air Temperature`

        | Units: C
        | Default value: 80.0

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Supply Air Temperature`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_supply_air_temperature` or None if not set
        """
        return self["Maximum Supply Air Temperature"]

    @maximum_supply_air_temperature.setter
    def maximum_supply_air_temperature(self, value=80.0):
        """Corresponds to IDD field `Maximum Supply Air Temperature`"""
        # Setter default mirrors the IDD default of 80.0 C.
        self["Maximum Supply Air Temperature"] = value

    @property
    def heating_supply_air_flow_rate(self):
        """field `Heating Supply Air Flow Rate`

        | This value should be > 0 and <= than the fan air flow rate.
        | Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
        """
        return self["Heating Supply Air Flow Rate"]

    @heating_supply_air_flow_rate.setter
    def heating_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Heating Supply Air Flow Rate`"""
        self["Heating Supply Air Flow Rate"] = value

    @property
    def controlling_zone_or_thermostat_location(self):
        """field `Controlling Zone or Thermostat Location`

        Args:
            value (str): value for IDD Field `Controlling Zone or Thermostat Location`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `controlling_zone_or_thermostat_location` or None if not set
        """
        return self["Controlling Zone or Thermostat Location"]

    @controlling_zone_or_thermostat_location.setter
    def controlling_zone_or_thermostat_location(self, value=None):
        """Corresponds to IDD field `Controlling Zone or Thermostat
        Location`"""
        self["Controlling Zone or Thermostat Location"] = value

    @property
    def supply_fan_object_type(self):
        """field `Supply Fan Object Type`

        | Fan:ConstantVolume only works with continuous fan operating mode (i.e. fan
        | operating mode schedule values are greater than 0).

        Args:
            value (str): value for IDD Field `Supply Fan Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_fan_object_type` or None if not set
        """
        return self["Supply Fan Object Type"]

    @supply_fan_object_type.setter
    def supply_fan_object_type(self, value=None):
        """Corresponds to IDD field `Supply Fan Object Type`"""
        self["Supply Fan Object Type"] = value

    @property
    def supply_fan_name(self):
        """field `Supply Fan Name`

        Args:
            value (str): value for IDD Field `Supply Fan Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_fan_name` or None if not set
        """
        return self["Supply Fan Name"]

    @supply_fan_name.setter
    def supply_fan_name(self, value=None):
        """Corresponds to IDD field `Supply Fan Name`"""
        self["Supply Fan Name"] = value

    @property
    def fan_placement(self):
        """field `Fan Placement`

        | Default value: BlowThrough

        Args:
            value (str): value for IDD Field `Fan Placement`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `fan_placement` or None if not set
        """
        return self["Fan Placement"]

    @fan_placement.setter
    def fan_placement(self, value="BlowThrough"):
        """Corresponds to IDD field `Fan Placement`"""
        # Setter default mirrors the IDD default of BlowThrough.
        self["Fan Placement"] = value

    @property
    def heating_coil_object_type(self):
        """field `Heating Coil Object Type`

        | works with gas, electric, hot water and steam heating coils

        Args:
            value (str): value for IDD Field `Heating Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_object_type` or None if not set
        """
        return self["Heating Coil Object Type"]

    @heating_coil_object_type.setter
    def heating_coil_object_type(self, value=None):
        """Corresponds to IDD field `Heating Coil Object Type`"""
        self["Heating Coil Object Type"] = value

    @property
    def heating_coil_name(self):
        """field `Heating Coil Name`

        Args:
            value (str): value for IDD Field `Heating Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_name` or None if not set
        """
        return self["Heating Coil Name"]

    @heating_coil_name.setter
    def heating_coil_name(self, value=None):
        """Corresponds to IDD field `Heating Coil Name`"""
        self["Heating Coil Name"] = value
class AirLoopHvacUnitaryFurnaceHeatCool(DataObject):
""" Corresponds to IDD object `AirLoopHVAC:Unitary:Furnace:HeatCool`
Unitary system, heating and cooling with constant volume supply fan (continuous or
cycling), direct expansion (DX) cooling coil, heating coil (gas, electric,
hot water, or steam), and optional reheat coil for dehumidification control.
Identical to AirLoopHVAC:UnitaryHeatCool.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'furnace air inlet node name',
{'name': u'Furnace Air Inlet Node Name',
'pyname': u'furnace_air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'furnace air outlet node name',
{'name': u'Furnace Air Outlet Node Name',
'pyname': u'furnace_air_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'supply air fan operating mode schedule name',
{'name': u'Supply Air Fan Operating Mode Schedule Name',
'pyname': u'supply_air_fan_operating_mode_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum supply air temperature',
{'name': u'Maximum Supply Air Temperature',
'pyname': u'maximum_supply_air_temperature',
'default': 80.0,
'required-field': False,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'cooling supply air flow rate',
{'name': u'Cooling Supply Air Flow Rate',
'pyname': u'cooling_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating supply air flow rate',
{'name': u'Heating Supply Air Flow Rate',
'pyname': u'heating_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'no load supply air flow rate',
{'name': u'No Load Supply Air Flow Rate',
'pyname': u'no_load_supply_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'controlling zone or thermostat location',
{'name': u'Controlling Zone or Thermostat Location',
'pyname': u'controlling_zone_or_thermostat_location',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply fan object type',
{'name': u'Supply Fan Object Type',
'pyname': u'supply_fan_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Fan:OnOff',
u'Fan:ConstantVolume'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply fan name',
{'name': u'Supply Fan Name',
'pyname': u'supply_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'fan placement',
{'name': u'Fan Placement',
'pyname': u'fan_placement',
'default': u'BlowThrough',
'required-field': False,
'autosizable': False,
'accepted-values': [u'BlowThrough',
u'DrawThrough'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil object type',
{'name': u'Heating Coil Object Type',
'pyname': u'heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Gas',
u'Coil:Heating:Electric',
u'Coil:Heating:Water',
u'Coil:Heating:Steam'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil name',
{'name': u'Heating Coil Name',
'pyname': u'heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling coil object type',
{'name': u'Cooling Coil Object Type',
'pyname': u'cooling_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Cooling:DX:SingleSpeed',
u'CoilSystem:Cooling:DX:HeatExchangerAssisted'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling coil name',
{'name': u'Cooling Coil Name',
'pyname': u'cooling_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'dehumidification control type',
{'name': u'Dehumidification Control Type',
'pyname': u'dehumidification_control_type',
'default': u'None',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'Multimode',
u'CoolReheat'],
'autocalculatable': False,
'type': 'alpha'}),
(u'reheat coil object type',
{'name': u'Reheat Coil Object Type',
'pyname': u'reheat_coil_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Gas',
u'Coil:Heating:Electric',
u'Coil:Heating:Desuperheater',
u'Coil:Heating:Water',
u'Coil:Heating:Steam'],
'autocalculatable': False,
'type': 'alpha'}),
(u'reheat coil name',
{'name': u'Reheat Coil Name',
'pyname': u'reheat_coil_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Unitary Equipment',
'min-fields': 17,
'name': u'AirLoopHVAC:Unitary:Furnace:HeatCool',
'pyname': u'AirLoopHvacUnitaryFurnaceHeatCool',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def availability_schedule_name(self):
"""field `Availability Schedule Name`
| Availability schedule name for this system. Schedule value > 0 means the system is available.
| If this field is blank, the system is always available.
| A schedule value greater than zero (usually 1 is used) indicates that the unit is
| available to operate as needed. A value less than or equal to zero (usually zero
| is used) denotes that the unit must be off.
Args:
value (str): value for IDD Field `Availability Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_schedule_name` or None if not set
"""
return self["Availability Schedule Name"]
@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
"""Corresponds to IDD field `Availability Schedule Name`"""
self["Availability Schedule Name"] = value
@property
def furnace_air_inlet_node_name(self):
"""field `Furnace Air Inlet Node Name`
Args:
value (str): value for IDD Field `Furnace Air Inlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `furnace_air_inlet_node_name` or None if not set
"""
return self["Furnace Air Inlet Node Name"]
@furnace_air_inlet_node_name.setter
def furnace_air_inlet_node_name(self, value=None):
"""Corresponds to IDD field `Furnace Air Inlet Node Name`"""
self["Furnace Air Inlet Node Name"] = value
@property
def furnace_air_outlet_node_name(self):
"""field `Furnace Air Outlet Node Name`
Args:
value (str): value for IDD Field `Furnace Air Outlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `furnace_air_outlet_node_name` or None if not set
"""
return self["Furnace Air Outlet Node Name"]
@furnace_air_outlet_node_name.setter
def furnace_air_outlet_node_name(self, value=None):
"""Corresponds to IDD field `Furnace Air Outlet Node Name`"""
self["Furnace Air Outlet Node Name"] = value
@property
def supply_air_fan_operating_mode_schedule_name(self):
"""field `Supply Air Fan Operating Mode Schedule Name`
| A fan operating mode schedule value of 0 indicates cycling fan mode (supply air
| fan cycles on and off in tandem with the cooling or heating coil).
| Any other schedule value indicates continuous fan mode (supply air fan operates
| continuously regardless of cooling or heating coil operation). Provide a schedule
| with non-zero values when high humidity control is specified.
| Leaving this schedule name blank will default to cycling fan mode for the
| entire simulation period.
Args:
value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
"""
return self["Supply Air Fan Operating Mode Schedule Name"]
@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
Name`"""
self["Supply Air Fan Operating Mode Schedule Name"] = value
@property
def maximum_supply_air_temperature(self):
"""field `Maximum Supply Air Temperature`
| Units: C
| Default value: 80.0
Args:
value (float or "Autosize"): value for IDD Field `Maximum Supply Air Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `maximum_supply_air_temperature` or None if not set
"""
return self["Maximum Supply Air Temperature"]
@maximum_supply_air_temperature.setter
def maximum_supply_air_temperature(self, value=80.0):
"""Corresponds to IDD field `Maximum Supply Air Temperature`"""
self["Maximum Supply Air Temperature"] = value
@property
def cooling_supply_air_flow_rate(self):
"""field `Cooling Supply Air Flow Rate`
| Must be less than or equal to the fan's maximum flow rate.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set
"""
return self["Cooling Supply Air Flow Rate"]
@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Cooling Supply Air Flow Rate`"""
self["Cooling Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate(self):
"""field `Heating Supply Air Flow Rate`
| Must be less than or equal to the fan's maximum flow fate.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
"""
return self["Heating Supply Air Flow Rate"]
@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Heating Supply Air Flow Rate`"""
self["Heating Supply Air Flow Rate"] = value
@property
def no_load_supply_air_flow_rate(self):
"""field `No Load Supply Air Flow Rate`
| Must be less than or equal to the fan's maximum flow rate.
| Only used when fan operating mode is continuous (disregarded for cycling fan mode).
| This air flow rate is used when no heating or cooling is required (i.e., the DX coil
| compressor and heating coil are off). If this field is left blank or zero, the supply
| air flow rate from the previous on cycle (either cooling or heating) is used.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Load Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_load_supply_air_flow_rate` or None if not set
"""
return self["No Load Supply Air Flow Rate"]
@no_load_supply_air_flow_rate.setter
def no_load_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `No Load Supply Air Flow Rate`"""
self["No Load Supply Air Flow Rate"] = value
@property
def controlling_zone_or_thermostat_location(self):
"""field `Controlling Zone or Thermostat Location`
Args:
value (str): value for IDD Field `Controlling Zone or Thermostat Location`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `controlling_zone_or_thermostat_location` or None if not set
"""
return self["Controlling Zone or Thermostat Location"]
@controlling_zone_or_thermostat_location.setter
def controlling_zone_or_thermostat_location(self, value=None):
"""Corresponds to IDD field `Controlling Zone or Thermostat
Location`"""
self["Controlling Zone or Thermostat Location"] = value
@property
def supply_fan_object_type(self):
"""field `Supply Fan Object Type`
| Fan:ConstantVolume only works with continuous fan operating mode (i.e. supply
| air fan operating mode schedule values not equal to 0).
Args:
value (str): value for IDD Field `Supply Fan Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_fan_object_type` or None if not set
"""
return self["Supply Fan Object Type"]
@supply_fan_object_type.setter
def supply_fan_object_type(self, value=None):
"""Corresponds to IDD field `Supply Fan Object Type`"""
self["Supply Fan Object Type"] = value
@property
def supply_fan_name(self):
"""field `Supply Fan Name`
Args:
value (str): value for IDD Field `Supply Fan Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_fan_name` or None if not set
"""
return self["Supply Fan Name"]
@supply_fan_name.setter
def supply_fan_name(self, value=None):
"""Corresponds to IDD field `Supply Fan Name`"""
self["Supply Fan Name"] = value
@property
def fan_placement(self):
"""field `Fan Placement`
| Default value: BlowThrough
Args:
value (str): value for IDD Field `Fan Placement`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `fan_placement` or None if not set
"""
return self["Fan Placement"]
@fan_placement.setter
def fan_placement(self, value="BlowThrough"):
"""Corresponds to IDD field `Fan Placement`"""
self["Fan Placement"] = value
@property
def heating_coil_object_type(self):
"""field `Heating Coil Object Type`
| works with gas, electric, hot water and steam heating coils
Args:
value (str): value for IDD Field `Heating Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_object_type` or None if not set
"""
return self["Heating Coil Object Type"]
@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
"""Corresponds to IDD field `Heating Coil Object Type`"""
self["Heating Coil Object Type"] = value
@property
def heating_coil_name(self):
"""field `Heating Coil Name`
Args:
value (str): value for IDD Field `Heating Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_name` or None if not set
"""
return self["Heating Coil Name"]
@heating_coil_name.setter
def heating_coil_name(self, value=None):
"""Corresponds to IDD field `Heating Coil Name`"""
self["Heating Coil Name"] = value
@property
def cooling_coil_object_type(self):
"""field `Cooling Coil Object Type`
| Only works with DX cooling coil types
Args:
value (str): value for IDD Field `Cooling Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_object_type` or None if not set
"""
return self["Cooling Coil Object Type"]
@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
"""Corresponds to IDD field `Cooling Coil Object Type`"""
self["Cooling Coil Object Type"] = value
@property
def cooling_coil_name(self):
"""field `Cooling Coil Name`
Args:
value (str): value for IDD Field `Cooling Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_name` or None if not set
"""
return self["Cooling Coil Name"]
@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
"""Corresponds to IDD field `Cooling Coil Name`"""
self["Cooling Coil Name"] = value
@property
def dehumidification_control_type(self):
    """Get the `Dehumidification Control Type` IDD field.

    Accepted modes:

    * ``None`` -- meet the sensible load only (default).
    * ``Multimode`` -- activate enhanced dehumidification mode as needed
      while meeting the sensible load. Valid only with cooling coil type
      CoilSystem:Cooling:DX:HeatExchangerAssisted; lets the heat exchanger
      be turned on and off based on the zone dehumidification requirements.
      A ZoneControl:Humidistat object is also required.
    * ``CoolReheat`` -- cool beyond the dry-bulb setpoint as required to
      meet the humidity setpoint. Valid with all cooling coil types; when a
      heat exchanger assisted cooling coil is used, the heat exchanger is
      locked on at all times. A ZoneControl:Humidistat object is also
      required.

    Returns:
        str: the value of `dehumidification_control_type`, or None if not set

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Dehumidification Control Type"
    return self[key]

@dehumidification_control_type.setter
def dehumidification_control_type(self, value="None"):
    """Set the IDD field `Dehumidification Control Type` (default: the string "None")."""
    key = "Dehumidification Control Type"
    self[key] = value
@property
def reheat_coil_object_type(self):
    """Get the `Reheat Coil Object Type` IDD field.

    Only required if the dehumidification control type is "CoolReheat".
    Works with gas, electric, hot water and steam heating coils.

    Returns:
        str: the value of `reheat_coil_object_type`, or None if not set

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Reheat Coil Object Type"
    return self[key]

@reheat_coil_object_type.setter
def reheat_coil_object_type(self, value=None):
    """Set the IDD field `Reheat Coil Object Type`."""
    key = "Reheat Coil Object Type"
    self[key] = value
@property
def reheat_coil_name(self):
    """Get the `Reheat Coil Name` IDD field.

    Only required if the dehumidification control type is "CoolReheat".

    Returns:
        str: the value of `reheat_coil_name`, or None if not set

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Reheat Coil Name"
    return self[key]

@reheat_coil_name.setter
def reheat_coil_name(self, value=None):
    """Set the IDD field `Reheat Coil Name`."""
    key = "Reheat Coil Name"
    self[key] = value
class AirLoopHvacUnitaryHeatOnly(DataObject):

    """ Corresponds to IDD object `AirLoopHVAC:UnitaryHeatOnly`
        Unitary system, heating-only with constant volume supply fan (continuous or cycling)
        and heating coil (gas, electric, hot water, or steam). Identical to
        AirLoopHVAC:Unitary:Furnace:HeatOnly.
    """
    # NOTE(review): this class appears auto-generated from the EnergyPlus IDD.
    # The `_schema` dict drives field lookup/validation in the DataObject base;
    # field order and the exact field-name strings must match the IDD object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'availability schedule name',
                                       {'name': u'Availability Schedule Name',
                                        'pyname': u'availability_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'unitary system air inlet node name',
                                       {'name': u'Unitary System Air Inlet Node Name',
                                        'pyname': u'unitary_system_air_inlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'unitary system air outlet node name',
                                       {'name': u'Unitary System Air Outlet Node Name',
                                        'pyname': u'unitary_system_air_outlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'supply air fan operating mode schedule name',
                                       {'name': u'Supply Air Fan Operating Mode Schedule Name',
                                        'pyname': u'supply_air_fan_operating_mode_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'maximum supply air temperature',
                                       {'name': u'Maximum Supply Air Temperature',
                                        'pyname': u'maximum_supply_air_temperature',
                                        'default': 80.0,
                                        'required-field': False,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'heating supply air flow rate',
                                       {'name': u'Heating Supply Air Flow Rate',
                                        'pyname': u'heating_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'controlling zone or thermostat location',
                                       {'name': u'Controlling Zone or Thermostat Location',
                                        'pyname': u'controlling_zone_or_thermostat_location',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'supply fan object type',
                                       {'name': u'Supply Fan Object Type',
                                        'pyname': u'supply_fan_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Fan:OnOff',
                                                            u'Fan:ConstantVolume'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply fan name',
                                       {'name': u'Supply Fan Name',
                                        'pyname': u'supply_fan_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'fan placement',
                                       {'name': u'Fan Placement',
                                        'pyname': u'fan_placement',
                                        'default': u'BlowThrough',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'BlowThrough',
                                                            u'DrawThrough'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil object type',
                                       {'name': u'Heating Coil Object Type',
                                        'pyname': u'heating_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Heating:Gas',
                                                            u'Coil:Heating:Electric',
                                                            u'Coil:Heating:Water',
                                                            u'Coil:Heating:Steam'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil name',
                                       {'name': u'Heating Coil Name',
                                        'pyname': u'heating_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'Unitary Equipment',
               'min-fields': 13,
               'name': u'AirLoopHVAC:UnitaryHeatOnly',
               'pyname': u'AirLoopHvacUnitaryHeatOnly',
               'required-object': False,
               'unique-object': False}

    # Each property below is a thin accessor over the field dict keyed by the
    # IDD field name; validation happens in the DataObject item protocol.
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def availability_schedule_name(self):
        """field `Availability Schedule Name`

        |  Availability schedule name for this system. Schedule value > 0 means the system is available.
        |  If this field is blank, the system is always available.

        Args:
            value (str): value for IDD Field `Availability Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `availability_schedule_name` or None if not set
        """
        return self["Availability Schedule Name"]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Availability Schedule Name`"""
        self["Availability Schedule Name"] = value

    @property
    def unitary_system_air_inlet_node_name(self):
        """field `Unitary System Air Inlet Node Name`

        Args:
            value (str): value for IDD Field `Unitary System Air Inlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `unitary_system_air_inlet_node_name` or None if not set
        """
        return self["Unitary System Air Inlet Node Name"]

    @unitary_system_air_inlet_node_name.setter
    def unitary_system_air_inlet_node_name(self, value=None):
        """Corresponds to IDD field `Unitary System Air Inlet Node Name`"""
        self["Unitary System Air Inlet Node Name"] = value

    @property
    def unitary_system_air_outlet_node_name(self):
        """field `Unitary System Air Outlet Node Name`

        Args:
            value (str): value for IDD Field `Unitary System Air Outlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `unitary_system_air_outlet_node_name` or None if not set
        """
        return self["Unitary System Air Outlet Node Name"]

    @unitary_system_air_outlet_node_name.setter
    def unitary_system_air_outlet_node_name(self, value=None):
        """Corresponds to IDD field `Unitary System Air Outlet Node Name`"""
        self["Unitary System Air Outlet Node Name"] = value

    @property
    def supply_air_fan_operating_mode_schedule_name(self):
        """field `Supply Air Fan Operating Mode Schedule Name`

        |  A fan operating mode schedule value of 0 indicates cycling fan mode (supply air
        |  fan cycles on and off in tandem with the heating coil).
        |  Any other schedule value indicates continuous fan mode (supply air fan operates
        |  continuously regardless of heating coil operation).
        |  Leaving this schedule name blank will default to cycling fan mode for the
        |  entire simulation period.

        Args:
            value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
        """
        return self["Supply Air Fan Operating Mode Schedule Name"]

    @supply_air_fan_operating_mode_schedule_name.setter
    def supply_air_fan_operating_mode_schedule_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
        Name`"""
        self["Supply Air Fan Operating Mode Schedule Name"] = value

    @property
    def maximum_supply_air_temperature(self):
        """field `Maximum Supply Air Temperature`

        |  Units: C
        |  Default value: 80.0

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Supply Air Temperature`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_supply_air_temperature` or None if not set
        """
        return self["Maximum Supply Air Temperature"]

    @maximum_supply_air_temperature.setter
    def maximum_supply_air_temperature(self, value=80.0):
        """Corresponds to IDD field `Maximum Supply Air Temperature`"""
        self["Maximum Supply Air Temperature"] = value

    @property
    def heating_supply_air_flow_rate(self):
        """field `Heating Supply Air Flow Rate`

        |  This value should be > 0 and <= than the fan air flow rate.
        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
        """
        return self["Heating Supply Air Flow Rate"]

    @heating_supply_air_flow_rate.setter
    def heating_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Heating Supply Air Flow Rate`"""
        self["Heating Supply Air Flow Rate"] = value

    @property
    def controlling_zone_or_thermostat_location(self):
        """field `Controlling Zone or Thermostat Location`

        Args:
            value (str): value for IDD Field `Controlling Zone or Thermostat Location`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `controlling_zone_or_thermostat_location` or None if not set
        """
        return self["Controlling Zone or Thermostat Location"]

    @controlling_zone_or_thermostat_location.setter
    def controlling_zone_or_thermostat_location(self, value=None):
        """Corresponds to IDD field `Controlling Zone or Thermostat
        Location`"""
        self["Controlling Zone or Thermostat Location"] = value

    @property
    def supply_fan_object_type(self):
        """field `Supply Fan Object Type`

        |  Fan:ConstantVolume only works with continuous fan operating mode (i.e. fan
        |  operating mode schedule values are greater than 0).

        Args:
            value (str): value for IDD Field `Supply Fan Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_fan_object_type` or None if not set
        """
        return self["Supply Fan Object Type"]

    @supply_fan_object_type.setter
    def supply_fan_object_type(self, value=None):
        """Corresponds to IDD field `Supply Fan Object Type`"""
        self["Supply Fan Object Type"] = value

    @property
    def supply_fan_name(self):
        """field `Supply Fan Name`

        Args:
            value (str): value for IDD Field `Supply Fan Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_fan_name` or None if not set
        """
        return self["Supply Fan Name"]

    @supply_fan_name.setter
    def supply_fan_name(self, value=None):
        """Corresponds to IDD field `Supply Fan Name`"""
        self["Supply Fan Name"] = value

    @property
    def fan_placement(self):
        """field `Fan Placement`

        |  Default value: BlowThrough

        Args:
            value (str): value for IDD Field `Fan Placement`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `fan_placement` or None if not set
        """
        return self["Fan Placement"]

    @fan_placement.setter
    def fan_placement(self, value="BlowThrough"):
        """Corresponds to IDD field `Fan Placement`"""
        self["Fan Placement"] = value

    @property
    def heating_coil_object_type(self):
        """field `Heating Coil Object Type`

        |  works with gas, electric, hot water and steam heating coils

        Args:
            value (str): value for IDD Field `Heating Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_object_type` or None if not set
        """
        return self["Heating Coil Object Type"]

    @heating_coil_object_type.setter
    def heating_coil_object_type(self, value=None):
        """Corresponds to IDD field `Heating Coil Object Type`"""
        self["Heating Coil Object Type"] = value

    @property
    def heating_coil_name(self):
        """field `Heating Coil Name`

        Args:
            value (str): value for IDD Field `Heating Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_name` or None if not set
        """
        return self["Heating Coil Name"]

    @heating_coil_name.setter
    def heating_coil_name(self, value=None):
        """Corresponds to IDD field `Heating Coil Name`"""
        self["Heating Coil Name"] = value
class AirLoopHvacUnitaryHeatCool(DataObject):

    """ Corresponds to IDD object `AirLoopHVAC:UnitaryHeatCool`
        Unitary system, heating and cooling with constant volume supply fan (continuous or
        cycling), direct expansion (DX) cooling coil, heating coil (gas, electric,
        hot water, or steam), and optional reheat coil for dehumidification control.
        Identical to AirLoopHVAC:Unitary:Furnace:HeatCool.
    """
    # NOTE(review): this class appears auto-generated from the EnergyPlus IDD.
    # The `_schema` dict drives field lookup/validation in the DataObject base;
    # field order and the exact field-name strings must match the IDD object.
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'availability schedule name',
                                       {'name': u'Availability Schedule Name',
                                        'pyname': u'availability_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'unitary system air inlet node name',
                                       {'name': u'Unitary System Air Inlet Node Name',
                                        'pyname': u'unitary_system_air_inlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'unitary system air outlet node name',
                                       {'name': u'Unitary System Air Outlet Node Name',
                                        'pyname': u'unitary_system_air_outlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'supply air fan operating mode schedule name',
                                       {'name': u'Supply Air Fan Operating Mode Schedule Name',
                                        'pyname': u'supply_air_fan_operating_mode_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'maximum supply air temperature',
                                       {'name': u'Maximum Supply Air Temperature',
                                        'pyname': u'maximum_supply_air_temperature',
                                        'default': 80.0,
                                        'required-field': False,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'cooling supply air flow rate',
                                       {'name': u'Cooling Supply Air Flow Rate',
                                        'pyname': u'cooling_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'heating supply air flow rate',
                                       {'name': u'Heating Supply Air Flow Rate',
                                        'pyname': u'heating_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'no load supply air flow rate',
                                       {'name': u'No Load Supply Air Flow Rate',
                                        'pyname': u'no_load_supply_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'controlling zone or thermostat location',
                                       {'name': u'Controlling Zone or Thermostat Location',
                                        'pyname': u'controlling_zone_or_thermostat_location',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'supply fan object type',
                                       {'name': u'Supply Fan Object Type',
                                        'pyname': u'supply_fan_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Fan:OnOff',
                                                            u'Fan:ConstantVolume'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply fan name',
                                       {'name': u'Supply Fan Name',
                                        'pyname': u'supply_fan_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'fan placement',
                                       {'name': u'Fan Placement',
                                        'pyname': u'fan_placement',
                                        'default': u'BlowThrough',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'BlowThrough',
                                                            u'DrawThrough'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil object type',
                                       {'name': u'Heating Coil Object Type',
                                        'pyname': u'heating_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Heating:Gas',
                                                            u'Coil:Heating:Electric',
                                                            u'Coil:Heating:Water',
                                                            u'Coil:Heating:Steam'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil name',
                                       {'name': u'Heating Coil Name',
                                        'pyname': u'heating_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'cooling coil object type',
                                       {'name': u'Cooling Coil Object Type',
                                        'pyname': u'cooling_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Cooling:DX:SingleSpeed',
                                                            u'Coil:Cooling:DX:VariableSpeed',
                                                            u'CoilSystem:Cooling:DX:HeatExchangerAssisted'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'cooling coil name',
                                       {'name': u'Cooling Coil Name',
                                        'pyname': u'cooling_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'dehumidification control type',
                                       {'name': u'Dehumidification Control Type',
                                        'pyname': u'dehumidification_control_type',
                                        'default': u'None',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'None',
                                                            u'Multimode',
                                                            u'CoolReheat'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'reheat coil object type',
                                       {'name': u'Reheat Coil Object Type',
                                        'pyname': u'reheat_coil_object_type',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Heating:Gas',
                                                            u'Coil:Heating:Electric',
                                                            u'Coil:Heating:Desuperheater',
                                                            u'Coil:Heating:Water',
                                                            u'Coil:Heating:Steam'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'reheat coil name',
                                       {'name': u'Reheat Coil Name',
                                        'pyname': u'reheat_coil_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'})]),
               'format': None,
               'group': u'Unitary Equipment',
               'min-fields': 17,
               'name': u'AirLoopHVAC:UnitaryHeatCool',
               'pyname': u'AirLoopHvacUnitaryHeatCool',
               'required-object': False,
               'unique-object': False}

    # Each property below is a thin accessor over the field dict keyed by the
    # IDD field name; validation happens in the DataObject item protocol.
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def availability_schedule_name(self):
        """field `Availability Schedule Name`

        |  Availability schedule name for this system. Schedule value > 0 means the system is available.
        |  If this field is blank, the system is always available.

        Args:
            value (str): value for IDD Field `Availability Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `availability_schedule_name` or None if not set
        """
        return self["Availability Schedule Name"]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Availability Schedule Name`"""
        self["Availability Schedule Name"] = value

    @property
    def unitary_system_air_inlet_node_name(self):
        """field `Unitary System Air Inlet Node Name`

        Args:
            value (str): value for IDD Field `Unitary System Air Inlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `unitary_system_air_inlet_node_name` or None if not set
        """
        return self["Unitary System Air Inlet Node Name"]

    @unitary_system_air_inlet_node_name.setter
    def unitary_system_air_inlet_node_name(self, value=None):
        """Corresponds to IDD field `Unitary System Air Inlet Node Name`"""
        self["Unitary System Air Inlet Node Name"] = value

    @property
    def unitary_system_air_outlet_node_name(self):
        """field `Unitary System Air Outlet Node Name`

        Args:
            value (str): value for IDD Field `Unitary System Air Outlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `unitary_system_air_outlet_node_name` or None if not set
        """
        return self["Unitary System Air Outlet Node Name"]

    @unitary_system_air_outlet_node_name.setter
    def unitary_system_air_outlet_node_name(self, value=None):
        """Corresponds to IDD field `Unitary System Air Outlet Node Name`"""
        self["Unitary System Air Outlet Node Name"] = value

    @property
    def supply_air_fan_operating_mode_schedule_name(self):
        """field `Supply Air Fan Operating Mode Schedule Name`

        |  A fan operating mode schedule value of 0 indicates cycling fan mode (supply air
        |  fan cycles on and off in tandem with the cooling or heating coil).
        |  Any other schedule value indicates continuous fan mode (supply air fan operates
        |  continuously regardless of cooling or heating coil operation). Provide a schedule
        |  with non-zero values when high humidity control is specified.
        |  Leaving this schedule name blank will default to cycling fan mode for the
        |  entire simulation period.

        Args:
            value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
        """
        return self["Supply Air Fan Operating Mode Schedule Name"]

    @supply_air_fan_operating_mode_schedule_name.setter
    def supply_air_fan_operating_mode_schedule_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
        Name`"""
        self["Supply Air Fan Operating Mode Schedule Name"] = value

    @property
    def maximum_supply_air_temperature(self):
        """field `Maximum Supply Air Temperature`

        |  Units: C
        |  Default value: 80.0

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Supply Air Temperature`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_supply_air_temperature` or None if not set
        """
        return self["Maximum Supply Air Temperature"]

    @maximum_supply_air_temperature.setter
    def maximum_supply_air_temperature(self, value=80.0):
        """Corresponds to IDD field `Maximum Supply Air Temperature`"""
        self["Maximum Supply Air Temperature"] = value

    @property
    def cooling_supply_air_flow_rate(self):
        """field `Cooling Supply Air Flow Rate`

        |  Must be less than or equal to the fan's maximum flow rate.
        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set
        """
        return self["Cooling Supply Air Flow Rate"]

    @cooling_supply_air_flow_rate.setter
    def cooling_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Cooling Supply Air Flow Rate`"""
        self["Cooling Supply Air Flow Rate"] = value

    @property
    def heating_supply_air_flow_rate(self):
        """field `Heating Supply Air Flow Rate`

        |  Must be less than or equal to the fan's maximum flow rate.
        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
        """
        return self["Heating Supply Air Flow Rate"]

    @heating_supply_air_flow_rate.setter
    def heating_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Heating Supply Air Flow Rate`"""
        self["Heating Supply Air Flow Rate"] = value

    @property
    def no_load_supply_air_flow_rate(self):
        """field `No Load Supply Air Flow Rate`

        |  Must be less than or equal to the fan's maximum flow rate.
        |  Only used when fan operating mode is continuous (disregarded for cycling fan mode).
        |  This air flow rate is used when no heating or cooling is required (i.e., the DX coil
        |  compressor and heating coil are off). If this field is left blank or zero, the supply
        |  air flow rate from the previous on cycle (either cooling or heating) is used.
        |  Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `No Load Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `no_load_supply_air_flow_rate` or None if not set
        """
        return self["No Load Supply Air Flow Rate"]

    @no_load_supply_air_flow_rate.setter
    def no_load_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `No Load Supply Air Flow Rate`"""
        self["No Load Supply Air Flow Rate"] = value

    @property
    def controlling_zone_or_thermostat_location(self):
        """field `Controlling Zone or Thermostat Location`

        Args:
            value (str): value for IDD Field `Controlling Zone or Thermostat Location`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `controlling_zone_or_thermostat_location` or None if not set
        """
        return self["Controlling Zone or Thermostat Location"]

    @controlling_zone_or_thermostat_location.setter
    def controlling_zone_or_thermostat_location(self, value=None):
        """Corresponds to IDD field `Controlling Zone or Thermostat
        Location`"""
        self["Controlling Zone or Thermostat Location"] = value

    @property
    def supply_fan_object_type(self):
        """field `Supply Fan Object Type`

        |  Fan:ConstantVolume only works with continuous fan operating mode (i.e. supply
        |  air fan operating mode schedule values not equal to 0).

        Args:
            value (str): value for IDD Field `Supply Fan Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_fan_object_type` or None if not set
        """
        return self["Supply Fan Object Type"]

    @supply_fan_object_type.setter
    def supply_fan_object_type(self, value=None):
        """Corresponds to IDD field `Supply Fan Object Type`"""
        self["Supply Fan Object Type"] = value

    @property
    def supply_fan_name(self):
        """field `Supply Fan Name`

        Args:
            value (str): value for IDD Field `Supply Fan Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_fan_name` or None if not set
        """
        return self["Supply Fan Name"]

    @supply_fan_name.setter
    def supply_fan_name(self, value=None):
        """Corresponds to IDD field `Supply Fan Name`"""
        self["Supply Fan Name"] = value

    @property
    def fan_placement(self):
        """field `Fan Placement`

        |  Default value: BlowThrough

        Args:
            value (str): value for IDD Field `Fan Placement`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `fan_placement` or None if not set
        """
        return self["Fan Placement"]

    @fan_placement.setter
    def fan_placement(self, value="BlowThrough"):
        """Corresponds to IDD field `Fan Placement`"""
        self["Fan Placement"] = value

    @property
    def heating_coil_object_type(self):
        """field `Heating Coil Object Type`

        |  works with gas, electric, hot water and steam heating coils

        Args:
            value (str): value for IDD Field `Heating Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_object_type` or None if not set
        """
        return self["Heating Coil Object Type"]

    @heating_coil_object_type.setter
    def heating_coil_object_type(self, value=None):
        """Corresponds to IDD field `Heating Coil Object Type`"""
        self["Heating Coil Object Type"] = value

    @property
    def heating_coil_name(self):
        """field `Heating Coil Name`

        Args:
            value (str): value for IDD Field `Heating Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_name` or None if not set
        """
        return self["Heating Coil Name"]

    @heating_coil_name.setter
    def heating_coil_name(self, value=None):
        """Corresponds to IDD field `Heating Coil Name`"""
        self["Heating Coil Name"] = value

    @property
    def cooling_coil_object_type(self):
        """field `Cooling Coil Object Type`

        |  Only works with DX cooling coil types or
        |  Coil:Cooling:DX:VariableSpeed.

        Args:
            value (str): value for IDD Field `Cooling Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `cooling_coil_object_type` or None if not set
        """
        return self["Cooling Coil Object Type"]

    @cooling_coil_object_type.setter
    def cooling_coil_object_type(self, value=None):
        """Corresponds to IDD field `Cooling Coil Object Type`"""
        self["Cooling Coil Object Type"] = value

    @property
    def cooling_coil_name(self):
        """field `Cooling Coil Name`

        Args:
            value (str): value for IDD Field `Cooling Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `cooling_coil_name` or None if not set
        """
        return self["Cooling Coil Name"]

    @cooling_coil_name.setter
    def cooling_coil_name(self, value=None):
        """Corresponds to IDD field `Cooling Coil Name`"""
        self["Cooling Coil Name"] = value

    @property
    def dehumidification_control_type(self):
        """field `Dehumidification Control Type`

        |  None = meet sensible load only
        |  Multimode = activate enhanced dehumidification mode
        |  as needed and meet sensible load. Valid only with
        |  cooling coil type CoilSystem:Cooling:DX:HeatExchangerAssisted.
        |  This control mode allows the heat exchanger to be turned
        |  on and off based on the zone dehumidification requirements.
        |  A ZoneControl:Humidistat object is also required.
        |  CoolReheat = cool beyond the dry-bulb setpoint.
        |  as required to meet the humidity setpoint. Valid with all
        |  cooling coil types. When a heat exchanger assisted cooling
        |  coil is used, the heat exchanger is locked on at all times.
        |  A ZoneControl:Humidistat object is also required.
        |  Default value: None

        Args:
            value (str): value for IDD Field `Dehumidification Control Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `dehumidification_control_type` or None if not set
        """
        return self["Dehumidification Control Type"]

    @dehumidification_control_type.setter
    def dehumidification_control_type(self, value="None"):
        """Corresponds to IDD field `Dehumidification Control Type`"""
        self["Dehumidification Control Type"] = value

    @property
    def reheat_coil_object_type(self):
        """field `Reheat Coil Object Type`

        |  Only required if dehumidification control type is "CoolReheat"
        |  works with gas, electric, desuperheating, hot water and steam heating coils

        Args:
            value (str): value for IDD Field `Reheat Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `reheat_coil_object_type` or None if not set
        """
        return self["Reheat Coil Object Type"]

    @reheat_coil_object_type.setter
    def reheat_coil_object_type(self, value=None):
        """Corresponds to IDD field `Reheat Coil Object Type`"""
        self["Reheat Coil Object Type"] = value

    @property
    def reheat_coil_name(self):
        """field `Reheat Coil Name`

        |  Only required if dehumidification control type is "CoolReheat"

        Args:
            value (str): value for IDD Field `Reheat Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `reheat_coil_name` or None if not set
        """
        return self["Reheat Coil Name"]

    @reheat_coil_name.setter
    def reheat_coil_name(self, value=None):
        """Corresponds to IDD field `Reheat Coil Name`"""
        self["Reheat Coil Name"] = value
class AirLoopHvacUnitaryHeatPumpAirToAir(DataObject):

    """ Corresponds to IDD object `AirLoopHVAC:UnitaryHeatPump:AirToAir`
        Unitary heat pump system, heating and cooling, single-speed with supply fan, direct
        expansion (DX) cooling coil, DX heating coil (air-to-air heat pump), and supplemental
        heating coil (gas, electric, hot water, or steam).
    """

    # Machine-readable IDD schema for this object: one entry per field keyed by
    # the lower-case field name, giving its display name, python attribute name
    # ('pyname'), requirement/autosize flags, data type, and where applicable
    # units, default, numeric bounds ('minimum', 'maximum', exclusive
    # 'minimum>') and the closed set of accepted values. Presumably consumed by
    # the DataObject base class for validation and IDF serialization -- confirm
    # against DataObject (defined elsewhere in this module).
    _schema = {'extensible-fields': OrderedDict(),
               'fields': OrderedDict([(u'name',
                                       {'name': u'Name',
                                        'pyname': u'name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'alpha'}),
                                      (u'availability schedule name',
                                       {'name': u'Availability Schedule Name',
                                        'pyname': u'availability_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'air inlet node name',
                                       {'name': u'Air Inlet Node Name',
                                        'pyname': u'air_inlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'air outlet node name',
                                       {'name': u'Air Outlet Node Name',
                                        'pyname': u'air_outlet_node_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'node'}),
                                      (u'cooling supply air flow rate',
                                       {'name': u'Cooling Supply Air Flow Rate',
                                        'pyname': u'cooling_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'heating supply air flow rate',
                                       {'name': u'Heating Supply Air Flow Rate',
                                        'pyname': u'heating_supply_air_flow_rate',
                                        'minimum>': 0.0,
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'no load supply air flow rate',
                                       {'name': u'No Load Supply Air Flow Rate',
                                        'pyname': u'no_load_supply_air_flow_rate',
                                        'required-field': False,
                                        'autosizable': True,
                                        'minimum': 0.0,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'm3/s'}),
                                      (u'controlling zone or thermostat location',
                                       {'name': u'Controlling Zone or Thermostat Location',
                                        'pyname': u'controlling_zone_or_thermostat_location',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'supply air fan object type',
                                       {'name': u'Supply Air Fan Object Type',
                                        'pyname': u'supply_air_fan_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Fan:OnOff',
                                                            u'Fan:ConstantVolume'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply air fan name',
                                       {'name': u'Supply Air Fan Name',
                                        'pyname': u'supply_air_fan_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'heating coil object type',
                                       {'name': u'Heating Coil Object Type',
                                        'pyname': u'heating_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Heating:DX:SingleSpeed',
                                                            u'Coil:Heating:DX:VariableSpeed'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'heating coil name',
                                       {'name': u'Heating Coil Name',
                                        'pyname': u'heating_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'cooling coil object type',
                                       {'name': u'Cooling Coil Object Type',
                                        'pyname': u'cooling_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Cooling:DX:SingleSpeed',
                                                            u'Coil:Cooling:DX:VariableSpeed',
                                                            u'CoilSystem:Cooling:DX:HeatExchangerAssisted'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'cooling coil name',
                                       {'name': u'Cooling Coil Name',
                                        'pyname': u'cooling_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'supplemental heating coil object type',
                                       {'name': u'Supplemental Heating Coil Object Type',
                                        'pyname': u'supplemental_heating_coil_object_type',
                                        'required-field': True,
                                        'autosizable': False,
                                        'accepted-values': [u'Coil:Heating:Gas',
                                                            u'Coil:Heating:Electric',
                                                            u'Coil:Heating:Water',
                                                            u'Coil:Heating:Steam'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supplemental heating coil name',
                                       {'name': u'Supplemental Heating Coil Name',
                                        'pyname': u'supplemental_heating_coil_name',
                                        'required-field': True,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'maximum supply air temperature from supplemental heater',
                                       {'name': u'Maximum Supply Air Temperature from Supplemental Heater',
                                        'pyname': u'maximum_supply_air_temperature_from_supplemental_heater',
                                        'required-field': True,
                                        'autosizable': True,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'maximum outdoor dry-bulb temperature for supplemental heater operation',
                                       {'name': u'Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation',
                                        'pyname': u'maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation',
                                        'default': 21.0,
                                        'maximum': 21.0,
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'real',
                                        'unit': u'C'}),
                                      (u'fan placement',
                                       {'name': u'Fan Placement',
                                        'pyname': u'fan_placement',
                                        'default': u'BlowThrough',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'BlowThrough',
                                                            u'DrawThrough'],
                                        'autocalculatable': False,
                                        'type': 'alpha'}),
                                      (u'supply air fan operating mode schedule name',
                                       {'name': u'Supply Air Fan Operating Mode Schedule Name',
                                        'pyname': u'supply_air_fan_operating_mode_schedule_name',
                                        'required-field': False,
                                        'autosizable': False,
                                        'autocalculatable': False,
                                        'type': u'object-list'}),
                                      (u'dehumidification control type',
                                       {'name': u'Dehumidification Control Type',
                                        'pyname': u'dehumidification_control_type',
                                        'default': u'None',
                                        'required-field': False,
                                        'autosizable': False,
                                        'accepted-values': [u'None',
                                                            u'Multimode',
                                                            u'CoolReheat'],
                                        'autocalculatable': False,
                                        'type': 'alpha'})]),
               'format': None,
               'group': u'Unitary Equipment',
               'min-fields': 19,
               'name': u'AirLoopHVAC:UnitaryHeatPump:AirToAir',
               'pyname': u'AirLoopHvacUnitaryHeatPumpAirToAir',
               'required-object': False,
               'unique-object': False}

    # Each property below reads/writes exactly one IDD field via the item
    # protocol (self["Field Name"]); the item access is implemented by the
    # DataObject base class (not shown in this file).
    @property
    def name(self):
        """field `Name`

        Args:
            value (str): value for IDD Field `Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `name` or None if not set
        """
        return self["Name"]

    @name.setter
    def name(self, value=None):
        """Corresponds to IDD field `Name`"""
        self["Name"] = value

    @property
    def availability_schedule_name(self):
        """field `Availability Schedule Name`

        | Availability schedule name for this system. Schedule value > 0 means the system is available.
        | If this field is blank, the system is always available.
        | A schedule value greater than zero (usually 1 is used) indicates that the unit is
        | available to operate as needed. A value less than or equal to zero (usually zero
        | is used) denotes that the unit must be off.

        Args:
            value (str): value for IDD Field `Availability Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `availability_schedule_name` or None if not set
        """
        return self["Availability Schedule Name"]

    @availability_schedule_name.setter
    def availability_schedule_name(self, value=None):
        """Corresponds to IDD field `Availability Schedule Name`"""
        self["Availability Schedule Name"] = value

    @property
    def air_inlet_node_name(self):
        """field `Air Inlet Node Name`

        Args:
            value (str): value for IDD Field `Air Inlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `air_inlet_node_name` or None if not set
        """
        return self["Air Inlet Node Name"]

    @air_inlet_node_name.setter
    def air_inlet_node_name(self, value=None):
        """Corresponds to IDD field `Air Inlet Node Name`"""
        self["Air Inlet Node Name"] = value

    @property
    def air_outlet_node_name(self):
        """field `Air Outlet Node Name`

        Args:
            value (str): value for IDD Field `Air Outlet Node Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `air_outlet_node_name` or None if not set
        """
        return self["Air Outlet Node Name"]

    @air_outlet_node_name.setter
    def air_outlet_node_name(self, value=None):
        """Corresponds to IDD field `Air Outlet Node Name`"""
        self["Air Outlet Node Name"] = value

    @property
    def cooling_supply_air_flow_rate(self):
        """field `Cooling Supply Air Flow Rate`

        | Must be less than or equal to the fan's maximum flow rate.
        | Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set
        """
        return self["Cooling Supply Air Flow Rate"]

    @cooling_supply_air_flow_rate.setter
    def cooling_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Cooling Supply Air Flow Rate`"""
        self["Cooling Supply Air Flow Rate"] = value

    @property
    def heating_supply_air_flow_rate(self):
        """field `Heating Supply Air Flow Rate`

        | Must be less than or equal to the fan's maximum flow rate.
        | Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
        """
        return self["Heating Supply Air Flow Rate"]

    @heating_supply_air_flow_rate.setter
    def heating_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `Heating Supply Air Flow Rate`"""
        self["Heating Supply Air Flow Rate"] = value

    @property
    def no_load_supply_air_flow_rate(self):
        """field `No Load Supply Air Flow Rate`

        | Must be less than or equal to the fan's maximum flow rate.
        | Only used when fan operating mode is continuous (disregarded for cycling fan mode).
        | This air flow rate is used when no heating or cooling is required (i.e., the DX coil
        | compressor and supplemental heating coil are off). If this field is left blank or zero,
        | the supply air flow rate from the previous on cycle (either cooling or heating) is used.
        | Units: m3/s

        Args:
            value (float or "Autosize"): value for IDD Field `No Load Supply Air Flow Rate`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `no_load_supply_air_flow_rate` or None if not set
        """
        return self["No Load Supply Air Flow Rate"]

    @no_load_supply_air_flow_rate.setter
    def no_load_supply_air_flow_rate(self, value=None):
        """Corresponds to IDD field `No Load Supply Air Flow Rate`"""
        self["No Load Supply Air Flow Rate"] = value

    @property
    def controlling_zone_or_thermostat_location(self):
        """field `Controlling Zone or Thermostat Location`

        Args:
            value (str): value for IDD Field `Controlling Zone or Thermostat Location`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `controlling_zone_or_thermostat_location` or None if not set
        """
        return self["Controlling Zone or Thermostat Location"]

    @controlling_zone_or_thermostat_location.setter
    def controlling_zone_or_thermostat_location(self, value=None):
        """Corresponds to IDD field `Controlling Zone or Thermostat
        Location`"""
        self["Controlling Zone or Thermostat Location"] = value

    @property
    def supply_air_fan_object_type(self):
        """field `Supply Air Fan Object Type`

        | Fan:ConstantVolume only works with continuous fan operating mode (i.e. fan
        | operating mode schedule values are greater than 0 or the fan operating mode
        | schedule name field is left blank).

        Args:
            value (str): value for IDD Field `Supply Air Fan Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_object_type` or None if not set
        """
        return self["Supply Air Fan Object Type"]

    @supply_air_fan_object_type.setter
    def supply_air_fan_object_type(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Object Type`"""
        self["Supply Air Fan Object Type"] = value

    @property
    def supply_air_fan_name(self):
        """field `Supply Air Fan Name`

        | Needs to match in the fan object

        Args:
            value (str): value for IDD Field `Supply Air Fan Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_name` or None if not set
        """
        return self["Supply Air Fan Name"]

    @supply_air_fan_name.setter
    def supply_air_fan_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Name`"""
        self["Supply Air Fan Name"] = value

    @property
    def heating_coil_object_type(self):
        """field `Heating Coil Object Type`

        | Only works with Coil:Heating:DX:SingleSpeed or
        | Coil:Heating:DX:VariableSpeed

        Args:
            value (str): value for IDD Field `Heating Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_object_type` or None if not set
        """
        return self["Heating Coil Object Type"]

    @heating_coil_object_type.setter
    def heating_coil_object_type(self, value=None):
        """Corresponds to IDD field `Heating Coil Object Type`"""
        self["Heating Coil Object Type"] = value

    @property
    def heating_coil_name(self):
        """field `Heating Coil Name`

        | Needs to match in the DX heating coil object

        Args:
            value (str): value for IDD Field `Heating Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `heating_coil_name` or None if not set
        """
        return self["Heating Coil Name"]

    @heating_coil_name.setter
    def heating_coil_name(self, value=None):
        """Corresponds to IDD field `Heating Coil Name`"""
        self["Heating Coil Name"] = value

    @property
    def cooling_coil_object_type(self):
        """field `Cooling Coil Object Type`

        | Only works with Coil:Cooling:DX:SingleSpeed or
        | CoilSystem:Cooling:DX:HeatExchangerAssisted or
        | Coil:Cooling:DX:VariableSpeed

        Args:
            value (str): value for IDD Field `Cooling Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `cooling_coil_object_type` or None if not set
        """
        return self["Cooling Coil Object Type"]

    @cooling_coil_object_type.setter
    def cooling_coil_object_type(self, value=None):
        """Corresponds to IDD field `Cooling Coil Object Type`"""
        self["Cooling Coil Object Type"] = value

    @property
    def cooling_coil_name(self):
        """field `Cooling Coil Name`

        | Needs to match in the DX cooling coil object

        Args:
            value (str): value for IDD Field `Cooling Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `cooling_coil_name` or None if not set
        """
        return self["Cooling Coil Name"]

    @cooling_coil_name.setter
    def cooling_coil_name(self, value=None):
        """Corresponds to IDD field `Cooling Coil Name`"""
        self["Cooling Coil Name"] = value

    @property
    def supplemental_heating_coil_object_type(self):
        """field `Supplemental Heating Coil Object Type`

        | works with gas, electric, hot water and steam heating coils

        Args:
            value (str): value for IDD Field `Supplemental Heating Coil Object Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supplemental_heating_coil_object_type` or None if not set
        """
        return self["Supplemental Heating Coil Object Type"]

    @supplemental_heating_coil_object_type.setter
    def supplemental_heating_coil_object_type(self, value=None):
        """Corresponds to IDD field `Supplemental Heating Coil Object Type`"""
        self["Supplemental Heating Coil Object Type"] = value

    @property
    def supplemental_heating_coil_name(self):
        """field `Supplemental Heating Coil Name`

        | Needs to match in the supplemental heating coil object

        Args:
            value (str): value for IDD Field `Supplemental Heating Coil Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supplemental_heating_coil_name` or None if not set
        """
        return self["Supplemental Heating Coil Name"]

    @supplemental_heating_coil_name.setter
    def supplemental_heating_coil_name(self, value=None):
        """Corresponds to IDD field `Supplemental Heating Coil Name`"""
        self["Supplemental Heating Coil Name"] = value

    @property
    def maximum_supply_air_temperature_from_supplemental_heater(self):
        """field `Maximum Supply Air Temperature from Supplemental Heater`

        | Units: C

        Args:
            value (float or "Autosize"): value for IDD Field `Maximum Supply Air Temperature from Supplemental Heater`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float or "Autosize": the value of `maximum_supply_air_temperature_from_supplemental_heater` or None if not set
        """
        return self["Maximum Supply Air Temperature from Supplemental Heater"]

    @maximum_supply_air_temperature_from_supplemental_heater.setter
    def maximum_supply_air_temperature_from_supplemental_heater(
            self,
            value=None):
        """Corresponds to IDD field `Maximum Supply Air Temperature from
        Supplemental Heater`"""
        self["Maximum Supply Air Temperature from Supplemental Heater"] = value

    @property
    def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(
            self):
        """field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`

        | Units: C
        | Default value: 21.0
        | value <= 21.0

        Args:
            value (float): value for IDD Field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            float: the value of `maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation` or None if not set
        """
        return self[
            "Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"]

    @maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation.setter
    def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(
            self,
            value=21.0):
        """ Corresponds to IDD field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`
        """
        self[
            "Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"] = value

    @property
    def fan_placement(self):
        """field `Fan Placement`

        | Default value: BlowThrough

        Args:
            value (str): value for IDD Field `Fan Placement`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `fan_placement` or None if not set
        """
        return self["Fan Placement"]

    @fan_placement.setter
    def fan_placement(self, value="BlowThrough"):
        """Corresponds to IDD field `Fan Placement`"""
        self["Fan Placement"] = value

    @property
    def supply_air_fan_operating_mode_schedule_name(self):
        """field `Supply Air Fan Operating Mode Schedule Name`

        | A fan operating mode schedule value of 0 indicates cycling fan mode (supply air
        | fan cycles on and off in tandem with the cooling or heating coil).
        | Any other schedule value indicates continuous fan mode (supply air fan operates
        | continuously regardless of cooling or heating coil operation).
        | Leaving this schedule name blank will default to cycling fan mode for the
        | entire simulation period.

        Args:
            value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
        """
        return self["Supply Air Fan Operating Mode Schedule Name"]

    @supply_air_fan_operating_mode_schedule_name.setter
    def supply_air_fan_operating_mode_schedule_name(self, value=None):
        """Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
        Name`"""
        self["Supply Air Fan Operating Mode Schedule Name"] = value

    @property
    def dehumidification_control_type(self):
        """field `Dehumidification Control Type`

        | None = meet sensible load only
        | Multimode = activate enhanced dehumidification mode
        | as needed and meet sensible load. Valid only with
        | cooling coil type CoilSystem:Cooling:DX:HeatExchangerAssisted.
        | This control mode allows the heat exchanger to be turned
        | on and off based on the zone dehumidification requirements.
        | A ZoneControl:Humidistat object is also required.
        | CoolReheat = cool beyond the dry-bulb setpoint.
        | as required to meet the humidity setpoint. Valid with all
        | cooling coil types. When a heat exchanger assisted Cooling
        | coil is used, the heat exchanger is locked on at all times.
        | A ZoneControl:Humidistat object is also required.
        | Default value: None

        Args:
            value (str): value for IDD Field `Dehumidification Control Type`

        Raises:
            ValueError: if `value` is not a valid value

        Returns:
            str: the value of `dehumidification_control_type` or None if not set
        """
        return self["Dehumidification Control Type"]

    @dehumidification_control_type.setter
    def dehumidification_control_type(self, value="None"):
        """Corresponds to IDD field `Dehumidification Control Type`"""
        self["Dehumidification Control Type"] = value
class AirLoopHvacUnitaryHeatPumpWaterToAir(DataObject):
""" Corresponds to IDD object `AirLoopHVAC:UnitaryHeatPump:WaterToAir`
Unitary heat pump system, heating and cooling, single-speed with constant volume
supply fan (continuous or cycling), direct expansion (DX) cooling coil, DX heating
coil (water-to-air heat pump), and supplemental heating coil (gas, electric,
hot water, or steam).
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'air inlet node name',
{'name': u'Air Inlet Node Name',
'pyname': u'air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'air outlet node name',
{'name': u'Air Outlet Node Name',
'pyname': u'air_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'supply air flow rate',
{'name': u'Supply Air Flow Rate',
'pyname': u'supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'controlling zone or thermostat location',
{'name': u'Controlling Zone or Thermostat Location',
'pyname': u'controlling_zone_or_thermostat_location',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply air fan object type',
{'name': u'Supply Air Fan Object Type',
'pyname': u'supply_air_fan_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Fan:OnOff'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan name',
{'name': u'Supply Air Fan Name',
'pyname': u'supply_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heating coil object type',
{'name': u'Heating Coil Object Type',
'pyname': u'heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:WaterToAirHeatPump:ParameterEstimation',
u'Coil:Heating:WaterToAirHeatPump:EquationFit',
u'Coil:Heating:WaterToAirHeatPump:VariableSpeedEquationFit'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil name',
{'name': u'Heating Coil Name',
'pyname': u'heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heating convergence',
{'name': u'Heating Convergence',
'pyname': u'heating_convergence',
'default': 0.001,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'cooling coil object type',
{'name': u'Cooling Coil Object Type',
'pyname': u'cooling_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Cooling:WaterToAirHeatPump:ParameterEstimation',
u'Coil:Cooling:WaterToAirHeatPump:EquationFit',
u'Coil:Cooling:WaterToAirHeatPump:VariableSpeedEquationFit'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling coil name',
{'name': u'Cooling Coil Name',
'pyname': u'cooling_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling convergence',
{'name': u'Cooling Convergence',
'pyname': u'cooling_convergence',
'default': 0.001,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'maximum cycling rate',
{'name': u'Maximum Cycling Rate',
'pyname': u'maximum_cycling_rate',
'default': 2.5,
'maximum': 5.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'cycles/hr'}),
(u'heat pump time constant',
{'name': u'Heat Pump Time Constant',
'pyname': u'heat_pump_time_constant',
'default': 60.0,
'maximum': 500.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u's'}),
(u'fraction of on-cycle power use',
{'name': u'Fraction of On-Cycle Power Use',
'pyname': u'fraction_of_oncycle_power_use',
'default': 0.01,
'maximum': 0.05,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real'}),
(u'heat pump fan delay time',
{'name': u'Heat Pump Fan Delay Time',
'pyname': u'heat_pump_fan_delay_time',
'default': 60.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u's'}),
(u'supplemental heating coil object type',
{'name': u'Supplemental Heating Coil Object Type',
'pyname': u'supplemental_heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Gas',
u'Coil:Heating:Electric',
u'Coil:Heating:Water',
u'Coil:Heating:Steam'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supplemental heating coil name',
{'name': u'Supplemental Heating Coil Name',
'pyname': u'supplemental_heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum supply air temperature from supplemental heater',
{'name': u'Maximum Supply Air Temperature from Supplemental Heater',
'pyname': u'maximum_supply_air_temperature_from_supplemental_heater',
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum outdoor dry-bulb temperature for supplemental heater operation',
{'name': u'Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation',
'pyname': u'maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation',
'default': 21.0,
'maximum': 21.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'outdoor dry-bulb temperature sensor node name',
{'name': u'Outdoor Dry-Bulb Temperature Sensor Node Name',
'pyname': u'outdoor_drybulb_temperature_sensor_node_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'fan placement',
{'name': u'Fan Placement',
'pyname': u'fan_placement',
'default': u'BlowThrough',
'required-field': False,
'autosizable': False,
'accepted-values': [u'BlowThrough',
u'DrawThrough'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan operating mode schedule name',
{'name': u'Supply Air Fan Operating Mode Schedule Name',
'pyname': u'supply_air_fan_operating_mode_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'dehumidification control type',
{'name': u'Dehumidification Control Type',
'pyname': u'dehumidification_control_type',
'default': u'None',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'CoolReheat'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heat pump coil water flow mode',
{'name': u'Heat Pump Coil Water Flow Mode',
'pyname': u'heat_pump_coil_water_flow_mode',
'default': u'Cycling',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Constant',
u'Cycling',
u'ConstantOnDemand'],
'autocalculatable': False,
'type': 'alpha'})]),
'format': None,
'group': u'Unitary Equipment',
'min-fields': 25,
'name': u'AirLoopHVAC:UnitaryHeatPump:WaterToAir',
'pyname': u'AirLoopHvacUnitaryHeatPumpWaterToAir',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def availability_schedule_name(self):
"""field `Availability Schedule Name`
| Availability schedule name for this system. Schedule value > 0 means the system is available.
| If this field is blank, the system is always available.
Args:
value (str): value for IDD Field `Availability Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_schedule_name` or None if not set
"""
return self["Availability Schedule Name"]
@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
"""Corresponds to IDD field `Availability Schedule Name`"""
self["Availability Schedule Name"] = value
@property
def air_inlet_node_name(self):
"""field `Air Inlet Node Name`
Args:
value (str): value for IDD Field `Air Inlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `air_inlet_node_name` or None if not set
"""
return self["Air Inlet Node Name"]
@air_inlet_node_name.setter
def air_inlet_node_name(self, value=None):
"""Corresponds to IDD field `Air Inlet Node Name`"""
self["Air Inlet Node Name"] = value
@property
def air_outlet_node_name(self):
"""field `Air Outlet Node Name`
Args:
value (str): value for IDD Field `Air Outlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `air_outlet_node_name` or None if not set
"""
return self["Air Outlet Node Name"]
@air_outlet_node_name.setter
def air_outlet_node_name(self, value=None):
"""Corresponds to IDD field `Air Outlet Node Name`"""
self["Air Outlet Node Name"] = value
@property
def supply_air_flow_rate(self):
"""field `Supply Air Flow Rate`
| This value should be > 0 and <= than the fan air flow rate.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `supply_air_flow_rate` or None if not set
"""
return self["Supply Air Flow Rate"]
@supply_air_flow_rate.setter
def supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Supply Air Flow Rate`"""
self["Supply Air Flow Rate"] = value
@property
def controlling_zone_or_thermostat_location(self):
"""field `Controlling Zone or Thermostat Location`
Args:
value (str): value for IDD Field `Controlling Zone or Thermostat Location`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `controlling_zone_or_thermostat_location` or None if not set
"""
return self["Controlling Zone or Thermostat Location"]
@controlling_zone_or_thermostat_location.setter
def controlling_zone_or_thermostat_location(self, value=None):
"""Corresponds to IDD field `Controlling Zone or Thermostat
Location`"""
self["Controlling Zone or Thermostat Location"] = value
@property
def supply_air_fan_object_type(self):
"""field `Supply Air Fan Object Type`
| Only works with On/Off Fan
Args:
value (str): value for IDD Field `Supply Air Fan Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_object_type` or None if not set
"""
return self["Supply Air Fan Object Type"]
@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Object Type`"""
self["Supply Air Fan Object Type"] = value
@property
def supply_air_fan_name(self):
"""field `Supply Air Fan Name`
| Needs to match Fan:OnOff object
Args:
value (str): value for IDD Field `Supply Air Fan Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_name` or None if not set
"""
return self["Supply Air Fan Name"]
@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Name`"""
self["Supply Air Fan Name"] = value
@property
def heating_coil_object_type(self):
"""field `Heating Coil Object Type`
Args:
value (str): value for IDD Field `Heating Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_object_type` or None if not set
"""
return self["Heating Coil Object Type"]
@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
"""Corresponds to IDD field `Heating Coil Object Type`"""
self["Heating Coil Object Type"] = value
@property
def heating_coil_name(self):
"""field `Heating Coil Name`
| Needs to match in the water-to-air heat pump heating coil object
Args:
value (str): value for IDD Field `Heating Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heating_coil_name` or None if not set
"""
return self["Heating Coil Name"]
@heating_coil_name.setter
def heating_coil_name(self, value=None):
"""Corresponds to IDD field `Heating Coil Name`"""
self["Heating Coil Name"] = value
@property
def heating_convergence(self):
"""field `Heating Convergence`
| Default value: 0.001
Args:
value (float): value for IDD Field `Heating Convergence`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heating_convergence` or None if not set
"""
return self["Heating Convergence"]
@heating_convergence.setter
def heating_convergence(self, value=0.001):
"""Corresponds to IDD field `Heating Convergence`"""
self["Heating Convergence"] = value
@property
def cooling_coil_object_type(self):
"""field `Cooling Coil Object Type`
Args:
value (str): value for IDD Field `Cooling Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_object_type` or None if not set
"""
return self["Cooling Coil Object Type"]
@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
"""Corresponds to IDD field `Cooling Coil Object Type`"""
self["Cooling Coil Object Type"] = value
@property
def cooling_coil_name(self):
"""field `Cooling Coil Name`
| Needs to match in the water-to-air heat pump cooling coil object
Args:
value (str): value for IDD Field `Cooling Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `cooling_coil_name` or None if not set
"""
return self["Cooling Coil Name"]
@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
"""Corresponds to IDD field `Cooling Coil Name`"""
self["Cooling Coil Name"] = value
@property
def cooling_convergence(self):
"""field `Cooling Convergence`
| Default value: 0.001
Args:
value (float): value for IDD Field `Cooling Convergence`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cooling_convergence` or None if not set
"""
return self["Cooling Convergence"]
@cooling_convergence.setter
def cooling_convergence(self, value=0.001):
"""Corresponds to IDD field `Cooling Convergence`"""
self["Cooling Convergence"] = value
@property
def maximum_cycling_rate(self):
"""field `Maximum Cycling Rate`
| The maximum on-off cycling rate for the compressor
| Suggested value is 2.5 for a typical heat pump
| Units: cycles/hr
| Default value: 2.5
| value <= 5.0
Args:
value (float): value for IDD Field `Maximum Cycling Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_cycling_rate` or None if not set
"""
return self["Maximum Cycling Rate"]
@maximum_cycling_rate.setter
def maximum_cycling_rate(self, value=2.5):
"""Corresponds to IDD field `Maximum Cycling Rate`"""
self["Maximum Cycling Rate"] = value
@property
def heat_pump_time_constant(self):
"""field `Heat Pump Time Constant`
| Time constant for the cooling coil's capacity to reach steady state after startup
| Suggested value is 60 for a typical heat pump
| Units: s
| Default value: 60.0
| value <= 500.0
Args:
value (float): value for IDD Field `Heat Pump Time Constant`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heat_pump_time_constant` or None if not set
"""
return self["Heat Pump Time Constant"]
@heat_pump_time_constant.setter
def heat_pump_time_constant(self, value=60.0):
"""Corresponds to IDD field `Heat Pump Time Constant`"""
self["Heat Pump Time Constant"] = value
@property
def fraction_of_oncycle_power_use(self):
"""field `Fraction of On-Cycle Power Use`
| The fraction of on-cycle power use to adjust the part load fraction based on
| the off-cycle power consumption due to crankcase heaters, controls, fans, and etc.
| Suggested value is 0.01 for a typical heat pump
| Default value: 0.01
| value <= 0.05
Args:
value (float): value for IDD Field `Fraction of On-Cycle Power Use`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fraction_of_oncycle_power_use` or None if not set
"""
return self["Fraction of On-Cycle Power Use"]
@fraction_of_oncycle_power_use.setter
def fraction_of_oncycle_power_use(self, value=0.01):
""" Corresponds to IDD field `Fraction of On-Cycle Power Use`
"""
self["Fraction of On-Cycle Power Use"] = value
@property
def heat_pump_fan_delay_time(self):
"""field `Heat Pump Fan Delay Time`
| Programmed time delay for heat pump fan to shut off after compressor cycle off.
| Only required when fan operating mode is cycling
| Enter 0 when fan operating mode is continuous
| Units: s
| Default value: 60.0
Args:
value (float): value for IDD Field `Heat Pump Fan Delay Time`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `heat_pump_fan_delay_time` or None if not set
"""
return self["Heat Pump Fan Delay Time"]
@heat_pump_fan_delay_time.setter
def heat_pump_fan_delay_time(self, value=60.0):
"""Corresponds to IDD field `Heat Pump Fan Delay Time`"""
self["Heat Pump Fan Delay Time"] = value
@property
def supplemental_heating_coil_object_type(self):
"""field `Supplemental Heating Coil Object Type`
| works with gas, electric, hot water and steam heating coils
Args:
value (str): value for IDD Field `Supplemental Heating Coil Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supplemental_heating_coil_object_type` or None if not set
"""
return self["Supplemental Heating Coil Object Type"]
@supplemental_heating_coil_object_type.setter
def supplemental_heating_coil_object_type(self, value=None):
"""Corresponds to IDD field `Supplemental Heating Coil Object Type`"""
self["Supplemental Heating Coil Object Type"] = value
@property
def supplemental_heating_coil_name(self):
"""field `Supplemental Heating Coil Name`
| Needs to match in the supplemental heating coil object
Args:
value (str): value for IDD Field `Supplemental Heating Coil Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supplemental_heating_coil_name` or None if not set
"""
return self["Supplemental Heating Coil Name"]
@supplemental_heating_coil_name.setter
def supplemental_heating_coil_name(self, value=None):
"""Corresponds to IDD field `Supplemental Heating Coil Name`"""
self["Supplemental Heating Coil Name"] = value
@property
def maximum_supply_air_temperature_from_supplemental_heater(self):
"""field `Maximum Supply Air Temperature from Supplemental Heater`
| Units: C
Args:
value (float or "Autosize"): value for IDD Field `Maximum Supply Air Temperature from Supplemental Heater`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `maximum_supply_air_temperature_from_supplemental_heater` or None if not set
"""
return self["Maximum Supply Air Temperature from Supplemental Heater"]
@maximum_supply_air_temperature_from_supplemental_heater.setter
def maximum_supply_air_temperature_from_supplemental_heater(
self,
value=None):
"""Corresponds to IDD field `Maximum Supply Air Temperature from
Supplemental Heater`"""
self["Maximum Supply Air Temperature from Supplemental Heater"] = value
@property
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(
self):
"""field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`
| Units: C
| Default value: 21.0
| value <= 21.0
Args:
value (float): value for IDD Field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation` or None if not set
"""
return self[
"Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"]
@maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation.setter
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(
self,
value=21.0):
""" Corresponds to IDD field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`
"""
self[
"Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"] = value
@property
def outdoor_drybulb_temperature_sensor_node_name(self):
"""field `Outdoor Dry-Bulb Temperature Sensor Node Name`
Args:
value (str): value for IDD Field `Outdoor Dry-Bulb Temperature Sensor Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outdoor_drybulb_temperature_sensor_node_name` or None if not set
"""
return self["Outdoor Dry-Bulb Temperature Sensor Node Name"]
@outdoor_drybulb_temperature_sensor_node_name.setter
def outdoor_drybulb_temperature_sensor_node_name(self, value=None):
""" Corresponds to IDD field `Outdoor Dry-Bulb Temperature Sensor Node Name`
"""
self["Outdoor Dry-Bulb Temperature Sensor Node Name"] = value
@property
def fan_placement(self):
"""field `Fan Placement`
| Default value: BlowThrough
Args:
value (str): value for IDD Field `Fan Placement`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `fan_placement` or None if not set
"""
return self["Fan Placement"]
@fan_placement.setter
def fan_placement(self, value="BlowThrough"):
"""Corresponds to IDD field `Fan Placement`"""
self["Fan Placement"] = value
@property
def supply_air_fan_operating_mode_schedule_name(self):
"""field `Supply Air Fan Operating Mode Schedule Name`
| Enter the name of a schedule that controls fan operation. Schedule values of 0 denote
| cycling fan operation (fan cycles with cooling or heating coil). Schedule values greater
| than 0 denote constant fan operation (fan runs continually regardless of coil operation).
| The fan operating mode defaults to cycling fan operation if this field is left blank.
Args:
value (str): value for IDD Field `Supply Air Fan Operating Mode Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_operating_mode_schedule_name` or None if not set
"""
return self["Supply Air Fan Operating Mode Schedule Name"]
@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Operating Mode Schedule
Name`"""
self["Supply Air Fan Operating Mode Schedule Name"] = value
@property
def dehumidification_control_type(self):
"""field `Dehumidification Control Type`
| None = meet sensible load only
| CoolReheat = cool beyond the dry-bulb setpoint.
| as required to meet the humidity setpoint. Valid only with
| Coil:Cooling:WaterToAirHeatPump:EquationFit or
| Coil:Cooling:WaterToAirHeatPump:VariableSpeedEquationFit
| Default value: None
Args:
value (str): value for IDD Field `Dehumidification Control Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `dehumidification_control_type` or None if not set
"""
return self["Dehumidification Control Type"]
@dehumidification_control_type.setter
def dehumidification_control_type(self, value="None"):
"""Corresponds to IDD field `Dehumidification Control Type`"""
self["Dehumidification Control Type"] = value
@property
def heat_pump_coil_water_flow_mode(self):
"""field `Heat Pump Coil Water Flow Mode`
| used only when the heat pump coils are of the type WaterToAirHeatPump:EquationFit
| Constant results in 100% water flow regardless of compressor PLR
| Cycling results in water flow that matches compressor PLR
| ConstantOnDemand results in 100% water flow whenever the coil is on, but is 0% whenever the coil has no load
| Default value: Cycling
Args:
value (str): value for IDD Field `Heat Pump Coil Water Flow Mode`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `heat_pump_coil_water_flow_mode` or None if not set
"""
return self["Heat Pump Coil Water Flow Mode"]
@heat_pump_coil_water_flow_mode.setter
def heat_pump_coil_water_flow_mode(self, value="Cycling"):
"""Corresponds to IDD field `Heat Pump Coil Water Flow Mode`"""
self["Heat Pump Coil Water Flow Mode"] = value
class AirLoopHvacUnitaryHeatCoolVavchangeoverBypass(DataObject):
""" Corresponds to IDD object `AirLoopHVAC:UnitaryHeatCool:VAVChangeoverBypass`
Unitary system, heating and cooling with constant volume supply fan (continuous or
cycling), direct expansion (DX) cooling coil, heating coil (gas, electric,
hot water, steam, or DX air-to-air heat pump) and bypass damper for variable volume
flow to terminal units. Used with AirTerminal:SingleDuct:VAV:HeatAndCool:Reheat
or AirTerminal:SingleDuct:VAV:HeatAndCool:NoReheat.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling supply air flow rate',
{'name': u'Cooling Supply Air Flow Rate',
'pyname': u'cooling_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating supply air flow rate',
{'name': u'Heating Supply Air Flow Rate',
'pyname': u'heating_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'no load supply air flow rate',
{'name': u'No Load Supply Air Flow Rate',
'pyname': u'no_load_supply_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling outdoor air flow rate',
{'name': u'Cooling Outdoor Air Flow Rate',
'pyname': u'cooling_outdoor_air_flow_rate',
'required-field': True,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating outdoor air flow rate',
{'name': u'Heating Outdoor Air Flow Rate',
'pyname': u'heating_outdoor_air_flow_rate',
'required-field': True,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'no load outdoor air flow rate',
{'name': u'No Load Outdoor Air Flow Rate',
'pyname': u'no_load_outdoor_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'outdoor air flow rate multiplier schedule name',
{'name': u'Outdoor Air Flow Rate Multiplier Schedule Name',
'pyname': u'outdoor_air_flow_rate_multiplier_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'air inlet node name',
{'name': u'Air Inlet Node Name',
'pyname': u'air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'bypass duct mixer node name',
{'name': u'Bypass Duct Mixer Node Name',
'pyname': u'bypass_duct_mixer_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'bypass duct splitter node name',
{'name': u'Bypass Duct Splitter Node Name',
'pyname': u'bypass_duct_splitter_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'air outlet node name',
{'name': u'Air Outlet Node Name',
'pyname': u'air_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'outdoor air mixer object type',
{'name': u'Outdoor Air Mixer Object Type',
'pyname': u'outdoor_air_mixer_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'OutdoorAir:Mixer'],
'autocalculatable': False,
'type': 'alpha'}),
(u'outdoor air mixer name',
{'name': u'Outdoor Air Mixer Name',
'pyname': u'outdoor_air_mixer_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply air fan object type',
{'name': u'Supply Air Fan Object Type',
'pyname': u'supply_air_fan_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Fan:OnOff',
u'Fan:ConstantVolume'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan name',
{'name': u'Supply Air Fan Name',
'pyname': u'supply_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply air fan placement',
{'name': u'Supply Air Fan Placement',
'pyname': u'supply_air_fan_placement',
'required-field': True,
'autosizable': False,
'accepted-values': [u'BlowThrough',
u'DrawThrough'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan operating mode schedule name',
{'name': u'Supply Air Fan Operating Mode Schedule Name',
'pyname': u'supply_air_fan_operating_mode_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'cooling coil object type',
{'name': u'Cooling Coil Object Type',
'pyname': u'cooling_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Cooling:DX:SingleSpeed',
u'CoilSystem:Cooling:DX:HeatExchangerAssisted',
u'Coil:Cooling:DX:TwoStageWithHumidityControlMode'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling coil name',
{'name': u'Cooling Coil Name',
'pyname': u'cooling_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heating coil object type',
{'name': u'Heating Coil Object Type',
'pyname': u'heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:DX:SingleSpeed',
u'Coil:Heating:Gas',
u'Coil:Heating:Electric',
u'Coil:Heating:Water',
u'Coil:Heating:Steam'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil name',
{'name': u'Heating Coil Name',
'pyname': u'heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'priority control mode',
{'name': u'Priority Control Mode',
'pyname': u'priority_control_mode',
'default': u'ZonePriority',
'required-field': False,
'autosizable': False,
'accepted-values': [u'CoolingPriority',
u'HeatingPriority',
u'ZonePriority'],
'autocalculatable': False,
'type': 'alpha'}),
(u'minimum outlet air temperature during cooling operation',
{'name': u'Minimum Outlet Air Temperature During Cooling Operation',
'pyname': u'minimum_outlet_air_temperature_during_cooling_operation',
'default': 8.0,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum outlet air temperature during heating operation',
{'name': u'Maximum Outlet Air Temperature During Heating Operation',
'pyname': u'maximum_outlet_air_temperature_during_heating_operation',
'default': 50.0,
'minimum>': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'dehumidification control type',
{'name': u'Dehumidification Control Type',
'pyname': u'dehumidification_control_type',
'default': u'None',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'Multimode',
u'CoolReheat'],
'autocalculatable': False,
'type': 'alpha'})]),
'format': None,
'group': u'Unitary Equipment',
'min-fields': 23,
'name': u'AirLoopHVAC:UnitaryHeatCool:VAVChangeoverBypass',
'pyname': u'AirLoopHvacUnitaryHeatCoolVavchangeoverBypass',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
| Enter a unique name for this unitary system.
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def availability_schedule_name(self):
"""field `Availability Schedule Name`
| Availability schedule name for this system. Schedule value > 0 means the system is available.
| If this field is blank, the system is always available.
| Enter the availability schedule name. Schedule values of zero denote system
| is Off. Non-zero schedule values denote system is available to operate.
Args:
value (str): value for IDD Field `Availability Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_schedule_name` or None if not set
"""
return self["Availability Schedule Name"]
@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
"""Corresponds to IDD field `Availability Schedule Name`"""
self["Availability Schedule Name"] = value
@property
def cooling_supply_air_flow_rate(self):
"""field `Cooling Supply Air Flow Rate`
| Enter the system air flow rate during cooling
| operation or specify autosize.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Cooling Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `cooling_supply_air_flow_rate` or None if not set
"""
return self["Cooling Supply Air Flow Rate"]
@cooling_supply_air_flow_rate.setter
def cooling_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Cooling Supply Air Flow Rate`"""
self["Cooling Supply Air Flow Rate"] = value
@property
def heating_supply_air_flow_rate(self):
"""field `Heating Supply Air Flow Rate`
| Enter the system air flow rate during heating
| operation or specify autosize.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Heating Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `heating_supply_air_flow_rate` or None if not set
"""
return self["Heating Supply Air Flow Rate"]
@heating_supply_air_flow_rate.setter
def heating_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Heating Supply Air Flow Rate`"""
self["Heating Supply Air Flow Rate"] = value
@property
def no_load_supply_air_flow_rate(self):
"""field `No Load Supply Air Flow Rate`
| Only used when the supply air fan operating mode is continuous (see field
| Supply air fan operating mode schedule name). This system air flow rate
| is used when no heating or cooling is required and the coils are off.
| If this field is left blank or zero, the system air flow rate from the
| previous on cycle (either cooling or heating) is used.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Load Supply Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_load_supply_air_flow_rate` or None if not set
"""
return self["No Load Supply Air Flow Rate"]
@no_load_supply_air_flow_rate.setter
def no_load_supply_air_flow_rate(self, value=None):
"""Corresponds to IDD field `No Load Supply Air Flow Rate`"""
self["No Load Supply Air Flow Rate"] = value
@property
def cooling_outdoor_air_flow_rate(self):
"""field `Cooling Outdoor Air Flow Rate`
| Enter the outdoor air flow rate during
| cooling operation or specify autosize.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Cooling Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `cooling_outdoor_air_flow_rate` or None if not set
"""
return self["Cooling Outdoor Air Flow Rate"]
@cooling_outdoor_air_flow_rate.setter
def cooling_outdoor_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Cooling Outdoor Air Flow Rate`"""
self["Cooling Outdoor Air Flow Rate"] = value
@property
def heating_outdoor_air_flow_rate(self):
"""field `Heating Outdoor Air Flow Rate`
| Enter the outdoor air flow rate during
| heating operation or specify autosize.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `Heating Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `heating_outdoor_air_flow_rate` or None if not set
"""
return self["Heating Outdoor Air Flow Rate"]
@heating_outdoor_air_flow_rate.setter
def heating_outdoor_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Heating Outdoor Air Flow Rate`"""
self["Heating Outdoor Air Flow Rate"] = value
@property
def no_load_outdoor_air_flow_rate(self):
"""field `No Load Outdoor Air Flow Rate`
| Only used when the supply air fan operating mode is continuous (see field
| Supply air fan operating mode schedule name). This outdoor air flow rate
| is used when no heating or cooling is required and the coils are off.
| If this field is left blank or zero, the outdoor air flow rate from the previous on cycle
| (either cooling or heating) is used.
| Units: m3/s
Args:
value (float or "Autosize"): value for IDD Field `No Load Outdoor Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autosize": the value of `no_load_outdoor_air_flow_rate` or None if not set
"""
return self["No Load Outdoor Air Flow Rate"]
@no_load_outdoor_air_flow_rate.setter
def no_load_outdoor_air_flow_rate(self, value=None):
"""Corresponds to IDD field `No Load Outdoor Air Flow Rate`"""
self["No Load Outdoor Air Flow Rate"] = value
@property
def outdoor_air_flow_rate_multiplier_schedule_name(self):
"""field `Outdoor Air Flow Rate Multiplier Schedule Name`
| Enter the name of a schedule that contains multipliers for the outdoor air
| flow rates. Schedule values must be from 0 to 1.
| If field is left blank, model assumes multiplier is 1 for the entire simulation period.
Args:
value (str): value for IDD Field `Outdoor Air Flow Rate Multiplier Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outdoor_air_flow_rate_multiplier_schedule_name` or None if not set
"""
return self["Outdoor Air Flow Rate Multiplier Schedule Name"]
@outdoor_air_flow_rate_multiplier_schedule_name.setter
def outdoor_air_flow_rate_multiplier_schedule_name(self, value=None):
"""Corresponds to IDD field `Outdoor Air Flow Rate Multiplier Schedule
Name`"""
self["Outdoor Air Flow Rate Multiplier Schedule Name"] = value
@property
def air_inlet_node_name(self):
"""field `Air Inlet Node Name`
| Enter the name of the unitary system's air inlet node.
Args:
value (str): value for IDD Field `Air Inlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `air_inlet_node_name` or None if not set
"""
return self["Air Inlet Node Name"]
@air_inlet_node_name.setter
def air_inlet_node_name(self, value=None):
"""Corresponds to IDD field `Air Inlet Node Name`"""
self["Air Inlet Node Name"] = value
@property
def bypass_duct_mixer_node_name(self):
"""field `Bypass Duct Mixer Node Name`
| Enter the name of the bypass duct mixer node. This name should be the name
| of the return air node for the outdoor air mixer associated with this system.
| This node name must be different from the air inlet node name.
Args:
value (str): value for IDD Field `Bypass Duct Mixer Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `bypass_duct_mixer_node_name` or None if not set
"""
return self["Bypass Duct Mixer Node Name"]
@bypass_duct_mixer_node_name.setter
def bypass_duct_mixer_node_name(self, value=None):
"""Corresponds to IDD field `Bypass Duct Mixer Node Name`"""
self["Bypass Duct Mixer Node Name"] = value
@property
def bypass_duct_splitter_node_name(self):
"""field `Bypass Duct Splitter Node Name`
| Enter the name of the bypass duct splitter node.
| This splitter air node is the outlet node of the last component in this unitary
| system. For blow through fan placement, the splitter air node is the outlet
| node of the heating coil. For draw through fan placement, the splitter node
| is the outlet node of the supply air fan.
Args:
value (str): value for IDD Field `Bypass Duct Splitter Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `bypass_duct_splitter_node_name` or None if not set
"""
return self["Bypass Duct Splitter Node Name"]
@bypass_duct_splitter_node_name.setter
def bypass_duct_splitter_node_name(self, value=None):
"""Corresponds to IDD field `Bypass Duct Splitter Node Name`"""
self["Bypass Duct Splitter Node Name"] = value
@property
def air_outlet_node_name(self):
"""field `Air Outlet Node Name`
| Enter the name of the unitary system's air outlet node.
Args:
value (str): value for IDD Field `Air Outlet Node Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `air_outlet_node_name` or None if not set
"""
return self["Air Outlet Node Name"]
@air_outlet_node_name.setter
def air_outlet_node_name(self, value=None):
"""Corresponds to IDD field `Air Outlet Node Name`"""
self["Air Outlet Node Name"] = value
@property
def outdoor_air_mixer_object_type(self):
"""field `Outdoor Air Mixer Object Type`
| currently only one type OutdoorAir:Mixer object is available.
Args:
value (str): value for IDD Field `Outdoor Air Mixer Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outdoor_air_mixer_object_type` or None if not set
"""
return self["Outdoor Air Mixer Object Type"]
@outdoor_air_mixer_object_type.setter
def outdoor_air_mixer_object_type(self, value=None):
"""Corresponds to IDD field `Outdoor Air Mixer Object Type`"""
self["Outdoor Air Mixer Object Type"] = value
@property
def outdoor_air_mixer_name(self):
"""field `Outdoor Air Mixer Name`
| Enter the name of the outdoor air mixer used with this unitary system.
Args:
value (str): value for IDD Field `Outdoor Air Mixer Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `outdoor_air_mixer_name` or None if not set
"""
return self["Outdoor Air Mixer Name"]
@outdoor_air_mixer_name.setter
def outdoor_air_mixer_name(self, value=None):
"""Corresponds to IDD field `Outdoor Air Mixer Name`"""
self["Outdoor Air Mixer Name"] = value
@property
def supply_air_fan_object_type(self):
"""field `Supply Air Fan Object Type`
| Specify the type of supply air fan used in this unitary system.
Args:
value (str): value for IDD Field `Supply Air Fan Object Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_object_type` or None if not set
"""
return self["Supply Air Fan Object Type"]
@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Object Type`"""
self["Supply Air Fan Object Type"] = value
@property
def supply_air_fan_name(self):
"""field `Supply Air Fan Name`
| Enter the name of the supply air fan used in this unitary system.
Args:
value (str): value for IDD Field `Supply Air Fan Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `supply_air_fan_name` or None if not set
"""
return self["Supply Air Fan Name"]
@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
"""Corresponds to IDD field `Supply Air Fan Name`"""
self["Supply Air Fan Name"] = value
@property
def supply_air_fan_placement(self):
    """IDD field `Supply Air Fan Placement`.

    Supply air fan placement is either blow through or draw through:
    BlowThrough places the supply air fan before the cooling coil,
    DrawThrough places it after the heating coil.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Supply Air Fan Placement"]

@supply_air_fan_placement.setter
def supply_air_fan_placement(self, value=None):
    """Assign IDD field `Supply Air Fan Placement`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Placement"] = value
@property
def supply_air_fan_operating_mode_schedule_name(self):
    """IDD field `Supply Air Fan Operating Mode Schedule Name`.

    Name of a schedule controlling the supply air fan. Schedule values
    of zero mean the fan cycles off when no zone served by this system
    has a cooling or heating load; non-zero values mean the fan runs
    continuously even with no load. If left blank, the fan runs
    continuously for the entire simulation period.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Supply Air Fan Operating Mode Schedule Name"]

@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
    """Assign IDD field `Supply Air Fan Operating Mode Schedule Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Operating Mode Schedule Name"] = value
@property
def cooling_coil_object_type(self):
    """IDD field `Cooling Coil Object Type`.

    Specify the type of cooling coil used in this unitary system.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Cooling Coil Object Type"]

@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
    """Assign IDD field `Cooling Coil Object Type`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Cooling Coil Object Type"] = value
@property
def cooling_coil_name(self):
    """IDD field `Cooling Coil Name`.

    Enter the name of the cooling coil used in this unitary system.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Cooling Coil Name"]

@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
    """Assign IDD field `Cooling Coil Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Cooling Coil Name"] = value
@property
def heating_coil_object_type(self):
    """IDD field `Heating Coil Object Type`.

    Specify the type of heating coil used in this unitary system;
    works with DX, gas, electric, hot water and steam heating coils.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Heating Coil Object Type"]

@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
    """Assign IDD field `Heating Coil Object Type`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Heating Coil Object Type"] = value
@property
def heating_coil_name(self):
    """IDD field `Heating Coil Name`.

    Enter the name of the heating coil used in this unitary system.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Heating Coil Name"]

@heating_coil_name.setter
def heating_coil_name(self, value=None):
    """Assign IDD field `Heating Coil Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Heating Coil Name"] = value
@property
def priority_control_mode(self):
    """IDD field `Priority Control Mode`.

    CoolingPriority: system provides cooling if any zone requires cooling.
    HeatingPriority: system provides heating if any zone requires heating.
    ZonePriority: system controlled by the total number of zones requiring
    cooling or heating (whichever count is higher sets the operating mode).
    Default value: ZonePriority.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Priority Control Mode"]

@priority_control_mode.setter
def priority_control_mode(self, value="ZonePriority"):
    """Assign IDD field `Priority Control Mode`.

    Args:
        value (str): new value for the field. Default is "ZonePriority".

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Priority Control Mode"] = value
@property
def minimum_outlet_air_temperature_during_cooling_operation(self):
    """IDD field `Minimum Outlet Air Temperature During Cooling Operation`.

    Minimum outlet air temperature allowed for this unitary system
    during cooling operation; should be less than the maximum outlet
    air temperature during heating operation.
    Units: C. Default value: 8.0.

    Returns:
        float: the stored value, or None if the field is not set.
    """
    return self["Minimum Outlet Air Temperature During Cooling Operation"]

@minimum_outlet_air_temperature_during_cooling_operation.setter
def minimum_outlet_air_temperature_during_cooling_operation(self, value=8.0):
    """Assign IDD field `Minimum Outlet Air Temperature During Cooling Operation`.

    Args:
        value (float): new value in C. Default is 8.0.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Minimum Outlet Air Temperature During Cooling Operation"] = value
@property
def maximum_outlet_air_temperature_during_heating_operation(self):
    """IDD field `Maximum Outlet Air Temperature During Heating Operation`.

    Maximum outlet air temperature allowed for this unitary system
    during heating operation; should be greater than the minimum outlet
    air temperature during cooling operation.
    Units: C. Default value: 50.0.

    Returns:
        float: the stored value, or None if the field is not set.
    """
    return self["Maximum Outlet Air Temperature During Heating Operation"]

@maximum_outlet_air_temperature_during_heating_operation.setter
def maximum_outlet_air_temperature_during_heating_operation(self, value=50.0):
    """Assign IDD field `Maximum Outlet Air Temperature During Heating Operation`.

    Args:
        value (float): new value in C. Default is 50.0.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Maximum Outlet Air Temperature During Heating Operation"] = value
@property
def dehumidification_control_type(self):
    """IDD field `Dehumidification Control Type`.

    None: meet sensible load only.
    Multimode: activate enhanced dehumidification mode as needed and meet
    the sensible load; valid only with
    Coil:Cooling:DX:TwoStageWithHumidityControlMode.
    CoolReheat: cool beyond the dry-bulb temperature setpoint as required
    to meet the humidity setpoint; valid only with
    Coil:Cooling:DX:TwoStageWithHumidityControlMode.
    For all dehumidification controls, the maximum humidity setpoint on
    this unitary system's air outlet node is used; it must be set using
    ZoneControl:Humidistat together with
    SetpointManager:SingleZone:Humidity:Maximum,
    SetpointManager:MultiZone:Humidity:Maximum or
    SetpointManager:MultiZone:MaximumHumidity:Average objects.
    Default value: None.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Dehumidification Control Type"]

@dehumidification_control_type.setter
def dehumidification_control_type(self, value="None"):
    """Assign IDD field `Dehumidification Control Type`.

    Args:
        value (str): new value for the field. Default is "None".

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Dehumidification Control Type"] = value
class AirLoopHvacUnitaryHeatPumpAirToAirMultiSpeed(DataObject):
""" Corresponds to IDD object `AirLoopHVAC:UnitaryHeatPump:AirToAir:MultiSpeed`
Unitary system, heating and cooling, multi-speed with constant volume supply fan
(continuous or cycling), direct expansion (DX) cooling coil, heating coil
(DX air-to-air heat pump, gas, electric, hot water, or steam), and supplemental
heating coil (gas, electric, hot water, or steam).
"""
# Auto-generated IDD schema for this object: per-field metadata (IDD name,
# python attribute name, required/autosizable flags, accepted values,
# numeric limits, defaults and units) consumed by the DataObject base class
# to validate and store the values read/written via self[...] accessors.
# 'minimum>' denotes an exclusive (strict) lower bound.
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'air inlet node name',
{'name': u'Air Inlet Node Name',
'pyname': u'air_inlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'air outlet node name',
{'name': u'Air Outlet Node Name',
'pyname': u'air_outlet_node_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'controlling zone or thermostat location',
{'name': u'Controlling Zone or Thermostat Location',
'pyname': u'controlling_zone_or_thermostat_location',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply air fan object type',
{'name': u'Supply Air Fan Object Type',
'pyname': u'supply_air_fan_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Fan:OnOff',
u'Fan:ConstantVolume'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan name',
{'name': u'Supply Air Fan Name',
'pyname': u'supply_air_fan_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supply air fan placement',
{'name': u'Supply Air Fan Placement',
'pyname': u'supply_air_fan_placement',
'required-field': True,
'autosizable': False,
'accepted-values': [u'BlowThrough',
u'DrawThrough'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supply air fan operating mode schedule name',
{'name': u'Supply Air Fan Operating Mode Schedule Name',
'pyname': u'supply_air_fan_operating_mode_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'heating coil object type',
{'name': u'Heating Coil Object Type',
'pyname': u'heating_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Heating:DX:MultiSpeed',
u'Coil:Heating:Electric:MultiStage',
u'Coil:Heating:Gas:MultiStage',
u'Coil:Heating:Water',
u'Coil:Heating:Steam'],
'autocalculatable': False,
'type': 'alpha'}),
(u'heating coil name',
{'name': u'Heating Coil Name',
'pyname': u'heating_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum outdoor dry-bulb temperature for compressor operation',
{'name': u'Minimum Outdoor Dry-Bulb Temperature for Compressor Operation',
'pyname': u'minimum_outdoor_drybulb_temperature_for_compressor_operation',
'default': -8.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'cooling coil object type',
{'name': u'Cooling Coil Object Type',
'pyname': u'cooling_coil_object_type',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Coil:Cooling:DX:MultiSpeed'],
'autocalculatable': False,
'type': 'alpha'}),
(u'cooling coil name',
{'name': u'Cooling Coil Name',
'pyname': u'cooling_coil_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'supplemental heating coil object type',
{'name': u'Supplemental Heating Coil Object Type',
'pyname': u'supplemental_heating_coil_object_type',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Coil:Heating:Gas',
u'Coil:Heating:Electric',
u'Coil:Heating:Water',
u'Coil:Heating:Steam'],
'autocalculatable': False,
'type': 'alpha'}),
(u'supplemental heating coil name',
{'name': u'Supplemental Heating Coil Name',
'pyname': u'supplemental_heating_coil_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum supply air temperature from supplemental heater',
{'name': u'Maximum Supply Air Temperature from Supplemental Heater',
'pyname': u'maximum_supply_air_temperature_from_supplemental_heater',
'required-field': False,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum outdoor dry-bulb temperature for supplemental heater operation',
{'name': u'Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation',
'pyname': u'maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation',
'default': 21.0,
'maximum': 21.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'auxiliary on-cycle electric power',
{'name': u'Auxiliary On-Cycle Electric Power',
'pyname': u'auxiliary_oncycle_electric_power',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'W'}),
(u'auxiliary off-cycle electric power',
{'name': u'Auxiliary Off-Cycle Electric Power',
'pyname': u'auxiliary_offcycle_electric_power',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'W'}),
(u'design heat recovery water flow rate',
{'name': u'Design Heat Recovery Water Flow Rate',
'pyname': u'design_heat_recovery_water_flow_rate',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'maximum temperature for heat recovery',
{'name': u'Maximum Temperature for Heat Recovery',
'pyname': u'maximum_temperature_for_heat_recovery',
'default': 80.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real',
'unit': u'C'}),
(u'heat recovery water inlet node name',
{'name': u'Heat Recovery Water Inlet Node Name',
'pyname': u'heat_recovery_water_inlet_node_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'heat recovery water outlet node name',
{'name': u'Heat Recovery Water Outlet Node Name',
'pyname': u'heat_recovery_water_outlet_node_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'node'}),
(u'no load supply air flow rate',
{'name': u'No Load Supply Air Flow Rate',
'pyname': u'no_load_supply_air_flow_rate',
'required-field': False,
'autosizable': True,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'number of speeds for heating',
{'name': u'Number of Speeds for Heating',
'pyname': u'number_of_speeds_for_heating',
'maximum': 4,
'required-field': True,
'autosizable': False,
'minimum': 1,
'autocalculatable': False,
'type': u'integer'}),
(u'number of speeds for cooling',
{'name': u'Number of Speeds for Cooling',
'pyname': u'number_of_speeds_for_cooling',
'maximum': 4,
'required-field': True,
'autosizable': False,
'minimum': 2,
'autocalculatable': False,
'type': u'integer'}),
(u'heating speed 1 supply air flow rate',
{'name': u'Heating Speed 1 Supply Air Flow Rate',
'pyname': u'heating_speed_1_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating speed 2 supply air flow rate',
{'name': u'Heating Speed 2 Supply Air Flow Rate',
'pyname': u'heating_speed_2_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': False,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating speed 3 supply air flow rate',
{'name': u'Heating Speed 3 Supply Air Flow Rate',
'pyname': u'heating_speed_3_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': False,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'heating speed 4 supply air flow rate',
{'name': u'Heating Speed 4 Supply Air Flow Rate',
'pyname': u'heating_speed_4_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': False,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling speed 1 supply air flow rate',
{'name': u'Cooling Speed 1 Supply Air Flow Rate',
'pyname': u'cooling_speed_1_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling speed 2 supply air flow rate',
{'name': u'Cooling Speed 2 Supply Air Flow Rate',
'pyname': u'cooling_speed_2_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': True,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling speed 3 supply air flow rate',
{'name': u'Cooling Speed 3 Supply Air Flow Rate',
'pyname': u'cooling_speed_3_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': False,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'cooling speed 4 supply air flow rate',
{'name': u'Cooling Speed 4 Supply Air Flow Rate',
'pyname': u'cooling_speed_4_supply_air_flow_rate',
'minimum>': 0.0,
'required-field': False,
'autosizable': True,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'})]),
'format': None,
'group': u'Unitary Equipment',
'min-fields': 31,
'name': u'AirLoopHVAC:UnitaryHeatPump:AirToAir:MultiSpeed',
'pyname': u'AirLoopHvacUnitaryHeatPumpAirToAirMultiSpeed',
'required-object': False,
'unique-object': False}
@property
def name(self):
    """IDD field `Name`.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Name"]

@name.setter
def name(self, value=None):
    """Assign IDD field `Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Name"] = value
@property
def availability_schedule_name(self):
    """IDD field `Availability Schedule Name`.

    Availability schedule name for this system. A schedule value > 0
    means the system is available; if this field is blank the system
    is always available.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Availability Schedule Name"]

@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
    """Assign IDD field `Availability Schedule Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Availability Schedule Name"] = value
@property
def air_inlet_node_name(self):
    """IDD field `Air Inlet Node Name`.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Air Inlet Node Name"]

@air_inlet_node_name.setter
def air_inlet_node_name(self, value=None):
    """Assign IDD field `Air Inlet Node Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Air Inlet Node Name"] = value
@property
def air_outlet_node_name(self):
    """IDD field `Air Outlet Node Name`.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Air Outlet Node Name"]

@air_outlet_node_name.setter
def air_outlet_node_name(self, value=None):
    """Assign IDD field `Air Outlet Node Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Air Outlet Node Name"] = value
@property
def controlling_zone_or_thermostat_location(self):
    """IDD field `Controlling Zone or Thermostat Location`.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Controlling Zone or Thermostat Location"]

@controlling_zone_or_thermostat_location.setter
def controlling_zone_or_thermostat_location(self, value=None):
    """Assign IDD field `Controlling Zone or Thermostat Location`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Controlling Zone or Thermostat Location"] = value
@property
def supply_air_fan_object_type(self):
    """IDD field `Supply Air Fan Object Type`.

    Select the type of supply air fan used in this unitary system.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Supply Air Fan Object Type"]

@supply_air_fan_object_type.setter
def supply_air_fan_object_type(self, value=None):
    """Assign IDD field `Supply Air Fan Object Type`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Object Type"] = value
@property
def supply_air_fan_name(self):
    """IDD field `Supply Air Fan Name`.

    Enter the name of the supply air fan used in this unitary system.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Supply Air Fan Name"]

@supply_air_fan_name.setter
def supply_air_fan_name(self, value=None):
    """Assign IDD field `Supply Air Fan Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Name"] = value
@property
def supply_air_fan_placement(self):
    """IDD field `Supply Air Fan Placement`.

    Select either BlowThrough or DrawThrough: BlowThrough places the
    supply air fan before the cooling coil; DrawThrough places it after
    the heating coil but before the optional supplemental heating coil.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Supply Air Fan Placement"]

@supply_air_fan_placement.setter
def supply_air_fan_placement(self, value=None):
    """Assign IDD field `Supply Air Fan Placement`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Placement"] = value
@property
def supply_air_fan_operating_mode_schedule_name(self):
    """IDD field `Supply Air Fan Operating Mode Schedule Name`.

    Name of a schedule controlling the supply air fan. Schedule values
    of zero mean the fan cycles off when the control zone has no cooling
    or heating load; non-zero values mean the fan runs continuously even
    with no load in the control zone. If left blank, the fan runs
    continuously for the entire simulation period.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Supply Air Fan Operating Mode Schedule Name"]

@supply_air_fan_operating_mode_schedule_name.setter
def supply_air_fan_operating_mode_schedule_name(self, value=None):
    """Assign IDD field `Supply Air Fan Operating Mode Schedule Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supply Air Fan Operating Mode Schedule Name"] = value
@property
def heating_coil_object_type(self):
    """IDD field `Heating Coil Object Type`.

    Multi-speed DX, Electric and Gas coils, plus single-speed Water
    and Steam coils.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Heating Coil Object Type"]

@heating_coil_object_type.setter
def heating_coil_object_type(self, value=None):
    """Assign IDD field `Heating Coil Object Type`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Heating Coil Object Type"] = value
@property
def heating_coil_name(self):
    """IDD field `Heating Coil Name`.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Heating Coil Name"]

@heating_coil_name.setter
def heating_coil_name(self, value=None):
    """Assign IDD field `Heating Coil Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Heating Coil Name"] = value
@property
def minimum_outdoor_drybulb_temperature_for_compressor_operation(self):
    """IDD field `Minimum Outdoor Dry-Bulb Temperature for Compressor Operation`.

    Needs to match the corresponding minimum outdoor temperature defined
    in the DX heating coil object.
    Units: C. Default value: -8.0.

    Returns:
        float: the stored value, or None if the field is not set.
    """
    return self[
        "Minimum Outdoor Dry-Bulb Temperature for Compressor Operation"]

@minimum_outdoor_drybulb_temperature_for_compressor_operation.setter
def minimum_outdoor_drybulb_temperature_for_compressor_operation(self, value=-8.0):
    """Assign IDD field `Minimum Outdoor Dry-Bulb Temperature for Compressor Operation`.

    Args:
        value (float): new value in C. Default is -8.0.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self[
        "Minimum Outdoor Dry-Bulb Temperature for Compressor Operation"] = value
@property
def cooling_coil_object_type(self):
    """IDD field `Cooling Coil Object Type`.

    Only works with Coil:Cooling:DX:MultiSpeed.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Cooling Coil Object Type"]

@cooling_coil_object_type.setter
def cooling_coil_object_type(self, value=None):
    """Assign IDD field `Cooling Coil Object Type`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Cooling Coil Object Type"] = value
@property
def cooling_coil_name(self):
    """IDD field `Cooling Coil Name`.

    Needs to match the name in the DX cooling coil object.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Cooling Coil Name"]

@cooling_coil_name.setter
def cooling_coil_name(self, value=None):
    """Assign IDD field `Cooling Coil Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Cooling Coil Name"] = value
@property
def supplemental_heating_coil_object_type(self):
    """IDD field `Supplemental Heating Coil Object Type`.

    Works with gas, electric, hot water and steam heating coils.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Supplemental Heating Coil Object Type"]

@supplemental_heating_coil_object_type.setter
def supplemental_heating_coil_object_type(self, value=None):
    """Assign IDD field `Supplemental Heating Coil Object Type`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supplemental Heating Coil Object Type"] = value
@property
def supplemental_heating_coil_name(self):
    """IDD field `Supplemental Heating Coil Name`.

    Needs to match the name in the supplemental heating coil object.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Supplemental Heating Coil Name"]

@supplemental_heating_coil_name.setter
def supplemental_heating_coil_name(self, value=None):
    """Assign IDD field `Supplemental Heating Coil Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Supplemental Heating Coil Name"] = value
@property
def maximum_supply_air_temperature_from_supplemental_heater(self):
    """IDD field `Maximum Supply Air Temperature from Supplemental Heater`.

    Units: C.

    Returns:
        float or "Autosize": the stored value, or None if the field is
        not set.
    """
    return self["Maximum Supply Air Temperature from Supplemental Heater"]

@maximum_supply_air_temperature_from_supplemental_heater.setter
def maximum_supply_air_temperature_from_supplemental_heater(self, value=None):
    """Assign IDD field `Maximum Supply Air Temperature from Supplemental Heater`.

    Args:
        value (float or "Autosize"): new value in C.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Maximum Supply Air Temperature from Supplemental Heater"] = value
@property
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(self):
    """IDD field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`.

    Units: C. Default value: 21.0. Must satisfy value <= 21.0.

    Returns:
        float: the stored value, or None if the field is not set.
    """
    return self[
        "Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"]

@maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation.setter
def maximum_outdoor_drybulb_temperature_for_supplemental_heater_operation(self, value=21.0):
    """Assign IDD field `Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation`.

    Args:
        value (float): new value in C (<= 21.0). Default is 21.0.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self[
        "Maximum Outdoor Dry-Bulb Temperature for Supplemental Heater Operation"] = value
@property
def auxiliary_oncycle_electric_power(self):
    """IDD field `Auxiliary On-Cycle Electric Power`.

    Units: W.

    Returns:
        float: the stored value, or None if the field is not set.
    """
    return self["Auxiliary On-Cycle Electric Power"]

@auxiliary_oncycle_electric_power.setter
def auxiliary_oncycle_electric_power(self, value=None):
    """Assign IDD field `Auxiliary On-Cycle Electric Power`.

    Args:
        value (float): new value in W.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Auxiliary On-Cycle Electric Power"] = value
@property
def auxiliary_offcycle_electric_power(self):
    """IDD field `Auxiliary Off-Cycle Electric Power`.

    Units: W.

    Returns:
        float: the stored value, or None if the field is not set.
    """
    return self["Auxiliary Off-Cycle Electric Power"]

@auxiliary_offcycle_electric_power.setter
def auxiliary_offcycle_electric_power(self, value=None):
    """Assign IDD field `Auxiliary Off-Cycle Electric Power`.

    Args:
        value (float): new value in W.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Auxiliary Off-Cycle Electric Power"] = value
@property
def design_heat_recovery_water_flow_rate(self):
    """IDD field `Design Heat Recovery Water Flow Rate`.

    If non-zero, the heat recovery inlet and outlet node names must be
    entered. Used for heat recovery to an EnergyPlus plant loop.
    Units: m3/s.

    Returns:
        float: the stored value, or None if the field is not set.
    """
    return self["Design Heat Recovery Water Flow Rate"]

@design_heat_recovery_water_flow_rate.setter
def design_heat_recovery_water_flow_rate(self, value=None):
    """Assign IDD field `Design Heat Recovery Water Flow Rate`.

    Args:
        value (float): new value in m3/s.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Design Heat Recovery Water Flow Rate"] = value
@property
def maximum_temperature_for_heat_recovery(self):
    """IDD field `Maximum Temperature for Heat Recovery`.

    Units: C. Default value: 80.0. Must satisfy value <= 100.0.

    Returns:
        float: the stored value, or None if the field is not set.
    """
    return self["Maximum Temperature for Heat Recovery"]

@maximum_temperature_for_heat_recovery.setter
def maximum_temperature_for_heat_recovery(self, value=80.0):
    """Assign IDD field `Maximum Temperature for Heat Recovery`.

    Args:
        value (float): new value in C (<= 100.0). Default is 80.0.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Maximum Temperature for Heat Recovery"] = value
@property
def heat_recovery_water_inlet_node_name(self):
    """IDD field `Heat Recovery Water Inlet Node Name`.

    Returns:
        str: the stored value, or None if the field is not set.
    """
    return self["Heat Recovery Water Inlet Node Name"]

@heat_recovery_water_inlet_node_name.setter
def heat_recovery_water_inlet_node_name(self, value=None):
    """Assign IDD field `Heat Recovery Water Inlet Node Name`.

    Args:
        value (str): new value for the field.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    self["Heat Recovery Water Inlet Node Name"] = value
@property
def heat_recovery_water_outlet_node_name(self):
    """Get IDD field `Heat Recovery Water Outlet Node Name`.

    Returns:
        str: the stored node name, or None if not set
    """
    key = "Heat Recovery Water Outlet Node Name"
    return self[key]


@heat_recovery_water_outlet_node_name.setter
def heat_recovery_water_outlet_node_name(self, value=None):
    """Set IDD field `Heat Recovery Water Outlet Node Name`.

    Args:
        value (str): new node name for the field
    """
    self["Heat Recovery Water Outlet Node Name"] = value
@property
def no_load_supply_air_flow_rate(self):
    """Get IDD field `No Load Supply Air Flow Rate`.

    | Only used when the supply air fan operating mode is continuous (see field
    | Supply Air Fan Operating Mode Schedule Name). This air flow rate
    | is used when no heating or cooling is required and the coils are off.
    | If this field is left blank or zero, the supply air flow rate from the
    | previous on cycle (either cooling or heating) is used.
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "No Load Supply Air Flow Rate"
    return self[key]


@no_load_supply_air_flow_rate.setter
def no_load_supply_air_flow_rate(self, value=None):
    """Set IDD field `No Load Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["No Load Supply Air Flow Rate"] = value
@property
def number_of_speeds_for_heating(self):
    """Get IDD field `Number of Speeds for Heating`.

    | Enter the number of the following sets of data for air flow rates.
    | If Heating Coil Object Type is Coil:Heating:Water or Coil:Heating:Steam,
    | this field should be 1.
    | value >= 1
    | value <= 4

    Returns:
        int: the stored value, or None if not set
    """
    key = "Number of Speeds for Heating"
    return self[key]


@number_of_speeds_for_heating.setter
def number_of_speeds_for_heating(self, value=None):
    """Set IDD field `Number of Speeds for Heating`.

    Args:
        value (int): new value for the field (1..4)
    """
    self["Number of Speeds for Heating"] = value
@property
def number_of_speeds_for_cooling(self):
    """Get IDD field `Number of Speeds for Cooling`.

    | Enter the number of the following sets of data for air flow rates.
    | value >= 2
    | value <= 4

    Returns:
        int: the stored value, or None if not set
    """
    key = "Number of Speeds for Cooling"
    return self[key]


@number_of_speeds_for_cooling.setter
def number_of_speeds_for_cooling(self, value=None):
    """Set IDD field `Number of Speeds for Cooling`.

    Args:
        value (int): new value for the field (2..4)
    """
    self["Number of Speeds for Cooling"] = value
@property
def heating_speed_1_supply_air_flow_rate(self):
    """Get IDD field `Heating Speed 1 Supply Air Flow Rate`.

    | Operating supply air flow rate during heating, or "Autosize".
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "Heating Speed 1 Supply Air Flow Rate"
    return self[key]


@heating_speed_1_supply_air_flow_rate.setter
def heating_speed_1_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Speed 1 Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["Heating Speed 1 Supply Air Flow Rate"] = value
@property
def heating_speed_2_supply_air_flow_rate(self):
    """Get IDD field `Heating Speed 2 Supply Air Flow Rate`.

    | Operating supply air flow rate during heating, or "Autosize".
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "Heating Speed 2 Supply Air Flow Rate"
    return self[key]


@heating_speed_2_supply_air_flow_rate.setter
def heating_speed_2_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Speed 2 Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["Heating Speed 2 Supply Air Flow Rate"] = value
@property
def heating_speed_3_supply_air_flow_rate(self):
    """Get IDD field `Heating Speed 3 Supply Air Flow Rate`.

    | Operating supply air flow rate during heating, or "Autosize".
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "Heating Speed 3 Supply Air Flow Rate"
    return self[key]


@heating_speed_3_supply_air_flow_rate.setter
def heating_speed_3_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Speed 3 Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["Heating Speed 3 Supply Air Flow Rate"] = value
@property
def heating_speed_4_supply_air_flow_rate(self):
    """Get IDD field `Heating Speed 4 Supply Air Flow Rate`.

    | Operating supply air flow rate during heating, or "Autosize".
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "Heating Speed 4 Supply Air Flow Rate"
    return self[key]


@heating_speed_4_supply_air_flow_rate.setter
def heating_speed_4_supply_air_flow_rate(self, value=None):
    """Set IDD field `Heating Speed 4 Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["Heating Speed 4 Supply Air Flow Rate"] = value
@property
def cooling_speed_1_supply_air_flow_rate(self):
    """Get IDD field `Cooling Speed 1 Supply Air Flow Rate`.

    | Operating supply air flow rate during cooling, or "Autosize".
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "Cooling Speed 1 Supply Air Flow Rate"
    return self[key]


@cooling_speed_1_supply_air_flow_rate.setter
def cooling_speed_1_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Speed 1 Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["Cooling Speed 1 Supply Air Flow Rate"] = value
@property
def cooling_speed_2_supply_air_flow_rate(self):
    """Get IDD field `Cooling Speed 2 Supply Air Flow Rate`.

    | Operating supply air flow rate during cooling, or "Autosize".
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "Cooling Speed 2 Supply Air Flow Rate"
    return self[key]


@cooling_speed_2_supply_air_flow_rate.setter
def cooling_speed_2_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Speed 2 Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["Cooling Speed 2 Supply Air Flow Rate"] = value
@property
def cooling_speed_3_supply_air_flow_rate(self):
    """Get IDD field `Cooling Speed 3 Supply Air Flow Rate`.

    | Operating supply air flow rate during cooling, or "Autosize".
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "Cooling Speed 3 Supply Air Flow Rate"
    return self[key]


@cooling_speed_3_supply_air_flow_rate.setter
def cooling_speed_3_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Speed 3 Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["Cooling Speed 3 Supply Air Flow Rate"] = value
@property
def cooling_speed_4_supply_air_flow_rate(self):
    """Get IDD field `Cooling Speed 4 Supply Air Flow Rate`.

    | Operating supply air flow rate during cooling, or "Autosize".
    | Units: m3/s

    Returns:
        float or "Autosize": the stored value, or None if not set
    """
    key = "Cooling Speed 4 Supply Air Flow Rate"
    return self[key]


@cooling_speed_4_supply_air_flow_rate.setter
def cooling_speed_4_supply_air_flow_rate(self, value=None):
    """Set IDD field `Cooling Speed 4 Supply Air Flow Rate`.

    Args:
        value (float or "Autosize"): new value for the field
    """
    self["Cooling Speed 4 Supply Air Flow Rate"] = value
| 43.373058
| 132
| 0.503606
| 34,684
| 349,023
| 4.941645
| 0.014185
| 0.049464
| 0.037545
| 0.051775
| 0.954713
| 0.944485
| 0.927134
| 0.907577
| 0.883679
| 0.865026
| 0
| 0.003644
| 0.422895
| 349,023
| 8,046
| 133
| 43.378449
| 0.847282
| 0.320228
| 0
| 0.855157
| 0
| 0
| 0.261645
| 0.041611
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12983
| false
| 0.005282
| 0.000834
| 0
| 0.201001
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c13c4ad752ea38065f8a315cec7a6c7583421134
| 299
|
py
|
Python
|
extensions/.stubs/clrclasses/System/Net/Mime/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | 1
|
2020-03-25T03:27:24.000Z
|
2020-03-25T03:27:24.000Z
|
extensions/.stubs/clrclasses/System/Net/Mime/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
extensions/.stubs/clrclasses/System/Net/Mime/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
from __clrclasses__.System.Net.Mime import ContentDisposition
from __clrclasses__.System.Net.Mime import ContentType
from __clrclasses__.System.Net.Mime import DispositionTypeNames
from __clrclasses__.System.Net.Mime import MediaTypeNames
from __clrclasses__.System.Net.Mime import TransferEncoding
| 49.833333
| 63
| 0.882943
| 35
| 299
| 6.971429
| 0.314286
| 0.286885
| 0.409836
| 0.471311
| 0.67623
| 0.67623
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06689
| 299
| 5
| 64
| 59.8
| 0.874552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c1b00f367b6e4cf51b7807f6bcc4af5fb0526ae0
| 7,643
|
py
|
Python
|
tests/test_testlink.py
|
kman0/pytest-testlink
|
8a69f4288172b42f20e3585b400cc92e3d0fc9c9
|
[
"MIT"
] | 1
|
2020-12-03T06:45:53.000Z
|
2020-12-03T06:45:53.000Z
|
tests/test_testlink.py
|
manojklm/pytest-testlink
|
8a69f4288172b42f20e3585b400cc92e3d0fc9c9
|
[
"MIT"
] | null | null | null |
tests/test_testlink.py
|
manojklm/pytest-testlink
|
8a69f4288172b42f20e3585b400cc92e3d0fc9c9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function

# Enable pytest's "pytester" plugin, which provides the `testdir` fixture
# used throughout this file.
pytest_plugins = "pytester"

import os
import sys
import time
import pytest
from pytest_testlink import TLINK

# Public demo TestLink endpoint and API key used by init_testlink().
DEMO_XMLRPC = "http://demo.testlink.org/latest/lib/api/xmlrpc/v1/xmlrpc.php"
DEMO_APIKEY = "583e35fa07bd59e81048640f5cee1897"
def init_ini(testdir):
    """Write a pytest.ini that points the plugin at a testlink.ini file."""
    testdir.tmpdir.ensure("pytest.ini").write("""[pytest]
testlink_file=testlink.ini""")
def init_pass(testdir):
    """Create a test module containing a single passing test."""
    testdir.makepyfile("""
import pytest
def test_pass(): assert 1
""")
def init_tests(testdir):
    """Create a test module covering pass/fail/skip/xfail/xpass outcomes.

    NOTE(review): not referenced by any test visible in this file — confirm
    whether it is still needed.
    """
    testdir.makepyfile("""
import pytest
def test_1(): assert 1
def test_2(): assert 0
def test_3(): pytest.skip()
@pytest.mark.xfail
def test_4(): assert 0
@pytest.mark.xfail
def test_5(): assert 1
""")
def init_testlink(testdir):
    """Write a testlink.ini with the demo server config and five node maps."""
    testdir.tmpdir.ensure("testlink.ini").write("""
[testlink-conf]
xmlrpc_url=""" + DEMO_XMLRPC + """
api_key=""" + DEMO_APIKEY + """
project=TS
test_plan=Automation
build_name=1
[testlink-maps]
ts-1=tests/test_testlink.py::test_1
ts-2=tests/test_testlink.py::test_2
ts-3=tests/test_testlink.py::test_3
ts-4=tests/test_testlink.py::test_4
ts-5=tests/test_testlink.py::test_5
"""
                                               )
# ---------------------------------------------------------------- Tests
def test_no_testlink(testdir):
    """--no-testlink disables the plugin and the run reports it."""
    init_pass(testdir)
    result = testdir.runpytest('--no-testlink', testdir.tmpdir)
    assert result.ret == 0
    result.stdout.fnmatch_lines_random("*testlink: disabled by --no-testlink*")
def test_no_configure_print(testdir):
    """Without a testlink_file key the plugin warns but tests still pass."""
    init_pass(testdir)
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 0
    # NOTE(review): '[pytest?' appears to be an fnmatch-safe spelling of
    # '[pytest]' (bare '[' starts a character class) — confirm intent.
    result.stdout.fnmatch_lines_random(r'*testlink: "testlink_file" key was not found in [pytest? section*')
def test_testlink_file_not_found(testdir):
    """A missing testlink.ini is reported; with --testlink-exit-on-error the
    run aborts with an internal error (exit code 3)."""
    init_ini(testdir)
    init_pass(testdir)
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 0
    result.stdout.fnmatch_lines_random("*FileNotFoundError: testlink_file: testlink.ini*")
    result.stdout.fnmatch_lines_random("*1 passed*")
    result = testdir.runpytest('--testlink-exit-on-error', testdir.tmpdir)
    # result.stdout.fnmatch_lines_random("*testlink: exit on failure enabled!*")
    assert result.ret == 3
    result.stderr.fnmatch_lines_random("*FileNotFoundError: testlink_file: testlink.ini*")
    result.stderr.fnmatch_lines_random("*INTERNALERROR*")
def test_testlink_conf_section_not_found(testdir):
    """A testlink.ini lacking [testlink-conf] is reported; fatal only with
    --testlink-exit-on-error."""
    init_ini(testdir)
    init_pass(testdir)
    testdir.tmpdir.ensure("testlink.ini").write("""[pytest]""")
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 0
    result.stdout.fnmatch_lines_random("*1 passed*")
    result.stdout.fnmatch_lines_random('*section "testlink-conf" not found in ini file: testlink.ini*')
    result = testdir.runpytest('--testlink-exit-on-error', testdir.tmpdir)
    # result.stdout.fnmatch_lines_random("*testlink: exit on failure enabled!*")
    assert result.ret == 3
    result.stderr.fnmatch_lines_random('*section "testlink-conf" not found in ini file: testlink.ini*')
    result.stderr.fnmatch_lines_random("*INTERNALERROR*")
def test_testlink_maps_section_not_found(testdir):
    """A testlink.ini lacking [testlink-maps] is reported but not fatal."""
    init_ini(testdir)
    init_pass(testdir)
    testdir.tmpdir.ensure("testlink.ini").write("""[testlink-conf]""")
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 0
    result.stdout.fnmatch_lines_random("*1 passed*")
    result.stdout.fnmatch_lines_random('*section "testlink-maps" not found in ini file: testlink.ini*')
@pytest.mark.parametrize(argnames="data",
                         argvalues=TLINK.ini_required_keys)
def test_testlink_missing_key(testdir, data):
    """Each required key in turn is omitted from [testlink-conf]; the plugin
    must report the missing key, and abort (exit code 3) when
    --testlink-exit-on-error is given.

    Args:
        testdir: pytester fixture providing an isolated pytest run.
        data: the required key omitted for this parametrized case.
    """
    init_ini(testdir)
    init_pass(testdir)
    keys = set(TLINK.ini_required_keys)
    keys.remove(data)
    testdir.tmpdir.ensure("testlink.ini").write("""[testlink-conf]\n%s""" % ('\n'.join(k + "=dummy" for k in keys)))
    # Debug aid: show the generated ini.  Use a context manager so the file
    # handle is closed (the original `print(open(...).read())` leaked it).
    with open("testlink.ini") as ini_file:
        print(ini_file.read())
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 0
    result.stdout.fnmatch_lines_random("*Missing testlink ini keys: {'%s'}*" % data)
    result = testdir.runpytest('--testlink-exit-on-error', testdir.tmpdir)
    assert result.ret == 3
    result.stderr.fnmatch_lines_random("*INTERNALERROR*")
    result.stderr.fnmatch_lines_random("*Missing testlink ini keys: {'%s'}*" % data)
def test_ini_map_one_test(testdir):
    """A single well-formed mapping entry is accepted."""
    init_ini(testdir)
    init_pass(testdir)
    testdir.tmpdir.ensure("testlink.ini").write("""[testlink-conf]\n%s""" %
                                               ('\n'.join(k + "=dummy" for k in TLINK.ini_required_keys)) +
                                               """\n[testlink-maps]
test-1=tests/test_testlink.py::test_ini_map
""")
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 0
    result.stdout.fnmatch_lines_random("*1 passed*")
def test_ini_map_duplicate_keys(testdir):
    """A duplicated option name in [testlink-maps] is a fatal configparser
    error (exit code 3)."""
    init_ini(testdir)
    init_pass(testdir)
    testdir.tmpdir.ensure("testlink.ini").write("""[testlink-conf]\n%s""" %
                                               ('\n'.join(k + "=dummy" for k in TLINK.ini_required_keys)) +
                                               """\n[testlink-maps]
test-1=tests/test_testlink.py::test_ini_map
test-1=tests/test_testlink.py::test_ini_map
""")
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 3
    result.stderr.fnmatch_lines_random("*configparser.DuplicateOptionError*")
    result.stderr.fnmatch_lines_random("*option 'test-1' in section 'testlink-maps' already exists*")
def test_ini_map_duplicate_nodes(testdir):
    """Two map keys pointing at the same node id are reported; fatal only
    with --testlink-exit-on-error."""
    init_ini(testdir)
    init_pass(testdir)
    testdir.tmpdir.ensure("testlink.ini").write("""[testlink-conf]\n%s""" %
                                               ('\n'.join(k + "=dummy" for k in TLINK.ini_required_keys)) +
                                               """\n[testlink-maps]
test-1=tests/test_testlink.py::test_ini_map
test-2=tests/test_testlink.py::test_ini_map
""")
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 0
    result.stdout.fnmatch_lines_random("*Duplicate node ids in testlink maps: ['tests/test_testlink.py::test_ini_map'*")
    result = testdir.runpytest('--testlink-exit-on-error', testdir.tmpdir)
    assert result.ret == 3
    result.stderr.fnmatch_lines_random("*INTERNALERROR*")
    result.stderr.fnmatch_lines_random("*Duplicate node ids in testlink maps:*")
def test_ini_map_no_nodes(testdir):
    """An empty [testlink-maps] section is reported; fatal only with
    --testlink-exit-on-error."""
    init_ini(testdir)
    init_pass(testdir)
    testdir.tmpdir.ensure("testlink.ini").write("""[testlink-conf]\n%s""" %
                                               ('\n'.join(k + "=dummy" for k in TLINK.ini_required_keys)) +
                                               """\n[testlink-maps]""")
    result = testdir.runpytest(testdir.tmpdir)
    assert result.ret == 0
    result.stdout.fnmatch_lines_random("*No nodes found*")
    result = testdir.runpytest('--testlink-exit-on-error', testdir.tmpdir)
    assert result.ret == 3
    result.stderr.fnmatch_lines_random("*INTERNALERROR*")
    result.stderr.fnmatch_lines_random("*No nodes found!*")
def test_1(testdir):
    """NOTE(review): only initializes pytest.ini and asserts nothing —
    possibly an incomplete test; confirm."""
    init_ini(testdir)


def test_2():
    """Always fails — presumably exercised via the ts-2 testlink mapping;
    verify against init_testlink()."""
    assert 0


def test_3():
    """Always skips."""
    pytest.skip()


@pytest.mark.xfail
def test_4():
    """Expected failure (xfail)."""
    assert 0


@pytest.mark.xfail
def test_5():
    """Passes under xfail, i.e. reported as xpass."""
    assert 1
| 34.427928
| 120
| 0.642156
| 946
| 7,643
| 4.994715
| 0.120507
| 0.066032
| 0.099048
| 0.071111
| 0.801058
| 0.770794
| 0.747513
| 0.714074
| 0.707513
| 0.652275
| 0
| 0.013138
| 0.223211
| 7,643
| 221
| 121
| 34.58371
| 0.782719
| 0.023289
| 0
| 0.493671
| 0
| 0.006329
| 0.280959
| 0.063323
| 0
| 0
| 0
| 0
| 0.14557
| 1
| 0.120253
| false
| 0.101266
| 0.050633
| 0
| 0.170886
| 0.018987
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
c1d8bf4f9e4db6f16fbef1deb576b8fb2f47307b
| 3,509
|
py
|
Python
|
tests/integrated_recognizers/de/test_de_date_recognizer.py
|
openredact/pii-identifier
|
97eaef56d6de59718501095d631a0fb49700e45a
|
[
"MIT"
] | 14
|
2020-07-31T18:45:29.000Z
|
2022-02-21T13:24:00.000Z
|
tests/integrated_recognizers/de/test_de_date_recognizer.py
|
openredact/pii-identifier
|
97eaef56d6de59718501095d631a0fb49700e45a
|
[
"MIT"
] | 7
|
2020-07-31T06:17:21.000Z
|
2021-05-23T08:40:24.000Z
|
tests/integrated_recognizers/de/test_de_date_recognizer.py
|
openredact/pii-identifier
|
97eaef56d6de59718501095d631a0fb49700e45a
|
[
"MIT"
] | 1
|
2020-09-30T01:42:57.000Z
|
2020-09-30T01:42:57.000Z
|
import pytest
from nerwhal.integrated_recognizers.de.de_date_recognizer import DeDateRecognizer
@pytest.fixture(scope="module")
def backend(setup_backend):
recognizer = DeDateRecognizer
backend = setup_backend(recognizer.BACKEND, language="de")
backend.register_recognizer(recognizer)
return backend
# DIN 1355-1
def test_current_1355_1(backend, embed):
text = "Der 25.06.1999 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_future_1355_1(backend, embed):
text = "Der 25.12.2978 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_historic_1355_1(backend, embed):
text = "Der 25.12.768 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_abbreviated_1355_1(backend, embed):
text = "Der 5.12.74 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_1355_1_invalid_month(backend, embed):
text = "Der 24.13.1999 ist ein zufälliges Datum."
ents = backend.run(text)
assert len(ents) == 0
def test_1355_1_invalid_day(backend, embed):
text = "Der 35.12.1999 ist ein zufälliges Datum."
ents = backend.run(text)
assert len(ents) == 0
# DIN 5008
def test_current_5008(backend, embed):
text = "Der 1999-06-25 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_future_5008(backend, embed):
text = "Der 2978-12-25 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_5008_invalid_month(backend, embed):
text = "Der 1999-13-25 ist ein zufälliges Datum."
ents = backend.run(text)
assert len(ents) == 0
def test_5008_invalid_day(backend, embed):
text = "Der 1999-12-35 ist ein zufälliges Datum."
ents = backend.run(text)
assert len(ents) == 0
# Written out
def test_current_written_out(backend, embed):
text = "Der 25. Juni 1999 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_future_written_out(backend, embed):
text = "Der 25. Dezember 2978 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_historic_written_out(backend, embed):
text = "Der 25. Dezember 768 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_abbreviated_written_out(backend, embed):
text = "Der 5. Dez. 74 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "Der DATE ist ein zufälliges Datum."
def test_with_day_written_out(backend, embed):
text = "Montag, 5. Dez. 74 ist ein zufälliges Datum."
ents = backend.run(text)
assert embed(text, ents) == "DATE ist ein zufälliges Datum."
def test_written_out_invalid_month(backend, embed):
text = "Der 24. Foobar 1999 ist ein zufälliges Datum."
ents = backend.run(text)
assert len(ents) == 0
def test_written_out_invalid_day(backend, embed):
text = "Der 35. Dezember 1999 ist ein zufälliges Datum."
ents = backend.run(text)
assert len(ents) == 0
| 28.528455
| 81
| 0.700484
| 518
| 3,509
| 4.629344
| 0.119691
| 0.105088
| 0.186822
| 0.245204
| 0.842369
| 0.794412
| 0.747289
| 0.656797
| 0.600917
| 0.600917
| 0
| 0.060713
| 0.192647
| 3,509
| 122
| 82
| 28.762295
| 0.78574
| 0.008834
| 0
| 0.434211
| 0
| 0
| 0.312608
| 0
| 0
| 0
| 0
| 0
| 0.223684
| 1
| 0.236842
| false
| 0
| 0.026316
| 0
| 0.276316
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e7573f288c7425285b4c1b69dd60f3c650f5ff4d
| 17,849
|
py
|
Python
|
parser/team19/BDTytus/AST/Expresiones.py
|
strickergt128/tytus
|
93216dd9481ea0775da1d2967dc27be66872537f
|
[
"MIT"
] | null | null | null |
parser/team19/BDTytus/AST/Expresiones.py
|
strickergt128/tytus
|
93216dd9481ea0775da1d2967dc27be66872537f
|
[
"MIT"
] | null | null | null |
parser/team19/BDTytus/AST/Expresiones.py
|
strickergt128/tytus
|
93216dd9481ea0775da1d2967dc27be66872537f
|
[
"MIT"
] | null | null | null |
import AST.Nodo as Node
from TablaSimbolos.Tipos import *
from Errores.Nodo_Error import *
class Aritmetica(Node.Nodo):
    """AST node for a binary arithmetic expression (+, -, *, /, %).

    `analizar` type-checks the operands, `getC3D` emits three-address code,
    and `graficarasc` adds the node to a graphviz AST drawing.
    """

    def __init__(self, Exp1, Exp2, op, fila, col):
        # Operand sub-expressions, operator symbol, and source position.
        self.Exp1 = Exp1
        self.Exp2 = Exp2
        self.op = op
        self.fila = fila
        self.columna = col

    def analizar(self, TS, Errores):
        """Type-check the operation and return the resulting TIPO_DATOS.

        On incompatible operands ('-', '*', '/', '%') a semantic Nodo_Error
        is recorded and TIPO_DATOS.ERROR is returned.
        """
        tipo1 = self.Exp1.analizar(TS, Errores)
        tipo2 = self.Exp2.analizar(TS, Errores)
        if self.op == '+':
            if (
                    tipo1 == TIPO_DATOS.INT or tipo1 == TIPO_DATOS.CHAR or tipo1 == TIPO_DATOS.FLOAT or tipo1 == TIPO_DATOS.DOUBLE) and (
                    tipo2 == TIPO_DATOS.CHAR or tipo2 == TIPO_DATOS.INT or tipo2 == TIPO_DATOS.FLOAT or tipo2 == TIPO_DATOS.DOUBLE):
                # NOTE(review): INT + INT yields FLOAT here, whereas '-'/'*'
                # yield INT for the same operand types — confirm this
                # asymmetry is intended.
                if tipo1 == TIPO_DATOS.INT and tipo2 == TIPO_DATOS.INT:
                    return TIPO_DATOS.FLOAT
                elif tipo1 == TIPO_DATOS.CHAR or tipo2 == TIPO_DATOS.CHAR:
                    return TIPO_DATOS.INT
                return TIPO_DATOS.FLOAT
            else:
                # NOTE(review): unlike every other operator, '+' on
                # non-numeric operands returns CHAR without inserting a
                # Nodo_Error — verify this is deliberate (string concat?).
                return TIPO_DATOS.CHAR
        elif self.op == '-' or self.op == '*':
            if (
                    tipo1 == TIPO_DATOS.INT or tipo1 == TIPO_DATOS.CHAR or tipo1 == TIPO_DATOS.FLOAT or tipo1 == TIPO_DATOS.DOUBLE) and (
                    tipo2 == TIPO_DATOS.CHAR or tipo2 == TIPO_DATOS.INT or tipo2 == TIPO_DATOS.FLOAT or tipo2 == TIPO_DATOS.DOUBLE):
                if tipo1 == TIPO_DATOS.INT and tipo2 == TIPO_DATOS.INT:
                    return TIPO_DATOS.INT
                elif tipo1 == TIPO_DATOS.CHAR or tipo2 == TIPO_DATOS.CHAR:
                    return TIPO_DATOS.INT
                return TIPO_DATOS.FLOAT
            else:
                # NOTE(review): this branch uses `tipo.name` while the '/'
                # and '%' branches use `tipo.nombre` — one of the two is
                # likely wrong; confirm the TIPO_DATOS attribute API.
                Errores.insertar(
                    Nodo_Error("Semantico", "No es posible operacion entre " + str(tipo1.name) + ' ' + self.op
                               + ' ' + str(tipo2.name), self.fila, self.columna))
                return TIPO_DATOS.ERROR
        elif self.op == '/':
            if (
                    tipo1 == TIPO_DATOS.INT or tipo1 == TIPO_DATOS.CHAR or tipo1 == TIPO_DATOS.FLOAT or tipo1 == TIPO_DATOS.DOUBLE) and (
                    tipo2 == TIPO_DATOS.CHAR or tipo2 == TIPO_DATOS.INT or tipo2 == TIPO_DATOS.FLOAT or tipo2 == TIPO_DATOS.DOUBLE):
                # Division always promotes to FLOAT.
                return TIPO_DATOS.FLOAT
            else:
                Errores.insertar(
                    Nodo_Error("Semantico", "No es posible operacion entre " + str(tipo1.nombre) + ' ' + self.op
                               + ' ' + str(tipo2.nombre), self.fila, self.columna))
                return TIPO_DATOS.ERROR
        elif self.op == '%':
            # Modulo is only defined for integral operands (INT/CHAR).
            if (tipo1 == TIPO_DATOS.INT or tipo1 == TIPO_DATOS.CHAR) and (
                    tipo2 == TIPO_DATOS.CHAR or tipo2 == TIPO_DATOS.INT):
                return TIPO_DATOS.INT
            else:
                Errores.insertar(
                    Nodo_Error("Semantico", "No es posible operacion entre " + str(tipo1.nombre) + ' ' + self.op
                               + ' ' + str(tipo2.nombre), self.fila, self.columna))
                return TIPO_DATOS.ERROR

    def getC3D(self, TS):
        """Emit three-address code; the result temp is kept in self.temporal."""
        codigo = ""
        codigo += self.Exp1.getC3D(TS)
        codigo += self.Exp2.getC3D(TS)
        temp = TS.getTemp()
        self.temporal = temp
        codigo += TS.make3d(temp, self.Exp1.temporal, self.op, self.Exp2.temporal)
        return codigo

    def graficarasc(self, padre, grafica):
        """Add this expression (operands around the operator) to the graph."""
        nombrehijo = 'Node' + str(id(self))
        grafica.node(nombrehijo, label=('Exp'))
        grafica.edge(padre, nombrehijo)
        if self.Exp1 is not None:
            self.Exp1.graficarasc(nombrehijo, grafica)
        grafica.node('NodeE1' + str(id(self)), label=(str(self.op)))
        grafica.edge(nombrehijo, 'NodeE1' + str(id(self)))
        if self.Exp2 is not None:
            self.Exp2.graficarasc(nombrehijo, grafica)
class Relacional(Node.Nodo):
    """AST node for a binary relational expression; always evaluates to INT."""

    def __init__(self, Exp1, Exp2, op, fila, col):
        # Operand sub-expressions, operator symbol, and source position.
        self.Exp1 = Exp1
        self.Exp2 = Exp2
        self.op = op
        self.fila = fila
        self.columna = col

    def analizar(self, TS, Errores):
        """Type-check: any pair of STRING/INT/CHAR/FLOAT/DOUBLE compares to INT."""
        tipo1 = self.Exp1.analizar(TS, Errores)
        tipo2 = self.Exp2.analizar(TS, Errores)
        if (
                tipo1 == TIPO_DATOS.STRING or tipo1 == TIPO_DATOS.INT or tipo1 == TIPO_DATOS.CHAR or tipo1 == TIPO_DATOS.FLOAT or tipo1 == TIPO_DATOS.DOUBLE) and (
                tipo2 == TIPO_DATOS.STRING or tipo2 == TIPO_DATOS.INT or tipo2 == TIPO_DATOS.CHAR or tipo2 == TIPO_DATOS.FLOAT or tipo2 == TIPO_DATOS.DOUBLE):
            return TIPO_DATOS.INT
        else:
            # NOTE(review): error text interpolates str(tipo1)/str(tipo2)
            # directly, while sibling classes use tipo.nombre/tipo.name —
            # messages will differ in format; confirm which is intended.
            Errores.insertar(
                Nodo_Error("Semantico", "No es posible operacion entre " + str(tipo1) + ' ' + self.op
                           + ' ' + str(tipo2), self.fila, self.columna))
            return TIPO_DATOS.ERROR

    def getC3D(self, TS):
        """Emit three-address code; the result temp is kept in self.temporal."""
        codigo = ""
        codigo += self.Exp1.getC3D(TS)
        codigo += self.Exp2.getC3D(TS)
        temp = TS.getTemp()
        self.temporal = temp
        codigo += TS.make3d(temp, self.Exp1.temporal, self.op, self.Exp2.temporal)
        return codigo

    def graficarasc(self, padre, grafica):
        """Add this expression (operands around the operator) to the graph."""
        nombrehijo = 'Node' + str(id(self))
        grafica.node(nombrehijo, label=('Exp'))
        grafica.edge(padre, nombrehijo)
        if self.Exp1 is not None:
            self.Exp1.graficarasc(nombrehijo, grafica)
        grafica.node('NodeE1' + str(id(self)), label=(str(self.op)))
        grafica.edge(nombrehijo, 'NodeE1' + str(id(self)))
        if self.Exp2 is not None:
            self.Exp2.graficarasc(nombrehijo, grafica)
class primitivo(Node.Nodo):
    """AST leaf node for a literal value (decimal, entero, char, string)."""

    def __init__(self, Valor, fila, col, tipo):
        self.fila = fila
        self.columna = col
        self.valor = Valor
        self.temporal = ""
        # Map the grammar's type tag to a TIPO_DATOS member.
        # NOTE(review): an unrecognized `tipo` string leaves self.tipo unset,
        # so analizar() would raise AttributeError — confirm all producers
        # only pass these four tags.
        if tipo == "decimal":
            self.tipo = TIPO_DATOS.FLOAT
        elif tipo == "entero":
            self.tipo = TIPO_DATOS.INT
        elif tipo == "char":
            self.tipo = TIPO_DATOS.CHAR
        elif tipo == "string":
            self.tipo = TIPO_DATOS.STRING

    def analizar(self, TS, Errores):
        """A literal's type is fixed at construction time."""
        return self.tipo

    def getC3D(self, TS):
        """Store the quoted/stringified literal in self.temporal; no code."""
        if self.tipo == TIPO_DATOS.CHAR:
            self.temporal = '\'' + str(self.valor) + '\''
        elif self.tipo == TIPO_DATOS.STRING:
            self.temporal = '\"' + str(self.valor) + '\"'
        else:
            self.temporal = str(self.valor)
        return ""

    def graficarasc(self, padre, grafica):
        """Add this literal (labelled with its value) to the graph."""
        nombrehijo = 'Node' + str(id(self))
        grafica.node(nombrehijo, label=('Exp'))
        grafica.edge(padre, nombrehijo)
        grafica.node('NodeV' + str(id(self)), label=(str(self.valor)))
        grafica.edge(nombrehijo, 'NodeV' + str(id(self)))
class variable(Node.Nodo):
    """AST leaf node referencing a symbol by name."""

    def __init__(self, nombre, fila, col):
        self.fila = fila
        self.columna = col
        self.nombre = nombre
        self.temporal = ""

    def analizar(self, TS, Errores):
        """Look the name up in the symbol table; error if undeclared."""
        simbolo = TS.obtener(self.nombre)
        if simbolo is None:
            Errores.insertar(
                Nodo_Error("Semantico", "No existe variable " + self.nombre, self.fila, self.columna))
            return TIPO_DATOS.ERROR
        return simbolo.tipo

    def getC3D(self, TS):
        """Resolve the symbol's storage position into self.temporal.

        NOTE(review): unlike analizar(), no None check here — a missing
        symbol would raise AttributeError; presumably analizar() always runs
        first. Confirm.
        """
        codigo = ""
        simbolo = TS.obtener(self.nombre)
        self.temporal = simbolo.posicion
        return codigo

    def graficarasc(self, padre, grafica):
        """Add this identifier (labelled with its name) to the graph."""
        nombrehijo = 'Node' + str(id(self))
        grafica.node(nombrehijo, label=('Exp'))
        grafica.edge(padre, nombrehijo)
        grafica.node('NodeI' + str(id(self)), label=(str(self.nombre)))
        grafica.edge(nombrehijo, 'NodeI' + str(id(self)))
class bitabit(Node.Nodo):
    """AST node for a bitwise binary expression; only INT/CHAR operands."""

    def __init__(self, Exp1, Exp2, op, fila, col):
        self.fila = fila
        self.columna = col
        self.Exp1 = Exp1
        self.Exp2 = Exp2
        self.op = op
        self.temporal = ""

    def analizar(self, TS, Errores):
        """Type-check: both operands must be integral (INT/CHAR); result INT."""
        tipo1 = self.Exp1.analizar(TS, Errores)
        tipo2 = self.Exp2.analizar(TS, Errores)
        if (
                tipo1 == TIPO_DATOS.INT or tipo1 == TIPO_DATOS.CHAR) and (
                tipo2 == TIPO_DATOS.INT or tipo2 == TIPO_DATOS.CHAR):
            return TIPO_DATOS.INT
        else:
            # NOTE(review): uses tipo.nombre — inconsistent with
            # Aritmetica's '-'/'*' branch which uses tipo.name; confirm.
            Errores.insertar(
                Nodo_Error("Semantico", "No es posible operacion entre " + str(tipo1.nombre) + ' ' + self.op
                           + ' ' + str(tipo2.nombre), self.fila, self.columna))
            return TIPO_DATOS.ERROR

    def getC3D(self, TS):
        """Emit three-address code; the result temp is kept in self.temporal."""
        codigo = ""
        codigo += self.Exp1.getC3D(TS)
        codigo += self.Exp2.getC3D(TS)
        temp = TS.getTemp()
        self.temporal = temp
        codigo += TS.make3d(temp, self.Exp1.temporal, self.op, self.Exp2.temporal)
        return codigo

    def graficarasc(self, padre, grafica):
        """Add this expression (operands around the operator) to the graph."""
        nombrehijo = 'Node' + str(id(self))
        grafica.node(nombrehijo, label=('Exp'))
        grafica.edge(padre, nombrehijo)
        if self.Exp1 is not None:
            self.Exp1.graficarasc(nombrehijo, grafica)
        grafica.node('NodeE1' + str(id(self)), label=(str(self.op)))
        grafica.edge(nombrehijo, 'NodeE1' + str(id(self)))
        if self.Exp2 is not None:
            self.Exp2.graficarasc(nombrehijo, grafica)
class logica(Node.Nodo):
    """AST node for a logical binary expression; numeric operands, INT result."""

    def __init__(self, Exp1, Exp2, op, fila, col):
        self.fila = fila
        self.columna = col
        self.Exp1 = Exp1
        self.Exp2 = Exp2
        self.op = op

    def analizar(self, TS, Errores):
        """Type-check: both operands numeric (INT/CHAR/FLOAT/DOUBLE) -> INT."""
        tipo1 = self.Exp1.analizar(TS, Errores)
        tipo2 = self.Exp2.analizar(TS, Errores)
        if (
                tipo1 == TIPO_DATOS.INT or tipo1 == TIPO_DATOS.CHAR or tipo1 == TIPO_DATOS.FLOAT or tipo1 == TIPO_DATOS.DOUBLE) and (
                tipo2 == TIPO_DATOS.INT or tipo2 == TIPO_DATOS.CHAR or tipo2 == TIPO_DATOS.FLOAT or tipo2 == TIPO_DATOS.DOUBLE):
            return TIPO_DATOS.INT
        else:
            Errores.insertar(
                Nodo_Error("Semantico", "No es posible operacion entre " + str(tipo1.nombre) + ' ' + self.op
                           + ' ' + str(tipo2.nombre), self.fila, self.columna))
            return TIPO_DATOS.ERROR

    def getC3D(self, TS):
        """Emit three-address code; the result temp is kept in self.temporal."""
        codigo = ""
        codigo += self.Exp1.getC3D(TS)
        codigo += self.Exp2.getC3D(TS)
        temp = TS.getTemp()
        self.temporal = temp
        codigo += TS.make3d(temp, self.Exp1.temporal, self.op, self.Exp2.temporal)
        return codigo

    def graficarasc(self, padre, grafica):
        """Add this expression (operands around the operator) to the graph."""
        nombrehijo = 'Node' + str(id(self))
        grafica.node(nombrehijo, label=('Exp'))
        grafica.edge(padre, nombrehijo)
        if self.Exp1 is not None:
            self.Exp1.graficarasc(nombrehijo, grafica)
        grafica.node('NodeE1' + str(id(self)), label=(str(self.op)))
        grafica.edge(nombrehijo, 'NodeE1' + str(id(self)))
        if self.Exp2 is not None:
            self.Exp2.graficarasc(nombrehijo, grafica)
class incremento(Node.Nodo):
    """AST node for pre/post increment or decrement (++ / --).

    `primero` is truthy when the operator precedes the operand (prefix form).
    """

    def __init__(self, Exp1, op, primero, fila, col):
        self.fila = fila
        self.columna = col
        self.Exp1 = Exp1
        self.primero = primero
        self.op = op

    def analizar(self, TS, Errores):
        """Type-check: operand must be numeric; the operand's type is kept."""
        tipo = self.Exp1.analizar(TS, Errores)
        if tipo == TIPO_DATOS.INT or tipo == TIPO_DATOS.CHAR or tipo == TIPO_DATOS.FLOAT or tipo == TIPO_DATOS.DOUBLE:
            return tipo
        else:
            Errores.insertar(
                Nodo_Error("Semantico", "No es posible incremento/decremento", self.fila, self.columna))
            return TIPO_DATOS.ERROR

    def getC3D(self, TS):
        """Emit three-address code for the in/decrement.

        Prefix: mutate in place and expose the mutated temp. Postfix: copy
        the old value into a fresh temp first so the expression's value is
        the pre-mutation one.
        """
        codigo = ""
        if self.op == '++':
            operador = '+'
        else:
            operador = '-'
        if self.primero:
            codigo += self.Exp1.getC3D(TS)
            temporal = self.Exp1.temporal
            codigo += TS.make3d(temporal, temporal, operador, 1)
            self.temporal = temporal
            return codigo
        else:
            codigo += self.Exp1.getC3D(TS)
            temporal = self.Exp1.temporal
            temporal2 = TS.getTemp()
            codigo += temporal2 + '=' + temporal + ';\n'
            codigo += TS.make3d(temporal, temporal, operador, 1)
            self.temporal = temporal2
            return codigo

    def graficarasc(self, padre, grafica):
        """Add the node to the graph; operator drawn before or after the
        operand depending on prefix/postfix form."""
        nombrehijo = 'Node' + str(id(self))
        grafica.node(nombrehijo, label=('Exp'))
        grafica.edge(padre, nombrehijo)
        if self.primero:
            grafica.node('NodeE1' + str(id(self)), label=(str(self.op)))
            grafica.edge(nombrehijo, 'NodeE1' + str(id(self)))
            self.Exp1.graficarasc(nombrehijo, grafica)
        else:
            self.Exp1.graficarasc(nombrehijo, grafica)
            grafica.node('NodeE1' + str(id(self)), label=(str(self.op)))
            grafica.edge(nombrehijo, 'NodeE1' + str(id(self)))
class unario(Node.Nodo):
    """AST node for a unary operator applied to a single expression."""

    def __init__(self, Exp, op, fila, col):
        self.fila = fila
        self.columna = col
        self.Exp = Exp
        self.op = op

    def analizar(self, TS, Errores):
        """'~' admits only integral operands; other unary ops admit any
        numeric type. Both yield INT on success."""
        tipo_exp = self.Exp.analizar(TS, Errores)
        if self.op == '~':
            admitidos = (TIPO_DATOS.INT, TIPO_DATOS.CHAR)
        else:
            admitidos = (TIPO_DATOS.INT, TIPO_DATOS.CHAR, TIPO_DATOS.DOUBLE, TIPO_DATOS.FLOAT)
        if tipo_exp in admitidos:
            # NOTE(review): the result type is INT even for float/double
            # operands — this mirrors the original code; confirm intent.
            return TIPO_DATOS.INT
        Errores.insertar(
            Nodo_Error("Semantico", "No es posible operador unario " + self.op + ' con tipo de dato ' +
                       str(tipo_exp.nombre), self.fila, self.columna))
        return TIPO_DATOS.ERROR

    def getC3D(self, TS):
        """Emit the operand's code, then apply the operator into a fresh
        temporary stored on ``self.temporal``."""
        codigo = self.Exp.getC3D(TS)
        self.temporal = TS.getTemp()
        codigo += self.temporal + ' = ' + str(self.op) + ' ' + self.Exp.temporal + '; \n'
        return codigo

    def graficarasc(self, padre, grafica):
        """Draw the operator label followed by the operand subtree."""
        yo = 'Node' + str(id(self))
        etiqueta_op = 'NodeE1' + str(id(self))
        grafica.node(yo, label=('Exp'))
        grafica.edge(padre, yo)
        grafica.node(etiqueta_op, label=(str(self.op)))
        grafica.edge(yo, etiqueta_op)
        if self.Exp is not None:
            self.Exp.graficarasc(yo, grafica)
class ternario(Node.Nodo):
    """AST node for the conditional operator ``Cond ? Exp1 : Exp2``."""

    def __init__(self, Cond, Exp1, Exp2, fila, col):
        self.fila = fila
        self.columna = col
        self.Cond = Cond
        self.Exp1 = Exp1
        self.Exp2 = Exp2

    def analizar(self, TS, Errores):
        """Type-check the condition and both branches.

        BUG FIX: the original called ``self.analizar(TS, Errores)`` on its
        first line, recursing on itself forever; the condition's own
        ``analizar`` is what is meant.
        """
        tipo = self.Cond.analizar(TS, Errores)
        if not (
                tipo == TIPO_DATOS.INT or tipo == TIPO_DATOS.CHAR or tipo == TIPO_DATOS.DOUBLE or tipo == TIPO_DATOS.FLOAT):
            Errores.insertar(
                Nodo_Error("Semantico", "La el tipo de condicion no es valido en ternario ", self.fila, self.columna))
            return TIPO_DATOS.ERROR
        tipo2 = self.Exp1.analizar(TS, Errores)
        tipo3 = self.Exp2.analizar(TS, Errores)
        if tipo == TIPO_DATOS.ERROR or tipo2 == TIPO_DATOS.ERROR or tipo3 == TIPO_DATOS.ERROR:
            return TIPO_DATOS.ERROR
        # The original fell off the end here (implicitly returning None);
        # report the true-branch type so callers can keep type-checking.
        return tipo2

    def getC3D(self, TS):
        """Emit branching 3-address code for the ternary.

        BUG FIX: the false label originally re-emitted ``Exp1``, so both
        arms produced the same value; it now emits ``Exp2``.
        """
        codigo = ""
        V = TS.getEtq()
        F = TS.getEtq()
        S = TS.getEtq()
        self.temporal = TS.getTemp()
        codigo += self.Cond.getC3D(TS)
        codigo += 'if (' + str(self.Cond.temporal) + ') goto ' + V + ';\n'
        codigo += 'goto ' + F + ';\n'
        codigo += V + ':\n'
        codigo += self.Exp1.getC3D(TS)
        codigo += self.temporal + '=' + str(self.Exp1.temporal) + ';\n'
        codigo += 'goto ' + S + ';\n'
        codigo += F + ':\n'
        codigo += self.Exp2.getC3D(TS)
        codigo += self.temporal + '=' + str(self.Exp2.temporal) + ';\n'
        codigo += 'goto ' + S + ';\n'
        codigo += S + ':\n'
        return codigo

    def graficarasc(self, padre, grafica):
        """Draw ``Cond ? Exp1 : Exp2`` under *padre*."""
        nombrehijo = 'Node' + str(id(self))
        grafica.node(nombrehijo, label=('Exp'))
        # BUG FIX: the edge to the parent was missing, leaving this subtree
        # detached from the rest of the drawing (all sibling nodes draw it).
        grafica.edge(padre, nombrehijo)
        self.Cond.graficarasc(nombrehijo, grafica)
        grafica.node('NodeE1' + str(id(self)), label="?")
        grafica.edge(nombrehijo, 'NodeE1' + str(id(self)))
        self.Exp1.graficarasc(nombrehijo, grafica)
        grafica.node('NodeE2' + str(id(self)), label=":")
        grafica.edge(nombrehijo, 'NodeE2' + str(id(self)))
        self.Exp2.graficarasc(nombrehijo, grafica)
class casteo(Node.Nodo):
    """AST node for an explicit C-style cast ``(tipo) Exp``."""

    def __init__(self, Cast, Exp, fila, col):
        self.fila = fila
        self.columna = col
        self.Exp = Exp
        self.cast = Cast

    def analizar(self, TS, Errores):
        """Resolve the cast's target type, store it on ``self.tipo`` and
        return it (sibling ``analizar`` methods also return a type;
        the original returned None, which is backward-compatible to fix)."""
        self.Exp.analizar(TS, Errores)
        if self.cast == "char":
            self.tipo = TIPO_DATOS.CHAR
        elif self.cast == "int":
            self.tipo = TIPO_DATOS.INT
        elif self.cast == "float":
            self.tipo = TIPO_DATOS.FLOAT
        elif self.cast == "double":
            # Previously unsupported even though TIPO_DATOS.DOUBLE is used
            # by the other expression nodes in this file.
            self.tipo = TIPO_DATOS.DOUBLE
        else:
            # Unknown target type: report it instead of leaving self.tipo
            # unset (the original would raise AttributeError on later access).
            Errores.insertar(
                Nodo_Error("Semantico", "Casteo a tipo no valido " + str(self.cast), self.fila, self.columna))
            self.tipo = TIPO_DATOS.ERROR
        return self.tipo

    def getC3D(self, TS):
        """Emit the operand's code, then the cast into a fresh temporary."""
        codigo = ""
        codigo += self.Exp.getC3D(TS)
        temp = TS.getTemp()
        codigo += temp + '= (' + self.cast + ')' + self.Exp.temporal + ';\n'
        self.temporal = temp
        return codigo  # stray trailing semicolon removed

    def graficarasc(self, padre, grafica):
        # TODO: still a stub — every other node in this file draws itself here.
        pass
class sizeof(Node.Nodo):
    """AST node for ``sizeof``; no analysis and no code are produced."""

    def __init__(self, Exp, fila, col):
        self.fila = fila
        self.columna = col
        self.Exp = Exp

    def analizar(self, TS, Errores):
        """No semantic checks are performed for sizeof."""
        return

    def getC3D(self, TS):
        # sizeof always resolves to the literal "3" with no emitted code —
        # presumably a uniform cell size in this backend; confirm upstream.
        self.temporal = "3"
        return ""

    def graficarasc(self, padre, grafica):
        """Nothing is drawn for sizeof."""
        return
| 37.576842
| 163
| 0.557958
| 2,116
| 17,849
| 4.628072
| 0.051985
| 0.101093
| 0.040437
| 0.03431
| 0.879812
| 0.826611
| 0.789441
| 0.752272
| 0.747983
| 0.704483
| 0
| 0.018538
| 0.316993
| 17,849
| 474
| 164
| 37.656118
| 0.784759
| 0
| 0
| 0.735941
| 0
| 0
| 0.046445
| 0.001177
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107579
| false
| 0.002445
| 0.007335
| 0.007335
| 0.246944
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e7672f046e74a5182ce8cade5578978f8ade0ff4
| 335
|
py
|
Python
|
logPage/forms.py
|
Mariga123/carpool
|
f7330634ace2718c2347694b207b9dd49ef6538f
|
[
"MIT"
] | null | null | null |
logPage/forms.py
|
Mariga123/carpool
|
f7330634ace2718c2347694b207b9dd49ef6538f
|
[
"MIT"
] | null | null | null |
logPage/forms.py
|
Mariga123/carpool
|
f7330634ace2718c2347694b207b9dd49ef6538f
|
[
"MIT"
] | null | null | null |
from django import forms
# class NameForm(forms.Form):
#     userId = forms.CharField(max_length = 200 , label = 'userId')
#     passWd = forms.CharField(max_length = 200 , label = 'passWd')
#     firstName = forms.CharField(max_length = 200 , label = 'firstName')
#     lastName = forms.CharField(max_length = 200 , label = 'lastName')
| 33.5
| 71
| 0.695522
| 40
| 335
| 5.725
| 0.425
| 0.262009
| 0.31441
| 0.419214
| 0.558952
| 0.558952
| 0
| 0
| 0
| 0
| 0
| 0.043165
| 0.170149
| 335
| 9
| 72
| 37.222222
| 0.780576
| 0.883582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
e7691867e109ed5489811d0b6e7581f56fc09dfd
| 145
|
py
|
Python
|
nnet/loss/__init__.py
|
trip2eee/nnet
|
08c435a7b40aa0b41eb64875b39d3705cf9cffdd
|
[
"MIT"
] | 3
|
2021-12-31T10:59:54.000Z
|
2022-01-14T11:17:28.000Z
|
nnet/loss/__init__.py
|
trip2eee/nnet
|
08c435a7b40aa0b41eb64875b39d3705cf9cffdd
|
[
"MIT"
] | null | null | null |
nnet/loss/__init__.py
|
trip2eee/nnet
|
08c435a7b40aa0b41eb64875b39d3705cf9cffdd
|
[
"MIT"
] | null | null | null |
from nnet.loss.loss import Loss
from nnet.loss.celoss import CELoss
from nnet.loss.bceloss import BCELoss
from nnet.loss.mseloss import MSELoss
| 24.166667
| 37
| 0.827586
| 24
| 145
| 5
| 0.291667
| 0.266667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117241
| 145
| 5
| 38
| 29
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e7c69f57088ea46fd5d25d78233c8d761697afb4
| 3,027
|
py
|
Python
|
pyaz/sshkey/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/sshkey/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/sshkey/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage ssh public key with vm
'''
from .. pyaz_utils import _call_az
def list(resource_group=None):
    '''
    List all of the SSH public keys.

    Optional Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # The original forwarded locals(); the mapping is spelled out so the
    # argument set is explicit.
    params = {"resource_group": resource_group}
    return _call_az("az sshkey list", params)
def show(name, resource_group):
    '''
    Retrieve information about an SSH public key.

    Required Parameters:
    - name -- The name of the SSH public key.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # Explicit parameter mapping instead of the original locals() forwarding.
    params = {"name": name, "resource_group": resource_group}
    return _call_az("az sshkey show", params)
def create(name, resource_group, location=None, public_key=None, tags=None):
    '''
    Create a new SSH public key resource.

    Required Parameters:
    - name -- The name of the SSH public key.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - public_key -- SSH public key used to authenticate to a virtual machine through ssh. If this property is not initially provided when the resource is created, the publicKey property will be populated when generateKeyPair is called. If the public key is provided upon resource creation, the provided public key needs to be at least 2048-bit and in ssh-rsa format.
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    '''
    # Explicit parameter mapping instead of the original locals() forwarding.
    params = {
        "name": name,
        "resource_group": resource_group,
        "location": location,
        "public_key": public_key,
        "tags": tags,
    }
    return _call_az("az sshkey create", params)
def update(name, resource_group, public_key=None, tags=None):
    '''
    Update an SSH public key resource.

    Required Parameters:
    - name -- The name of the SSH public key.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - public_key -- SSH public key used to authenticate to a virtual machine through ssh. If this property is not initially provided when the resource is created, the publicKey property will be populated when generateKeyPair is called. If the public key is provided upon resource creation, the provided public key needs to be at least 2048-bit and in ssh-rsa format.
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    '''
    # Explicit parameter mapping instead of the original locals() forwarding.
    params = {
        "name": name,
        "resource_group": resource_group,
        "public_key": public_key,
        "tags": tags,
    }
    return _call_az("az sshkey update", params)
def delete(name, resource_group, yes=None):
    '''
    Delete an SSH public key.

    Required Parameters:
    - name -- The name of the SSH public key.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - yes -- Do not prompt for confirmation.
    '''
    # Explicit parameter mapping instead of the original locals() forwarding.
    params = {"name": name, "resource_group": resource_group, "yes": yes}
    return _call_az("az sshkey delete", params)
| 42.633803
| 366
| 0.708622
| 430
| 3,027
| 4.925581
| 0.213953
| 0.080737
| 0.062323
| 0.050992
| 0.779981
| 0.738905
| 0.738905
| 0.738905
| 0.738905
| 0.738905
| 0
| 0.003313
| 0.20218
| 3,027
| 70
| 367
| 43.242857
| 0.873706
| 0.756194
| 0
| 0
| 0
| 0
| 0.135231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
e7c7b967728d7e2d70121e71c0f9ca23ee32ae4d
| 43
|
py
|
Python
|
denguefever_tw/dengue_linebot/denguebot_fsm/__init__.py
|
NCKU-CCS/line_bot_server
|
954ac77640466f625cc52c2ca8bacd37e87517a3
|
[
"MIT"
] | 3
|
2016-12-31T15:06:44.000Z
|
2017-10-14T16:25:02.000Z
|
denguefever_tw/dengue_linebot/denguebot_fsm/__init__.py
|
NCKU-CCS/line_bot_server
|
954ac77640466f625cc52c2ca8bacd37e87517a3
|
[
"MIT"
] | 8
|
2017-06-02T14:21:59.000Z
|
2021-06-09T17:41:54.000Z
|
denguefever_tw/dengue_linebot/denguebot_fsm/__init__.py
|
NCKU-CCS/line_bot_server
|
954ac77640466f625cc52c2ca8bacd37e87517a3
|
[
"MIT"
] | 3
|
2017-05-26T06:32:59.000Z
|
2017-07-18T01:27:03.000Z
|
from .denguebotfsm import generate_fsm_cls
| 21.5
| 42
| 0.883721
| 6
| 43
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093023
| 43
| 1
| 43
| 43
| 0.923077
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
99d0f3e3c093d852a38f97dcaf11e93807acad50
| 36
|
py
|
Python
|
Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/81_como_criar_modulos/outro.py
|
pedrohd21/Cursos-Feitos
|
b223aad83867bfa45ad161d133e33c2c200d42bd
|
[
"MIT"
] | null | null | null |
Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/81_como_criar_modulos/outro.py
|
pedrohd21/Cursos-Feitos
|
b223aad83867bfa45ad161d133e33c2c200d42bd
|
[
"MIT"
] | null | null | null |
Curso_Python/Secao3-Python-Intermediario-Programacao-Procedural/81_como_criar_modulos/outro.py
|
pedrohd21/Cursos-Feitos
|
b223aad83867bfa45ad161d133e33c2c200d42bd
|
[
"MIT"
] | null | null | null |
def fala_oi():
    """Return the fixed example name exported by this demo module."""
    nome = 'Pedro'
    return nome
| 7.2
| 18
| 0.583333
| 5
| 36
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 36
| 4
| 19
| 9
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
822fb7e39b96d62ead8a1254e15eec06321564bb
| 1,585
|
py
|
Python
|
tests/test_resource_user_item.py
|
zgoda/wk-backend
|
e58bbc1a23cc93d3cf5e259f30653fac36a5fa00
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_resource_user_item.py
|
zgoda/wk-backend
|
e58bbc1a23cc93d3cf5e259f30653fac36a5fa00
|
[
"BSD-3-Clause"
] | 6
|
2021-11-03T00:55:06.000Z
|
2022-03-02T00:39:30.000Z
|
tests/test_resource_user_item.py
|
zgoda/wk-backend
|
e58bbc1a23cc93d3cf5e259f30653fac36a5fa00
|
[
"BSD-3-Clause"
] | null | null | null |
from flask import url_for
def _login_and_patch_name(client, login, email, password, target_email):
    """Log *email* in and PATCH a new display name for *target_email*.

    Shared request sequence for the three tests below (it was triplicated
    verbatim). Returns the response and the payload that was sent.
    """
    tokens = login(client, email, password)
    headers = {"X-CSRF-TOKEN": tokens.csrf_access_token}
    url = url_for("api.user_item", email=target_email)
    data = {"name": "My Fancy Name"}
    rv = client.patch(url, json=data, headers=headers)
    return rv, data


def test_user_modify(client, login, user_factory):
    """A user may change their own display name."""
    email = "test@example.com"
    password = "pass"
    name = "test name"
    user_factory(email=email, password=password, name=name)
    rv, data = _login_and_patch_name(client, login, email, password, email)
    assert rv.status_code == 200
    assert rv.json["user"]["name"] == data["name"]


def test_user_modify_fail_notfound(client, login, user_factory):
    """Patching a user that does not exist returns 404."""
    email = "test@example.com"
    another_email = "another@example.com"
    password = "pass"
    user_factory(email=email, password=password)
    rv, _ = _login_and_patch_name(client, login, email, password, another_email)
    assert rv.status_code == 404


def test_user_modify_fail_notallowed(client, login, user_factory):
    """Patching a different, existing user returns 403."""
    email = "test@example.com"
    another_email = "another@example.com"
    password = "pass"
    user_factory(email=email, password=password)
    user_factory(email=another_email, password=password)
    rv, _ = _login_and_patch_name(client, login, email, password, another_email)
    assert rv.status_code == 403
| 36.860465
| 66
| 0.692744
| 217
| 1,585
| 4.884793
| 0.193548
| 0.072642
| 0.10566
| 0.048113
| 0.860377
| 0.820755
| 0.785849
| 0.785849
| 0.74717
| 0.74717
| 0
| 0.00687
| 0.173502
| 1,585
| 42
| 67
| 37.738095
| 0.80229
| 0
| 0
| 0.666667
| 0
| 0
| 0.154574
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.083333
| false
| 0.277778
| 0.027778
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8232d78ff558c33d975d96c8a8ba6f10b564042b
| 139,770
|
py
|
Python
|
projects/src/main/python/CodeJam/Y12R5P1/kmod/generated_py_3eefd069353841ada4d18bf604bdf2b7.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 5
|
2020-04-05T18:04:13.000Z
|
2021-04-13T20:34:19.000Z
|
projects/src/main/python/CodeJam/Y12R5P1/kmod/generated_py_3eefd069353841ada4d18bf604bdf2b7.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 1
|
2020-04-29T21:42:26.000Z
|
2020-05-01T23:45:45.000Z
|
projects/src/main/python/CodeJam/Y12R5P1/kmod/generated_py_3eefd069353841ada4d18bf604bdf2b7.py
|
DynamicCodeSearch/CodeSeer
|
ee985ece7691691585952eb88565f0e08bdc9113
|
[
"MIT"
] | 3
|
2020-01-27T16:02:14.000Z
|
2021-02-08T13:25:15.000Z
|
import sys
sys.path.append('/home/george2/Raise/ProgramRepair/CodeSeer/projects/src/main/python')
from CodeJam.Y12R5P1.kmod.a import *
def func_6f7a60ff95a5430cafffcf8cc1a232b0(idx1, idx2, l, p):
    """Cross-multiplied p/l comparison of two indices.

    Returns 1 when index *idx1*'s p/l ratio is smaller than *idx2*'s,
    otherwise the index difference. (This generated variant lacks the
    'greater' branch of the full comparator defined just below.)
    """
    izquierda = p[idx1] * l[idx2]
    derecha = p[idx2] * l[idx1]
    return 1 if izquierda < derecha else idx1 - idx2
def func_0cf0b423eb914d7e8bc26be000f755f7(idx1, idx2, l, p):
    """cmp-style comparator ordering indices by decreasing p/l ratio.

    Uses cross-multiplication (p[i]*l[j] vs p[j]*l[i]) to avoid division;
    ties break by the index difference.
    """
    izquierda = p[idx1] * l[idx2]
    derecha = p[idx2] * l[idx1]
    if izquierda > derecha:
        return -1
    if izquierda < derecha:
        return 1
    return idx1 - idx2
def func_df3a048727c24f62aed20af55c603a80(f, p):
    """Machine-generated probe: read one case header from *f* and return l.

    Reads a count line and a list line from the file-like *f* and returns
    the mapped values. The inner ``cmp`` closure is defined but never
    called here (it closes over ``l``/``p`` — an artifact of the generator
    that sliced these variants out of one Code Jam solution).
    """
    def cmp(idx1, idx2):
        # Orders indices by decreasing p/l ratio via cross-multiplication.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    n = int(f.readline())
    # NOTE(review): written for Python 2, where map() returns a list; under
    # Python 3 this returns a lazy map object — confirm intended runtime.
    l = map(int, f.readline().split())
    return l
def func_b4018aba03484b56860b5f35535384fa(f, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
return n
def func_e7e9ae566f58421aa45e501fd9404fde(f, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
return f
def func_ce91211b89d04caaa9b05e53aff5c115(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())
return f
def func_2b6d6caa74cb4ced8630993da85da329(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())
return p
def func_74db6412983d4e2e8d1c33b22ab0170a(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())
return l
def func_b3e56949a51141e79079fdb7f3e2e199(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)
return f
def func_b3aaa706be8d492db06bec0564a2ee07(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)
return p
def func_bf7127e3ed9244878b80b7661042ca53(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)
return l
def func_6f1f94700b3441c1a20447c8ad92abd8(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)
return n
def func_e86f6c9957a14be78ad33b627f5cd001(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
return l
def func_975f1823fa12444c9b34d79a411bfc37(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
return p
def func_78663e6b4b3b4ce4a3414ccc69ef30ae(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
return n
def func_1fb1767d42df42e0bd87a89d96c8f2f1(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
return n
def func_d97619ea782b4ea7a7f8abcd5208f494(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
return idx
def func_bcdeaebc766745119e90c50c12ceb8db(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
return l
def func_fd7c77c7ebfd446bad535b27a084bc92(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
return p
def func_a5e434966dd1494fa4ba3df6cd9c0bc1(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)
return p
def func_dbfe90a40b5444cab94e6a360aea660d(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)
return n
def func_038716315e984118b64ea930897a336f(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)
return idx
def func_467893eeb9fe4ea28478c789fa325a73(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)
return l
def func_dd42025f12be4fe6a4a364593e63474a(idx, l, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return l
def func_6cf0a201a2004cf99632d2a2f3ad2d4a(idx, l, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return idx
def func_189823bf50ca422a9e8faedfacc97ea0(idx, l, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return _t
def func_7b7011a47ad54cd4bb32f241fd9d15a1(idx, l, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return p
def func_7160e54ae0f244f394638e3628aa7c10(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_b5cda45188714ce298557627fd161aeb(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_1e959ff85ba641c2894bc1bd23167556(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_4c633eeecf834542ae18fda6e5857f28(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_f77f6af4492c48bc9d39fe9788e9039b(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_4588ef8a638947ac93adf7f3b233ec4d(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_38d608ec286040fdad5b1df8dc11e50b(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_09ccbcfbbe6346dcadcf358b39cea97b(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
return n
def func_8136661a6f0849cc84ed59b9ce9731ba(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
return l
def func_bc6c37838dce4392ae30bf7594cea009(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
return f
def func_269d916836394357962cbdff39dcc866(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
return p
def func_a5bba859598142569c9f08cb6436b9dd(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)
return l
def func_db1b1c3cc2754e70a8181e31d8f3eb7e(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)
return f
def func_b247a53362f04d27ab1d603bfb1f09b0(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)
return n
def func_8b58dbe7976243528a7db6dc031b07f0(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)
return p
def func_651216ed8e4d4d75b2efbede421dd8d3(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return l
def func_43109bc1bbb24af886725bb73b30cecf(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return p
def func_61120cfc9a5d4d58b11e73c5f17e0f37(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return f
def func_3564baa698a14bfe864ded98405dd2a9(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return n
def func_a7b980422cfa456e90cdde9ae3a3bc34(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
return idx
def func_75c643f6d7be45f0a09d4a4a9a9d4742(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
return p
def func_745f164856774bb48a7025f73906fc2a(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
return n
def func_4bdf00b6ea4943b083f7cf95adfd827d(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
return l
def func_68633a7dd7e5437fa0efc234ffa85a1a(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return n
def func_3a8012b47a0d4cdb8c076f2674c0ce96(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return l
def func_791f8043777244a5a57497b04344c706(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return p
def func_d292949f4629439ab4d5ca9e7a6e9551(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return idx
def func_060f1d867b8f4cc49cfa6f5bcfd1512b(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return _t
def func_d3f6a0edfc9a41b1a568e3bf86c30db2(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return n
def func_7412d089188b49b8a195868e6c7cc0a0(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return l
def func_9f559a3d8bcb47ee98f4937cb04b65a1(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return idx
def func_e60941f702d1438d9d43bf5d12e9dd6a(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return p
def func_a6b295b547ab42d898615f4693af25a4(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_50dae65c298c4884a5fcb06176a54e0c(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_18c558105f394bf5be0d30346ce06c5e(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_cbcf1257dc6041e6b742680a7e654504(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_6fb3bac52ba84a03b9ba57bcaf71f86c(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_cce8807b409b4e869282ffed8df7961d(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_fc8b2875b0aa468b9eb55febb32ac828(idx, l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_a4f05726f5054ea188b1d4b774dbd3f5(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)
return f
def func_04894ebca30446a69d969034e7e88a00(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)
return p
def func_7dbb02fc38344a969a09a8eee17793f2(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)
return l
def func_33e68e0ea1624dda82099d13d1ff8823(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)
return n
def func_ebfa5f10a92c4b58b837b1ac372dd0bb(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return n
def func_5453cf70c29a4201a711ab0c99d4f3e7(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return p
def func_77983b0c2cdb494286792f5159c6028c(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return l
def func_8a686ff9e17244a39d3103eb09c64d58(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return f
def func_e90c5e3ba6a0410b91010d0a1a453a48(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return p
def func_f76d536cbf9b4187996734a21709f1c0(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return l
def func_028625557ec542a28e8ea01ef29990d6(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return idx
def func_af386ef647a848d3ba5b9e8bff3cc00c(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return n
def func_a357be89685c4e0a91556584de5a77f5(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return f
def func_937a52a7e4fc4ed6bdc75580bf298966(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return idx
def func_bf733f31d02f46f7a973c5e2bb5ee91d(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return n
def func_5ff61c293bff49c0ae205945213c78c2(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return l
def func_cf45ed5e5c41479eaa4e6f4e588fd430(l, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return p
def func_cd3f27c0a13143ec82aef99bd520572a(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return _t
def func_876c5b8f97ad4e5888d948ed030ae829(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return n
def func_ecf428ea8aab46b3bccd7da84517160c(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return l
def func_3386ab41d23e4b7088678031a56111af(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return idx
def func_0f9096ccda314f97801224489279c366(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return p
def func_b6325d480a2747b9a79d45a89b48f2a8(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_3b43249a86b349439c590bd2b1e8ad77(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_799ea175e73b40498ded9edb55549c3f(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_035a5d0764164c22b2448988e84cfa78(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_776da4d9f6eb44a8aea7e6505128e04b(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_b807b30007704917adea19bb0753c488(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_3bf341908bd14a959af36035da0fbdc7(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_1a6295e682494d4abb9fda675af8198a(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return l
def func_578b991fc8764e69afcc7591b7cf8485(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return p
def func_0bc362fb278b4359b6b4d3b4d4095e5a(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return f
def func_10c4586faf5540ef9eb3049533bf3ab2(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
return n
def func_6f4b0092e2f044a2ad6982205b028fd0(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return p
def func_72804d678cb648db87d8e6b4533cc377(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return n
def func_f721096e85a342aca4561e754c0089b4(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return f
def func_dabca126d0954c7097e2cf62bafee4e8(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return l
def func_fbd032e5050c46e68e66a81e71129dba(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return idx
def func_a6b0503503404e658a1adc54d292b349(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return f
def func_c5d008eba6ca43ee8200c1bacdbbbf42(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return n
def func_d5ff4d78e61147f7b53fc16bc77c9fbf(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return idx
def func_4a1590f22b6e4814a563cd32a5c814ac(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return p
def func_9df8f9ed52d145d89394a0b29ceae3b7(l, f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return l
def func_b6b08a2af8ef4a2999aa132f310438b4(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return _t
def func_af7d0718ad884c29b431817971cdf215(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return p
def func_2fa4f9ab4bac46339f4fb2a263729057(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return l
def func_c2af2dfc068942da84a18f2cc18bc451(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return idx
def func_1b9abd6b0b304c1b9c994ea933c487b0(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return n
def func_1860b4ab903b4fbe8414042179bad657(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_14dd1509a7b04eaabd2c3e1639411638(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_12e43e0c93b1485ebd8b3eae308531dc(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_84ccb66cb91c4d61b336fb440dabb1ec(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_d6df52c0938448f2bb874eaa2b41c635(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_1257c6987a7246cb80beeac346468fe5(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_4e6ad0344ade4ae7877e7f80fc7f22ad(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_909b52d81c7f411bb361b89400cb66c5(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return idx
def func_b0821209987843f2a4ac86f2b26d041d(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return p
def func_fbf2eefbebc24823957d6de7fefe0933(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return l
def func_71beba829061492082db93187a67f53b(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return n
def func_ec271d1a0ed94738ae2cbadef36ef071(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
return f
def func_dcfe90c969584e909e286a3c6342a330(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return l
def func_69a00f03814e4d4db6ca0498e997fb18(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return p
def func_2698fcf1462f46a79a9c4c4f3fa39c09(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return f
def func_2b994cfde08848c6ba04bf8d0db18dd1(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return n
def func_ae586fb49dfa430c8f2e33066070498b(f, n):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return idx
def func_c40c57bdeff243a0bd08e0e62cf0ace4(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return n
def func_fe7aae797ff4407da202a829b717d641(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return idx
def func_4168139cbbba4a8aaa5e861d4c6e1909(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return f
def func_3294af1ebd2b431bbadf23bf6c8c9ec9(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return l
def func_9fd2e0939f364538abb16e2139baf9bd(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return p
def func_4ae0e52b2b6247a3ab5d6e4105a6f3b7(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return _t
def func_7e0d58620f0240d78074177178f27531(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_1ba18f86207b4a52bd34bb01a098fbf8(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_180fc8665cf34ebb964951983125ed44(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_172e9042ddfa47b899ddf04a65805a4c(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_a02fccb11c664d7aa214044184bc8613(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_5d4c595179e543958d8824f4c193f377(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_7ebe45197a4f402f97e60c466fadb0ca(l, n, p, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_cb479cb7bd224b42a2eaefe35d31fc25(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return l
def func_ffe12f16789b4896affd0a951ab4170c(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return f
def func_d77ce19309cb47508507a016df4a3dbb(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return n
def func_5165c1531ea84003add0c7e64112cd5f(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return p
def func_8fb112e1b2704a59b5c6d37e82659954(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)
return idx
def func_f8b81336d4964c7db2fd94c3bf01b212(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return f
def func_fec89a827ffb458eb39aad7cfe5342ee(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return n
def func_8283d0391e874b63bc84e270c1bdfc29(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return _t
def func_f5d39d0c08334371bd203f46813b85db(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return l
def func_0bbdfd647bac4cf98545fc80a2baec47(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return p
def func_b7945cde5fd147278a1af0e8ed7ee739(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return idx
def func_902960d714e5456da9c0cf1009a5a648(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_a1621435f57b40498f633c7cb8ecab43(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_56dfc59aca1f4703926673bdc6ed7abf(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_c7c95c3456724096922c466472813271(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_8b512f0fe125478bbab5e448d1f7a93b(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_e8334c58eff14baaaee8dbd07ea52381(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_8a5dee05ef894109bfb578383d793801(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_5341a42b1f8249fd958d79ef95b292b3(l, f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return f
def func_0f34f636d50543888226ae8bb47e2c44(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return n
def func_66093292a3344133bea469b7315eedd2(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return idx
def func_cc3fdd7fc3d54ca18cba35ca5634e37f(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return _t
def func_7e1820e4f1af4139b1aafbd193e755ed(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return f
def func_78350f7fbcd34ccd97811f6552334279(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return l
def func_d30d7ecb5d7843419f677287d832c1b2(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
return p
def func_6bfce8340b76408ba3fbbebf4ed60c4d(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_44db3e6d85aa4aa0bea269758fc1e235(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_3286ec290f424adaa09c339ba3d2063f(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_015dcf35780348d2a9a266e32b648c2c(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_05e15176604847979133fcb1a8fea6ca(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_c643e1f8da7f4a9b955c9c836574a610(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return f
def func_15400335d4d249a3b5601898402feb03(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_124dc76f65de4170b0bcb2d6dd1656ae(f, n, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_13e27a6057fc4db38afe9fa8294dca0d(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_d77ab0db60f848bea206f4af0fd9df79(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return f
def func_1209b5638d0249e5b359b86c964b49e9(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_d6d28988f5ef412983139f12a2391bb8(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_bc438553c9434d95b520e4f07241cac4(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_6be6dd8c6bfa4a2e8e86fa55e8db48cf(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_799ffc9a76014feca93153cf45492c82(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_2c4e0c5e3f8b4498b0c22ddec3fe8830(f, _t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())(len(l) == n)(len(p) == n)
idx = range(n)
idx.sort(cmp=cmp)('Case #%d:' % (_t + 1))
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_03ccfe1a45c14303aab26279e5a087d1(l, p):
    """Open the Code Jam input file, consume its first line (the test-case
    count) and hand back the still-open file object."""
    def cmp(idx1, idx2):
        # Order by p/l ratio, largest first, comparing cross products so
        # everything stays in exact integers; ties go to the smaller index.
        lhs = p[idx1] * l[idx2]
        rhs = p[idx2] * l[idx1]
        if lhs != rhs:
            return -1 if lhs > rhs else 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    return f
def func_3fcb23bfa97941f391deeca3206ef28a(l, p):
    """Open the Code Jam input file and return the test-case count parsed
    from its first line (the file object is left open)."""
    def cmp(idx1, idx2):
        # Order by p/l ratio, largest first, comparing cross products so
        # everything stays in exact integers; ties go to the smaller index.
        lhs = p[idx1] * l[idx2]
        rhs = p[idx2] * l[idx1]
        if lhs != rhs:
            return -1 if lhs > rhs else 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    return t
def func_0f2f1d1999074a10888dc0f940ea3b5d(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns idx from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products (exact integers);
        # ties broken by the smaller original index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return idx
def func_719998d060074ac3a0242ae24ac5cac4(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns n from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return n
def func_20943c27564e4ec991d2e92df10faeaf(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns l from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return l
def func_6a4204540a4245559d882722d8671f6f(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns the test-case count t.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return t
def func_eb412b3d1ba34742b28cf48ad3d63fb4(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns the last case index _t.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return _t
def func_2cb80b317ea444c985d9b5b2e4713885(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns f (still open).
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return f
def func_24368e21f9534b1bb3a1149d3226cb0c(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns p from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return p
def func_30e13bb299264702991a726318fd0f71(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns the last inner counter _i.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return _i
def func_659d7dfd42b4419d9565a8e101429fb2(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them as one
    'Case #k:' line per case.  Returns the last printed index ix.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return ix
def func_5ad499a7e5084a27877e828d497b6514(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns p from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print after the loop above -- looks like a generation artifact;
        # confirm the intended output format before relying on it.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return p
def func_c0842300a5494e5b9c5438eb523a9f00(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns the last inner counter _i.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return _i
def func_7df71f63bde04a6582e501df252f42ac(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns the last case index _t.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return _t
def func_d3853f59084640e3bf44c28e5288c050(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns l from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return l
def func_332799381c4f4d61bf584ab145e8c878(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns n from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return n
def func_6d7e267fb1f746debd5d52aec3f811e2(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns t unchanged.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return t
def func_c2b5aae19bc346c7a8c62924b5140fe5(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns idx from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return idx
def func_f28d65dfcc8c45b49ea0c7e86c99703a(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns f (still open).
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return f
def func_69368393ad4f479d80d75c1cbaa5b1ea(f, t):
    """Process t test cases from open file f: read n, lengths l and weights
    p, sort indices by decreasing p/l and print them per 'Case #k:' line.
    Returns the last printed index ix.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return ix
def func_f2f172c923e34a9f9c94d7de2d7e53e9(_i, l, ix, f, n, p):
    """Print index ix (followed by a space via the trailing comma, unless
    _i == n - 1 where a newline ends the line), close f and return ix.
    l and p are only captured by the unused local cmp helper.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    if _i != n - 1:
        print ix,
    else:
        print ix
    f.close()
    return ix
def func_72b197b3adef47a9a89da790a6b8df3d(_i, l, ix, f, n, p):
    """Print index ix (trailing space unless _i == n - 1, then newline),
    close f and return the closed file object f.
    l and p are only captured by the unused local cmp helper.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    if _i != n - 1:
        print ix,
    else:
        print ix
    f.close()
    return f
def func_b814cafa70e346d98a0eff8122d3d34c(_i, l, ix, f, n, p):
    """Print index ix (trailing space unless _i == n - 1, then newline),
    close f and return _i.
    l and p are only captured by the unused local cmp helper.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    if _i != n - 1:
        print ix,
    else:
        print ix
    f.close()
    return _i
def func_7e1de85e9df0437a916cf259b1e84914(_i, l, ix, f, n, p):
    """Print index ix (trailing space unless _i == n - 1, then newline),
    close f and return the lengths list l.
    l and p are only captured by the unused local cmp helper.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    if _i != n - 1:
        print ix,
    else:
        print ix
    f.close()
    return l
def func_439795d75ee245dfa263433c9be83aaf(_i, l, ix, f, n, p):
    """Print index ix (trailing space unless _i == n - 1, then newline),
    close f and return the weights list p.
    l and p are only captured by the unused local cmp helper.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    if _i != n - 1:
        print ix,
    else:
        print ix
    f.close()
    return p
def func_9d87efa77880484f9988813f211a3467(_i, l, ix, f, n, p):
    """Print index ix (trailing space unless _i == n - 1, then newline),
    close f and return n.
    l and p are only captured by the unused local cmp helper.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    if _i != n - 1:
        print ix,
    else:
        print ix
    f.close()
    return n
def func_07f99c1341434b3493e4f5d00bf1c55b():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns the last printed index ix.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products (exact integers);
        # ties broken by the smaller original index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return ix
def func_a8202435de414d64a65156c28c7c8347():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns idx from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return idx
def func_6e183becd4b54e499b6e5e2bd0489e42():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns the open file object f.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return f
def func_0dc5057bd0ee490ab58b44481d63ece5():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns p from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return p
def func_2f0fb2f9f66f43868f3082d375fa0ec4():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns l from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return l
def func_8d9af47573524d8ba199b9614459c2aa():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns the last case index _t.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return _t
def func_e14806f99b09421db16a2ae0f73a3edc():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns the last inner counter _i.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return _i
def func_c3b3cd68177443acad8d6d1f5187653b():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns n from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return n
def func_91c679e6e35e45239577bb63b4e43a2d():
    """Solve all cases from codejam/test_files/Y12R5P1/A.in: per case read
    n, lengths l and weights p, and print the indices sorted by decreasing
    p/l as one 'Case #k:' line.  Returns the test-case count t.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    f = open('codejam/test_files/Y12R5P1/A.in')
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
    return t
def func_75b8a4c968fc4471b0f5495a1a6b54d4(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them per
    'Case #k:' line.  Returns p from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print after the loop above -- looks like a generation artifact;
        # confirm the intended output format before relying on it.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return p
def func_524d6dbab056473bb41647e973cd4da1(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them per
    'Case #k:' line.  Returns idx from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return idx
def func_8770e306a4544c1f828881334bb821d6(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them per
    'Case #k:' line.  Returns the last case index _t.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return _t
def func_4dafd8a8b7414070b7acc88170132096(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them per
    'Case #k:' line.  Returns l from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return l
def func_72b93e27adfb49a5aec96024f453d95e(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them per
    'Case #k:' line.  Returns n from the last case.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return n
def func_a3002c794d4247e6a03296cb8843bfc6(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them per
    'Case #k:' line.  Returns the test-case count t.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return t
def func_e4566d4ad3294c6c9812e728e295d27c(f):
    """Process every test case from open file f: read n, lengths l and
    weights p, sort indices by decreasing p/l and print them per
    'Case #k:' line.  Returns the last inner counter _i.
    """
    def cmp(idx1, idx2):
        # Compare p/l ratios via cross products; ties -> smaller index.
        if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
            return -1
        if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
            return 1
        return idx1 - idx2
    t = int(f.readline())
    for _t in xrange(t):
        n = int(f.readline())
        l = map(int, f.readline().split())
        p = map(int, f.readline().split())
        assert len(l) == n
        assert len(p) == n
        idx = range(n)
        idx.sort(cmp=cmp)
        print 'Case #%d:' % (_t + 1),  # trailing comma: indices follow on this line
        for _i, ix in enumerate(idx):
            if _i != n - 1:
                print ix,  # space-separated
            else:
                print ix  # newline after the last index
        # NOTE(review): this trailing conditional repeats the last index
        # print -- looks like a generation artifact; confirm before relying
        # on the output format.
        if _i != n - 1:
            print ix,
        else:
            print ix
    return _i
def func_1c14feee7f474d629e1626d35296d64c(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return f
def func_1dd83b42af21402eae02974be8421214(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_94a6288ba7be44d8ae7af5d7e7b7cd5d(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return t
def func_48f07d6ded244a4586c06f7a83a3601c(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return l
def func_53cc00a4807d4d91a0e43ed2ea06f820(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return p
def func_81f0e9e87cb74c929b7f5367516d393d(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return ix
def func_908ddd02fdea44e7a951c63cc2035678(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return idx
def func_5c885276585a43d191c6c545b66924fc(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return _t
def func_f96dd4ba043b44f4ae17c0301c1e9ce1(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return n
def func_6204a68aaded4429955305f954e99a0a(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return f
def func_82d3d843d78f488cb5bb0986633cac50(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return _i
def func_3cf0adf9413a4e0289820f5ba5ed59c9():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return f
def func_12b667d2b07f46788b95cf34df1e960d():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_2d8bf803214c494c854153df4a090342():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_6df2282e28c7454a90a392d744361ba6():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_844800436b934eb486ff9fbb47c19c1b():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return l
def func_645b924f56eb4335a602fe4de4d593e7():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return t
def func_d889c4aa0d5543a7a46a7fbe32e9e98c():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return n
def func_b4da8167ceff4df8a572e1c831d4f5f1():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return p
def func_737622a22738460c93026f1008068b6b():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_1a4cde10fe9d4b06b10d714f837bdff2(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return _t
def func_9890ed1c9cb24fec8554ba2cb9f25b1f(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return n
def func_56e1a51faaab426083263f6c462f1220(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return _i
def func_7b15e886fc9c4ffe809f94e91dbe72fe(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return idx
def func_eec35fe0017440f3be64f9dc349c1b5d(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return t
def func_01ba2346c3aa4e82b257646375217cff(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return ix
def func_834776f47a0a434d8030755156983193(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return f
def func_1999d52abc8a4467bc512bf6d89d5102(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return l
def func_473d158e367547b79149699329d10eb8(f):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return p
def func_b64a8b343f0c4a3bb08eea3133d9e128():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return idx
def func_09e393aee66246b4966ad8db4d60947f():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return ix
def func_042ccd2d0e444068b4b268c1cd194807():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return l
def func_d6319031ba9d44bd8a057fe4deed201b():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return _t
def func_b8d32ea8204743a4bd888914cfb46325():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return f
def func_d35ee87bbd61450a9f1acd7a038d2382():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return t
def func_077fddbeb1084e9d8f2daf07bbd65cc8():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return p
def func_841ff7fc6de84595807ba5b10f55065b():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return n
def func_24e15b3c93d54c36bab8ba719d563fbd():
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
f = open('codejam/test_files/Y12R5P1/A.in')
t = int(f.readline())
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
if _i != n - 1:
print ix,
else:
print ix
f.close()
return _i
def func_19e1161b36284337bea05d4cb0abef5a(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return l
def func_c214b178a8f84449ac41899ce936df6c(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return n
def func_baeeb6ebcbec4be18b25fd7db8d61c27(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return t
def func_7d8282287db34f42bd67f0b0465d56ee(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return f
def func_23d192593ba84dd9a921805d7fe02d9b(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return p
def func_a0c45844a960429fa35ab850d9591bf8(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_fed6d7e19a14429e9b812236d9d6aac3(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return _t
def func_175c68b9ab914dcb8518565f5905ebf5(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return idx
def func_8e163476f03c4f40aeca16c53ef5f418(f, t):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
for _t in xrange(t):
n = int(f.readline())
l = map(int, f.readline().split())
p = map(int, f.readline().split())
assert len(l) == n
assert len(p) == n
idx = range(n)
idx.sort(cmp=cmp)
print 'Case #%d:' % (_t + 1),
for _i, ix in enumerate(idx):
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_964d2743bb444b28af4c8dcd6c2dbe85(_i, l, ix, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
if _i != n - 1:
print ix,
else:
print ix
return _i
def func_8a3f9e2ca6604f13929e2cabd50097e1(_i, l, ix, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
if _i != n - 1:
print ix,
else:
print ix
return p
def func_cc3a06cf130f40a89dd443170c7e42fa(_i, l, ix, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
if _i != n - 1:
print ix,
else:
print ix
return n
def func_d636e107dc9240bc80002d88b19e6d14(_i, l, ix, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
if _i != n - 1:
print ix,
else:
print ix
return ix
def func_8bd399474fe84f1588c7707209b60caf(_i, l, ix, n, p):
def cmp(idx1, idx2):
if p[idx1] * l[idx2] > p[idx2] * l[idx1]:
return -1
if p[idx1] * l[idx2] < p[idx2] * l[idx1]:
return 1
return idx1 - idx2
if _i != n - 1:
print ix,
else:
print ix
return l
| 26.218346
| 86
| 0.478965
| 19,899
| 139,770
| 3.31092
| 0.016735
| 0.045079
| 0.05918
| 0.067634
| 0.862486
| 0.862486
| 0.862486
| 0.862486
| 0.862486
| 0.862
| 0
| 0.11283
| 0.36437
| 139,770
| 5,330
| 87
| 26.223265
| 0.628757
| 0
| 0
| 0.937278
| 0
| 0
| 0.018309
| 0.006911
| 0
| 0
| 0
| 0
| 0.036032
| 0
| null | null | 0
| 0.000445
| null | null | 0.109208
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
824181423809e06d738018b26f0645d03cb19e25
| 49
|
py
|
Python
|
examples/dwf/__init__.py
|
useblocks/dwf
|
debfb79cecfa57310627c78c4e5c68e21f7c7b6f
|
[
"CC-BY-4.0"
] | 6
|
2017-11-24T08:47:06.000Z
|
2021-06-25T12:02:06.000Z
|
examples/dwf/__init__.py
|
useblocks/dwf
|
debfb79cecfa57310627c78c4e5c68e21f7c7b6f
|
[
"CC-BY-4.0"
] | null | null | null |
examples/dwf/__init__.py
|
useblocks/dwf
|
debfb79cecfa57310627c78c4e5c68e21f7c7b6f
|
[
"CC-BY-4.0"
] | null | null | null |
from .dwf import Dwf
from .dwf import Frustration
| 24.5
| 28
| 0.816327
| 8
| 49
| 5
| 0.5
| 0.35
| 0.65
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 49
| 2
| 28
| 24.5
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4132dfcbc794091a46363f841d03fa3fc7757762
| 161
|
py
|
Python
|
sdc/tests/tests_perf/__init__.py
|
sklam/sdc
|
36340f991c9354e626dea6d3511f3f81d09448aa
|
[
"BSD-2-Clause"
] | null | null | null |
sdc/tests/tests_perf/__init__.py
|
sklam/sdc
|
36340f991c9354e626dea6d3511f3f81d09448aa
|
[
"BSD-2-Clause"
] | null | null | null |
sdc/tests/tests_perf/__init__.py
|
sklam/sdc
|
36340f991c9354e626dea6d3511f3f81d09448aa
|
[
"BSD-2-Clause"
] | null | null | null |
from sdc.tests.tests_perf.test_perf_unicode import *
from sdc.tests.tests_perf.test_perf_series_str import *
from sdc.tests.tests_perf.test_perf_series import *
| 40.25
| 55
| 0.850932
| 28
| 161
| 4.535714
| 0.321429
| 0.165354
| 0.283465
| 0.401575
| 0.874016
| 0.874016
| 0.874016
| 0.645669
| 0.645669
| 0
| 0
| 0
| 0.074534
| 161
| 3
| 56
| 53.666667
| 0.852349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
68e41ba2012a9dfd36b15c1f2fe78a29438f38b5
| 165
|
py
|
Python
|
pwnlib/encoders/i386/__init__.py
|
tkmikan/pwntools
|
1238fc359eb72313d3f82849b2effdb7063ab429
|
[
"MIT"
] | 8,966
|
2015-01-02T11:58:14.000Z
|
2022-03-31T21:19:56.000Z
|
pwnlib/encoders/i386/__init__.py
|
tkmikan/pwntools
|
1238fc359eb72313d3f82849b2effdb7063ab429
|
[
"MIT"
] | 1,401
|
2015-01-01T00:56:22.000Z
|
2022-03-31T16:19:53.000Z
|
pwnlib/encoders/i386/__init__.py
|
tkmikan/pwntools
|
1238fc359eb72313d3f82849b2effdb7063ab429
|
[
"MIT"
] | 1,844
|
2015-01-07T04:38:06.000Z
|
2022-03-30T03:54:46.000Z
|
from __future__ import absolute_import
from pwnlib.encoders.i386 import ascii_shellcode
from pwnlib.encoders.i386 import delta
from pwnlib.encoders.i386 import xor
| 27.5
| 48
| 0.860606
| 24
| 165
| 5.666667
| 0.458333
| 0.220588
| 0.397059
| 0.485294
| 0.617647
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060811
| 0.10303
| 165
| 5
| 49
| 33
| 0.858108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6b99d703117c125b67ee7ab24c0fbe5e8ffa533a
| 11,992
|
py
|
Python
|
tw_cooking_game_puzzle/role_master_builder.py
|
tatsubori/visual-hints-textworld-rl
|
a2b4818f46bed10d08f1a0747ac548e6419f0b46
|
[
"Apache-2.0"
] | 4
|
2020-10-09T16:21:48.000Z
|
2021-01-06T17:41:04.000Z
|
tw_cooking_game_puzzle/role_master_builder.py
|
tatsubori/visual-hints-textworld-rl
|
a2b4818f46bed10d08f1a0747ac548e6419f0b46
|
[
"Apache-2.0"
] | 1
|
2022-02-26T03:29:39.000Z
|
2022-02-26T03:29:39.000Z
|
tw_cooking_game_puzzle/role_master_builder.py
|
tatsubori/visual-hints-textworld-rl
|
a2b4818f46bed10d08f1a0747ac548e6419f0b46
|
[
"Apache-2.0"
] | 4
|
2020-10-20T05:17:02.000Z
|
2022-02-26T03:29:27.000Z
|
def write_clue(name_file, way, rooms_dict: dict, dict_game_goals: dict, dict_rooms_numbers: dict, death_room: str,
               name_type=["literal", 'random_numbers', 'room_importance']):
    '''
    write a txt file for that will be used by the game master class
    :param death_room: if there is a death_room
    :param name_file: the name of the file
    :param way: a table that gives the way between the cooking place and the room where is the hint
    :param rooms_dict:dictionary of the rooms see 'def build_dict_rooms' in cooking_map_builder
    :param dict_game_goals: a dictionary with the main goals and place of important elements for the game
                            see( def build_dict_game_goals) in cooking_map_builder
    :param dict_rooms_numbers: a dictionary with the number of each room in the mode 'random_numbers'
                               see( def draw_map) in cooking_map_builder
    :param name_type: three possibilities 'literal' : the true name of the room (eg. kitchen, bedroom)
                      'random_numbers': a random number is attributed to each room
                      'room_importance': each room receive a number based on the importance of the room
                      see( def draw_map) in cooking_map_builder
    :return: dict mapping a compass direction ('south'/'north'/'west'/'east') to the neighbouring
             room through which the death room is reached; empty dict when death_room is None
    '''
    # NOTE(review): the default value of name_type is the *list* of the three allowed
    # options, but the code below only compares name_type == '<option>', so a call that
    # relies on the default writes no clue section at all (just the markers and, if any,
    # the death-room part).  Presumably callers always pass one of the three strings --
    # confirm against the call sites.
    # Collect the distinct rooms that host a secondary goal (duplicates skipped).
    list_rooms_s_goals = []
    if 'secondary_goals' in dict_game_goals:
        for sgoal in iter(dict_game_goals['secondary_goals']):
            if dict_game_goals['secondary_goals'][sgoal] not in list_rooms_s_goals:
                list_rooms_s_goals.append(dict_game_goals['secondary_goals'][sgoal])
    # NOTE(review): the file is opened without a with-block and only closed at the very
    # end, so the handle leaks if an exception is raised in between.
    # The mixed '\r\n' / '\r' / '\r \n' line endings below are kept verbatim; the game
    # master class presumably parses these exact separators -- do not "normalize" them.
    f = open(name_file, "w+")
    f.write('####clue####\r\n')
    if name_type == 'literal':
        # --- clues written with the real room names (kitchen, bedroom, ...) ---
        f.write('\r \n')
        f.write('easy:\r')
        # easy: states the player's current room (last room of `way`) plus all goals
        clue = 'you are in the {}, take the ingredients in '.format(way[-1])
        if len(list_rooms_s_goals) == 0:
            clue += 'inventory,'
        else:
            list_rooms =[]
            for sgoal in iter(dict_game_goals['secondary_goals']):
                if dict_game_goals['secondary_goals'][sgoal] not in list_rooms:
                    clue = clue + 'the {}, '.format(dict_game_goals['secondary_goals'][sgoal])
                    list_rooms.append(dict_game_goals['secondary_goals'][sgoal])
        clue = clue + 'and cook in the {}, '.format(dict_game_goals['cooking_location'])
        if death_room is not None:
            clue = clue + 'and avoid the death room which is the {}'.format(death_room)
        clue = clue+'\r\n'
        f.write(clue)
        f.write('\r \n')
        f.write('medium:\r')
        # medium: same as easy but without revealing the player's current room
        clue = 'take the ingredients in '
        if len(list_rooms_s_goals) == 0:
            clue += 'inventory,'
        else:
            list_rooms = []
            for sgoal in iter(dict_game_goals['secondary_goals']):
                if dict_game_goals['secondary_goals'][sgoal] not in list_rooms:
                    clue = clue + 'the {}, '.format(dict_game_goals['secondary_goals'][sgoal])
                    list_rooms.append(dict_game_goals['secondary_goals'][sgoal])
        clue = clue + 'and cook in the {}, '.format(dict_game_goals['cooking_location'])
        if death_room is not None:
            clue = clue + 'and avoid the death room which is the {}'.format(death_room)
        clue = clue + '\r\n'
        f.write(clue)
        f.write('\r \n')
        f.write('hard:\r')
        # hard: lists the goals, then only *hints* which rooms are uninteresting
        clue = 'take the ingredients in '
        if len(list_rooms_s_goals) == 0:
            clue += 'inventory,'
        else:
            list_rooms = []
            for sgoal in iter(dict_game_goals['secondary_goals']):
                if dict_game_goals['secondary_goals'][sgoal] not in list_rooms:
                    clue = clue + 'the {}, '.format(dict_game_goals['secondary_goals'][sgoal])
                    list_rooms.append(dict_game_goals['secondary_goals'][sgoal])
        clue = clue + 'and cook in the {}'.format(dict_game_goals['cooking_location'])
        if len(rooms_dict) != 1:
            clue = clue +', note that '
            uninteresting_place = False
            for k in rooms_dict:
                if k not in list_rooms_s_goals and k != dict_game_goals['cooking_location']:
                    clue = clue + 'the {}, '.format(k)
                    uninteresting_place = True
            if uninteresting_place:
                # clue[:-2] drops the trailing ', ' left by the loop above
                clue = clue[:-2] + ' are uninteresting places'
        clue = clue + '\r\n'
        f.write(clue)
        f.write('\r \n')
        f.write('very hard:\r')
        # very hard: only the uninteresting rooms are named, nothing else
        clue = ''
        if len(rooms_dict) != 1:
            uninteresting_place = False
            for k in rooms_dict:
                if k not in list_rooms_s_goals and k != dict_game_goals['cooking_location']:
                    clue = clue + 'the {}, '.format(k)
                    uninteresting_place = True
            if uninteresting_place:
                clue = clue[:-2] + ' are uninteresting places'
        else:
            clue = clue + 'there is only one room'
        clue = clue + '\r\n'
        f.write(clue)
        f.write('\n')
    if name_type == 'random_numbers':
        # --- same four difficulty levels, but rooms are referred to by the random
        # numbers assigned in dict_rooms_numbers instead of their names ---
        f.write('\n')
        f.write('easy:\r')
        clue = 'you are in the room {}, take the ingredients in '.format(dict_rooms_numbers[way[-1]])
        if len(list_rooms_s_goals) == 0:
            # NOTE(review): 'I,' here, where every other branch writes 'inventory,' --
            # looks like a typo; confirm what the game master class expects.
            clue += 'I,'
        else:
            list_rooms = []
            for sgoal in iter(dict_game_goals['secondary_goals']):
                if dict_game_goals['secondary_goals'][sgoal] not in list_rooms:
                    clue = clue + 'the room {}, '.format(dict_rooms_numbers[dict_game_goals['secondary_goals'][sgoal]])
                    list_rooms.append(dict_game_goals['secondary_goals'][sgoal])
        clue = clue + 'and cook in the room {}, '.format(dict_rooms_numbers[dict_game_goals['cooking_location']])
        if death_room is not None:
            clue = clue + 'and avoid the death room which is the {}'.format(dict_rooms_numbers[death_room])
        clue = clue + '\r\n'
        f.write(clue)
        f.write('\r \n')
        f.write('medium:\r')
        clue = 'take the ingredients in '
        if len(list_rooms_s_goals) == 0:
            clue += 'inventory,'
        else:
            list_rooms = []
            for sgoal in iter(dict_game_goals['secondary_goals']):
                if dict_game_goals['secondary_goals'][sgoal] not in list_rooms:
                    clue = clue + 'the room {}, '.format(dict_rooms_numbers[dict_game_goals['secondary_goals'][sgoal]])
                    list_rooms.append(dict_game_goals['secondary_goals'][sgoal])
        clue = clue + 'and cook in the room {}, '.format(dict_rooms_numbers[dict_game_goals['cooking_location']])
        if death_room is not None:
            clue = clue + 'and avoid the death room which is the {}'.format(dict_rooms_numbers[death_room])
        clue = clue + '\r\n'
        f.write(clue)
        f.write('\r \n')
        f.write('hard:\r')
        clue = 'take the ingredients in '
        if len(list_rooms_s_goals) == 0:
            clue += 'inventory,'
        else:
            list_rooms = []
            for sgoal in iter(dict_game_goals['secondary_goals']):
                if dict_game_goals['secondary_goals'][sgoal] not in list_rooms:
                    clue = clue + 'the room {}, '.format(dict_rooms_numbers[dict_game_goals['secondary_goals'][sgoal]])
                    list_rooms.append(dict_game_goals['secondary_goals'][sgoal])
        clue = clue + 'and cook in the room {}'.format(dict_rooms_numbers[dict_game_goals['cooking_location']])
        if len(rooms_dict) != 1:
            clue = clue + ', note that '
            uninteresting_place = False
            for k in rooms_dict:
                if k not in list_rooms_s_goals and k != dict_game_goals['cooking_location']:
                    clue = clue + 'the room {}, '.format(dict_rooms_numbers[k])
                    uninteresting_place = True
            if uninteresting_place:
                clue = clue[:-2] + ' are uninteresting places'
        clue = clue + '\r\n'
        f.write(clue)
        f.write('\r \n')
        f.write('very hard:\r')
        clue = ''
        if len(rooms_dict) != 1:
            uninteresting_place = False
            for k in rooms_dict:
                if k not in list_rooms_s_goals and k != dict_game_goals['cooking_location']:
                    clue = clue + 'the room {}, '.format(dict_rooms_numbers[k])
                    uninteresting_place = True
            if uninteresting_place:
                clue = clue[:-2] + ' are uninteresting places'
        else:
            clue = clue + 'there is only one room'
        clue = clue + '\r\n'
        f.write(clue)
        f.write('\n')
    if name_type == 'room_importance':
        # --- clues expressed in terms of the importance codes drawn on the map:
        # 1 = main quest room, 2 = secondary quest room, 0 = no interest, -1 = death ---
        f.write('\n')
        f.write('easy:\r')
        clue = 'Go in rooms with a 2, finish by the room with 1'
        if death_room is not None:
            clue += ', avoid room -1'
        f.write(clue)
        f.write('\r \n')
        f.write('medium:\r')
        clue = 'rooms with 0 have no interest; room with 1 is the place of the main quest; rooms with 2 are the ' \
               'places for secondary quest'
        if death_room is not None:
            clue += ' and avoid room with -1 which is the death room'
        f.write(clue)
        f.write('\r \n')
        f.write('hard:\r')
        clue = 'Go in rooms with a 2 finish by the room with 1'
        f.write(clue)
        f.write('\r \n')
        f.write('very hard:\r')
        clue = 'rooms with a 2 are less important than room with 1 and rooms with 0 have no importance'
        f.write(clue)
        f.write('\r \n')
    f.write('####end clue###\r')
    rooms_leading_to_death_room = dict()
    if death_room is not None:
        f.write('\r \n')
        f.write('####death room###\r')
        # rooms_dict[death_room] holds the four neighbours of the death room; the
        # index -> direction mapping used here is 0:'south', 1:'north', 2:'west', 3:'east'.
        for i in range(4):
            if rooms_dict[death_room][i] is not None:
                if i == 0:
                    rooms_leading_to_death_room['south'] = rooms_dict[death_room][i]
                elif i == 1:
                    rooms_leading_to_death_room['north'] = rooms_dict[death_room][i]
                elif i == 2:
                    rooms_leading_to_death_room['west'] = rooms_dict[death_room][i]
                else:
                    rooms_leading_to_death_room['east'] = rooms_dict[death_room][i]
        f.write('\n')
        f.write('easy:\r')
        clue = 'the {} is the death room,'.format(death_room)
        for k in iter(rooms_leading_to_death_room):
            clue = clue + 'the {} go to death room by {}'.format(rooms_leading_to_death_room[k], k) + ' '
        f.write(clue)
        f.write('\r \n')
        f.write('medium:\r')
        clue = ''
        for k in iter(rooms_leading_to_death_room):
            clue = clue + 'the {} go to death room by {}'.format(rooms_leading_to_death_room[k], k) + ' '
        f.write(clue)
        f.write('\r \n')
        f.write('hard:\r')
        clue = ''
        for k in iter(rooms_leading_to_death_room):
            clue = clue + 'avoid the {} of {}'.format(k, rooms_leading_to_death_room[k]) + ' '
        f.write(clue)
        f.write('\r \n')
        f.write('very hard:\r')
        # NOTE(review): the index -> direction mapping is inverted here (0:'north',
        # 1:'south', 2:'east', 3:'west') compared to rooms_leading_to_death_room above;
        # presumably intentional because this clue is phrased *from* the death room's
        # point of view -- confirm.
        clue = 'from the death room there is '
        for i in range(4):
            if rooms_dict[death_room][i] is not None:
                if i == 0:
                    clue = clue + 'the {} on the {},'.format(rooms_dict[death_room][i], 'north')
                elif i == 1:
                    clue = clue + 'the {} on the {},'.format(rooms_dict[death_room][i], 'south')
                elif i == 2:
                    clue = clue + 'the {} on the {},'.format(rooms_dict[death_room][i], 'east')
                else:
                    clue = clue + 'the {} on the {},'.format(rooms_dict[death_room][i], 'west')
        f.write(clue)
        f.write('\r \n')
        f.write('####end death room###\r')
    f.close()
    return rooms_leading_to_death_room
| 42.828571
| 123
| 0.555787
| 1,599
| 11,992
| 3.957473
| 0.079425
| 0.054046
| 0.084229
| 0.093869
| 0.80689
| 0.765013
| 0.745101
| 0.723925
| 0.7067
| 0.7067
| 0
| 0.004437
| 0.323466
| 11,992
| 280
| 124
| 42.828571
| 0.775545
| 0.089143
| 0
| 0.823789
| 0
| 0.004405
| 0.220636
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004405
| false
| 0
| 0.013216
| 0
| 0.022026
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6bcb586e65df2929d0fc8133657e15df23acc2de
| 135
|
py
|
Python
|
week4/runserver.py
|
adityatripathiiit/Python-Based-Automated-Verilog-Code-Generator-For-Arithmetic-Unit
|
a4ac003a842dc0f6c48d2e302c1cac218377f31d
|
[
"Apache-2.0"
] | 1
|
2021-10-01T14:43:44.000Z
|
2021-10-01T14:43:44.000Z
|
week4/runserver.py
|
adityatripathiiit/Python-Based-Automated-Verilog-Code-Generator-For-Arithmetic-Unit
|
a4ac003a842dc0f6c48d2e302c1cac218377f31d
|
[
"Apache-2.0"
] | null | null | null |
week4/runserver.py
|
adityatripathiiit/Python-Based-Automated-Verilog-Code-Generator-For-Arithmetic-Unit
|
a4ac003a842dc0f6c48d2e302c1cac218377f31d
|
[
"Apache-2.0"
] | 3
|
2019-11-14T14:48:50.000Z
|
2021-12-07T13:51:24.000Z
|
"""Launch the week4 Django development server.

Runs ``python manage.py runserver`` from the hard-coded project directory.
"""
import subprocess

# Raw string: the original plain literal contained invalid escape sequences
# (``\S``, ``\p``, ``\d``, ``\E``, ``\w``) that only worked by accident.
DJANGO_PROJECT_DIR = r"C:\Studies\projects\digital systems\ES_203\week4\week4\week3\django_project"

if __name__ == "__main__":
    # subprocess.run with an argument list and cwd= replaces the old
    # os.system('cd "..." & python manage.py runserver') shell string: no shell
    # quoting pitfalls, and check=True raises if the server fails to start.
    subprocess.run(["python", "manage.py", "runserver"],
                   cwd=DJANGO_PROJECT_DIR, check=True)
| 45
| 123
| 0.785185
| 21
| 135
| 4.952381
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048387
| 0.081481
| 135
| 2
| 124
| 67.5
| 0.790323
| 0
| 0
| 0
| 0
| 0.5
| 0.819549
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6bcc20a95ac74d2a7da4d5b35c5f0e0c9f79d9a1
| 2,160
|
py
|
Python
|
tests/functions/magnetization/test_total_magnetization.py
|
jdalzatec/llg
|
c0acd728d29a9a821ebadc4f1e17e0327d7e238c
|
[
"MIT"
] | 4
|
2019-09-02T19:18:55.000Z
|
2021-05-05T15:04:54.000Z
|
tests/functions/magnetization/test_total_magnetization.py
|
lufvelasquezgo/llg
|
c0acd728d29a9a821ebadc4f1e17e0327d7e238c
|
[
"MIT"
] | 116
|
2020-02-09T05:19:52.000Z
|
2022-03-27T18:47:17.000Z
|
tests/functions/magnetization/test_total_magnetization.py
|
lufvelasquezgo/llg
|
c0acd728d29a9a821ebadc4f1e17e0327d7e238c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `ffortran/functions` package."""
import numpy
import pytest
from llg.functions import magnetization
@pytest.mark.repeat(10)
def test_total_magnetization_random_state(num_sites, random_state):
    """Total magnetization equals the norm of the average spin vector."""
    average_spin = numpy.sum(random_state, axis=0) / num_sites
    expected = numpy.linalg.norm(average_spin)
    assert numpy.allclose(magnetization.total_magnetization(random_state), expected)
def test_total_magnetization_FM_state_x_up(num_sites):
    """A fully aligned +x ferromagnetic state has total magnetization 1."""
    spin = numpy.zeros(3)
    spin[0] = 1.0
    state = numpy.tile(spin, (num_sites, 1))
    assert numpy.allclose(magnetization.total_magnetization(state), 1.0)
def test_total_magnetization_FM_state_x_down(num_sites):
    """A fully aligned -x ferromagnetic state still has magnetization 1."""
    spin = numpy.zeros(3)
    spin[0] = -1.0
    state = numpy.tile(spin, (num_sites, 1))
    assert numpy.allclose(magnetization.total_magnetization(state), 1.0)
def test_total_magnetization_FM_state_y_up(num_sites):
    """A fully aligned +y ferromagnetic state has total magnetization 1."""
    spin = numpy.zeros(3)
    spin[1] = 1.0
    state = numpy.tile(spin, (num_sites, 1))
    assert numpy.allclose(magnetization.total_magnetization(state), 1.0)
def test_total_magnetization_FM_state_y_down(num_sites):
    """A fully aligned -y ferromagnetic state still has magnetization 1."""
    spin = numpy.zeros(3)
    spin[1] = -1.0
    state = numpy.tile(spin, (num_sites, 1))
    assert numpy.allclose(magnetization.total_magnetization(state), 1.0)
def test_total_magnetization_FM_state_z_up(num_sites):
    """A fully aligned +z ferromagnetic state has total magnetization 1."""
    spin = numpy.zeros(3)
    spin[2] = 1.0
    state = numpy.tile(spin, (num_sites, 1))
    assert numpy.allclose(magnetization.total_magnetization(state), 1.0)
def test_total_magnetization_FM_state_z_down(num_sites):
    """A fully aligned -z ferromagnetic state still has magnetization 1."""
    spin = numpy.zeros(3)
    spin[2] = -1.0
    state = numpy.tile(spin, (num_sites, 1))
    assert numpy.allclose(magnetization.total_magnetization(state), 1.0)
def test_total_magnetization_AFM_state_x(num_sites):
    """Alternating ±x spins (antiferromagnet): net magnetization vanishes."""
    up_down_pair = [[1.0, 0.0, 0.0], [-1.0, 0.0, 0.0]]
    state = numpy.array(up_down_pair * (num_sites // 2))
    assert numpy.allclose(magnetization.total_magnetization(state), 0.0)
def test_total_magnetization_AFM_state_y(num_sites):
    """Alternating ±y spins (antiferromagnet): net magnetization vanishes."""
    up_down_pair = [[0.0, 1.0, 0.0], [0.0, -1.0, 0.0]]
    state = numpy.array(up_down_pair * (num_sites // 2))
    assert numpy.allclose(magnetization.total_magnetization(state), 0.0)
def test_total_magnetization_AFM_state_z(num_sites):
    """Alternating ±z spins (antiferromagnet): net magnetization vanishes."""
    up_down_pair = [[0.0, 0.0, 1.0], [0.0, 0.0, -1.0]]
    state = numpy.array(up_down_pair * (num_sites // 2))
    assert numpy.allclose(magnetization.total_magnetization(state), 0.0)
| 34.83871
| 79
| 0.726389
| 342
| 2,160
| 4.330409
| 0.131579
| 0.06077
| 0.054693
| 0.040513
| 0.83187
| 0.83187
| 0.794058
| 0.765699
| 0.757596
| 0.757596
| 0
| 0.051377
| 0.125926
| 2,160
| 61
| 80
| 35.409836
| 0.733051
| 0.037963
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.277778
| 1
| 0.277778
| false
| 0
| 0.083333
| 0
| 0.361111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6bcfb6a89694b3ebc7672d0d12c4cf84f7eef94e
| 72,441
|
py
|
Python
|
sdk/python/pulumi_aws/memorydb/cluster.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/memorydb/cluster.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/memorydb/cluster.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ClusterArgs', 'Cluster']
# NOTE(review): tfgen-generated wrapper (see file header) -- do not hand-edit the
# logic; regenerate from the provider schema instead.
@pulumi.input_type
class ClusterArgs:
    def __init__(__self__, *,
                 acl_name: pulumi.Input[str],
                 node_type: pulumi.Input[str],
                 auto_minor_version_upgrade: Optional[pulumi.Input[bool]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 engine_version: Optional[pulumi.Input[str]] = None,
                 final_snapshot_name: Optional[pulumi.Input[str]] = None,
                 kms_key_arn: Optional[pulumi.Input[str]] = None,
                 maintenance_window: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 name_prefix: Optional[pulumi.Input[str]] = None,
                 num_replicas_per_shard: Optional[pulumi.Input[int]] = None,
                 num_shards: Optional[pulumi.Input[int]] = None,
                 parameter_group_name: Optional[pulumi.Input[str]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 snapshot_arns: Optional[pulumi.Input[str]] = None,
                 snapshot_name: Optional[pulumi.Input[str]] = None,
                 snapshot_retention_limit: Optional[pulumi.Input[int]] = None,
                 snapshot_window: Optional[pulumi.Input[str]] = None,
                 sns_topic_arn: Optional[pulumi.Input[str]] = None,
                 subnet_group_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tls_enabled: Optional[pulumi.Input[bool]] = None):
        """
        The set of arguments for constructing a Cluster resource.
        :param pulumi.Input[str] acl_name: The name of the Access Control List to associate with the cluster.
        :param pulumi.Input[str] node_type: The compute and memory capacity of the nodes in the cluster. See AWS documentation on [supported node types](https://docs.aws.amazon.com/memorydb/latest/devguide/nodes.supportedtypes.html) as well as [vertical scaling](https://docs.aws.amazon.com/memorydb/latest/devguide/cluster-vertical-scaling.html).
        :param pulumi.Input[bool] auto_minor_version_upgrade: When set to `true`, the cluster will automatically receive minor engine version upgrades after launch. Defaults to `true`.
        :param pulumi.Input[str] engine_version: Version number of the Redis engine to be used for the cluster. Downgrades are not supported.
        :param pulumi.Input[str] final_snapshot_name: Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made.
        :param pulumi.Input[str] kms_key_arn: ARN of the KMS key used to encrypt the cluster at rest.
        :param pulumi.Input[str] maintenance_window: Specifies the weekly time range during which maintenance on the cluster is performed. It is specified as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:23:00-mon:01:30`.
        :param pulumi.Input[str] name: Name of this node.
               * `endpoint`
        :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.
        :param pulumi.Input[int] num_replicas_per_shard: The number of replicas to apply to each shard, up to a maximum of 5. Defaults to `1` (i.e. 2 nodes per shard).
        :param pulumi.Input[int] num_shards: The number of shards in the cluster. Defaults to `1`.
        :param pulumi.Input[str] parameter_group_name: The name of the parameter group associated with the cluster.
        :param pulumi.Input[int] port: The port number on which each of the nodes accepts connections. Defaults to `6379`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: Set of VPC Security Group ID-s to associate with this cluster.
        :param pulumi.Input[str] snapshot_arns: List of ARN-s that uniquely identify RDB snapshot files stored in S3. The snapshot files will be used to populate the new cluster. Object names in the ARN-s cannot contain any commas.
        :param pulumi.Input[str] snapshot_name: The name of a snapshot from which to restore data into the new cluster.
        :param pulumi.Input[int] snapshot_retention_limit: The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled. Defaults to `0`.
        :param pulumi.Input[str] snapshot_window: The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`.
        :param pulumi.Input[str] sns_topic_arn: ARN of the SNS topic to which cluster notifications are sent.
        :param pulumi.Input[str] subnet_group_name: The name of the subnet group to be used for the cluster. Defaults to a subnet group consisting of default VPC subnets.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        :param pulumi.Input[bool] tls_enabled: A flag to enable in-transit encryption on the cluster. When set to `false`, the `acl_name` must be `open-access`. Defaults to `true`.
        """
        # Required arguments are always recorded; optional ones only when explicitly
        # provided (not None), so unset values are omitted from the resource inputs.
        pulumi.set(__self__, "acl_name", acl_name)
        pulumi.set(__self__, "node_type", node_type)
        if auto_minor_version_upgrade is not None:
            pulumi.set(__self__, "auto_minor_version_upgrade", auto_minor_version_upgrade)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if engine_version is not None:
            pulumi.set(__self__, "engine_version", engine_version)
        if final_snapshot_name is not None:
            pulumi.set(__self__, "final_snapshot_name", final_snapshot_name)
        if kms_key_arn is not None:
            pulumi.set(__self__, "kms_key_arn", kms_key_arn)
        if maintenance_window is not None:
            pulumi.set(__self__, "maintenance_window", maintenance_window)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if name_prefix is not None:
            pulumi.set(__self__, "name_prefix", name_prefix)
        if num_replicas_per_shard is not None:
            pulumi.set(__self__, "num_replicas_per_shard", num_replicas_per_shard)
        if num_shards is not None:
            pulumi.set(__self__, "num_shards", num_shards)
        if parameter_group_name is not None:
            pulumi.set(__self__, "parameter_group_name", parameter_group_name)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if security_group_ids is not None:
            pulumi.set(__self__, "security_group_ids", security_group_ids)
        if snapshot_arns is not None:
            pulumi.set(__self__, "snapshot_arns", snapshot_arns)
        if snapshot_name is not None:
            pulumi.set(__self__, "snapshot_name", snapshot_name)
        if snapshot_retention_limit is not None:
            pulumi.set(__self__, "snapshot_retention_limit", snapshot_retention_limit)
        if snapshot_window is not None:
            pulumi.set(__self__, "snapshot_window", snapshot_window)
        if sns_topic_arn is not None:
            pulumi.set(__self__, "sns_topic_arn", sns_topic_arn)
        if subnet_group_name is not None:
            pulumi.set(__self__, "subnet_group_name", subnet_group_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)
        if tls_enabled is not None:
            pulumi.set(__self__, "tls_enabled", tls_enabled)

    # One getter/setter pair per argument follows; pulumi.getter maps the Python
    # snake_case attribute to the camelCase provider-schema name.
    @property
    @pulumi.getter(name="aclName")
    def acl_name(self) -> pulumi.Input[str]:
        """
        The name of the Access Control List to associate with the cluster.
        """
        return pulumi.get(self, "acl_name")

    @acl_name.setter
    def acl_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "acl_name", value)

    @property
    @pulumi.getter(name="nodeType")
    def node_type(self) -> pulumi.Input[str]:
        """
        The compute and memory capacity of the nodes in the cluster. See AWS documentation on [supported node types](https://docs.aws.amazon.com/memorydb/latest/devguide/nodes.supportedtypes.html) as well as [vertical scaling](https://docs.aws.amazon.com/memorydb/latest/devguide/cluster-vertical-scaling.html).
        """
        return pulumi.get(self, "node_type")

    @node_type.setter
    def node_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "node_type", value)

    @property
    @pulumi.getter(name="autoMinorVersionUpgrade")
    def auto_minor_version_upgrade(self) -> Optional[pulumi.Input[bool]]:
        """
        When set to `true`, the cluster will automatically receive minor engine version upgrades after launch. Defaults to `true`.
        """
        return pulumi.get(self, "auto_minor_version_upgrade")

    @auto_minor_version_upgrade.setter
    def auto_minor_version_upgrade(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "auto_minor_version_upgrade", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        # NOTE(review): the upstream provider schema supplies no documentation for
        # this field, hence the missing docstring.
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="engineVersion")
    def engine_version(self) -> Optional[pulumi.Input[str]]:
        """
        Version number of the Redis engine to be used for the cluster. Downgrades are not supported.
        """
        return pulumi.get(self, "engine_version")

    @engine_version.setter
    def engine_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "engine_version", value)

    @property
    @pulumi.getter(name="finalSnapshotName")
    def final_snapshot_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made.
        """
        return pulumi.get(self, "final_snapshot_name")

    @final_snapshot_name.setter
    def final_snapshot_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "final_snapshot_name", value)

    @property
    @pulumi.getter(name="kmsKeyArn")
    def kms_key_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the KMS key used to encrypt the cluster at rest.
        """
        return pulumi.get(self, "kms_key_arn")

    @kms_key_arn.setter
    def kms_key_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kms_key_arn", value)

    @property
    @pulumi.getter(name="maintenanceWindow")
    def maintenance_window(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the weekly time range during which maintenance on the cluster is performed. It is specified as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:23:00-mon:01:30`.
        """
        return pulumi.get(self, "maintenance_window")

    @maintenance_window.setter
    def maintenance_window(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "maintenance_window", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of this node.
        * `endpoint`
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="namePrefix")
    def name_prefix(self) -> Optional[pulumi.Input[str]]:
        """
        Creates a unique name beginning with the specified prefix. Conflicts with `name`.
        """
        return pulumi.get(self, "name_prefix")

    @name_prefix.setter
    def name_prefix(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name_prefix", value)

    @property
    @pulumi.getter(name="numReplicasPerShard")
    def num_replicas_per_shard(self) -> Optional[pulumi.Input[int]]:
        """
        The number of replicas to apply to each shard, up to a maximum of 5. Defaults to `1` (i.e. 2 nodes per shard).
        """
        return pulumi.get(self, "num_replicas_per_shard")

    @num_replicas_per_shard.setter
    def num_replicas_per_shard(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "num_replicas_per_shard", value)

    @property
    @pulumi.getter(name="numShards")
    def num_shards(self) -> Optional[pulumi.Input[int]]:
        """
        The number of shards in the cluster. Defaults to `1`.
        """
        return pulumi.get(self, "num_shards")

    @num_shards.setter
    def num_shards(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "num_shards", value)

    @property
    @pulumi.getter(name="parameterGroupName")
    def parameter_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the parameter group associated with the cluster.
        """
        return pulumi.get(self, "parameter_group_name")

    @parameter_group_name.setter
    def parameter_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "parameter_group_name", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        The port number on which each of the nodes accepts connections. Defaults to `6379`.
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter(name="securityGroupIds")
    def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Set of VPC Security Group ID-s to associate with this cluster.
        """
        return pulumi.get(self, "security_group_ids")

    @security_group_ids.setter
    def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "security_group_ids", value)

    @property
    @pulumi.getter(name="snapshotArns")
    def snapshot_arns(self) -> Optional[pulumi.Input[str]]:
        """
        List of ARN-s that uniquely identify RDB snapshot files stored in S3. The snapshot files will be used to populate the new cluster. Object names in the ARN-s cannot contain any commas.
        """
        return pulumi.get(self, "snapshot_arns")

    @snapshot_arns.setter
    def snapshot_arns(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snapshot_arns", value)

    @property
    @pulumi.getter(name="snapshotName")
    def snapshot_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of a snapshot from which to restore data into the new cluster.
        """
        return pulumi.get(self, "snapshot_name")

    @snapshot_name.setter
    def snapshot_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snapshot_name", value)

    @property
    @pulumi.getter(name="snapshotRetentionLimit")
    def snapshot_retention_limit(self) -> Optional[pulumi.Input[int]]:
        """
        The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled. Defaults to `0`.
        """
        return pulumi.get(self, "snapshot_retention_limit")

    @snapshot_retention_limit.setter
    def snapshot_retention_limit(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "snapshot_retention_limit", value)

    @property
    @pulumi.getter(name="snapshotWindow")
    def snapshot_window(self) -> Optional[pulumi.Input[str]]:
        """
        The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`.
        """
        return pulumi.get(self, "snapshot_window")

    @snapshot_window.setter
    def snapshot_window(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snapshot_window", value)

    @property
    @pulumi.getter(name="snsTopicArn")
    def sns_topic_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the SNS topic to which cluster notifications are sent.
        """
        return pulumi.get(self, "sns_topic_arn")

    @sns_topic_arn.setter
    def sns_topic_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sns_topic_arn", value)

    @property
    @pulumi.getter(name="subnetGroupName")
    def subnet_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the subnet group to be used for the cluster. Defaults to a subnet group consisting of default VPC subnets.
        """
        return pulumi.get(self, "subnet_group_name")

    @subnet_group_name.setter
    def subnet_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_group_name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        """
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)

    @property
    @pulumi.getter(name="tlsEnabled")
    def tls_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        A flag to enable in-transit encryption on the cluster. When set to `false`, the `acl_name` must be `open-access`. Defaults to `true`.
        """
        return pulumi.get(self, "tls_enabled")

    @tls_enabled.setter
    def tls_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "tls_enabled", value)
@pulumi.input_type
class _ClusterState:
    """
    Input properties used for looking up and filtering existing MemoryDB
    Cluster resources (the state shape consumed by ``Cluster.get``).

    Every attribute is optional; only the fields supplied are stored in
    pulumi's property bag (see ``__init__``), and each ``@property`` pair
    below simply proxies ``pulumi.get``/``pulumi.set`` for one field.
    """
    def __init__(__self__, *,
                 acl_name: Optional[pulumi.Input[str]] = None,
                 arn: Optional[pulumi.Input[str]] = None,
                 auto_minor_version_upgrade: Optional[pulumi.Input[bool]] = None,
                 cluster_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterClusterEndpointArgs']]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 engine_patch_version: Optional[pulumi.Input[str]] = None,
                 engine_version: Optional[pulumi.Input[str]] = None,
                 final_snapshot_name: Optional[pulumi.Input[str]] = None,
                 kms_key_arn: Optional[pulumi.Input[str]] = None,
                 maintenance_window: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 name_prefix: Optional[pulumi.Input[str]] = None,
                 node_type: Optional[pulumi.Input[str]] = None,
                 num_replicas_per_shard: Optional[pulumi.Input[int]] = None,
                 num_shards: Optional[pulumi.Input[int]] = None,
                 parameter_group_name: Optional[pulumi.Input[str]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 shards: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterShardArgs']]]] = None,
                 snapshot_arns: Optional[pulumi.Input[str]] = None,
                 snapshot_name: Optional[pulumi.Input[str]] = None,
                 snapshot_retention_limit: Optional[pulumi.Input[int]] = None,
                 snapshot_window: Optional[pulumi.Input[str]] = None,
                 sns_topic_arn: Optional[pulumi.Input[str]] = None,
                 subnet_group_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tls_enabled: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering Cluster resources.
        :param pulumi.Input[str] acl_name: The name of the Access Control List to associate with the cluster.
        :param pulumi.Input[str] arn: The ARN of the cluster.
               * `cluster_endpoint`
        :param pulumi.Input[bool] auto_minor_version_upgrade: When set to `true`, the cluster will automatically receive minor engine version upgrades after launch. Defaults to `true`.
        :param pulumi.Input[str] engine_patch_version: Patch version number of the Redis engine used by the cluster.
        :param pulumi.Input[str] engine_version: Version number of the Redis engine to be used for the cluster. Downgrades are not supported.
        :param pulumi.Input[str] final_snapshot_name: Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made.
        :param pulumi.Input[str] kms_key_arn: ARN of the KMS key used to encrypt the cluster at rest.
        :param pulumi.Input[str] maintenance_window: Specifies the weekly time range during which maintenance on the cluster is performed. It is specified as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:23:00-mon:01:30`.
        :param pulumi.Input[str] name: Name of this node.
               * `endpoint`
        :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.
        :param pulumi.Input[str] node_type: The compute and memory capacity of the nodes in the cluster. See AWS documentation on [supported node types](https://docs.aws.amazon.com/memorydb/latest/devguide/nodes.supportedtypes.html) as well as [vertical scaling](https://docs.aws.amazon.com/memorydb/latest/devguide/cluster-vertical-scaling.html).
        :param pulumi.Input[int] num_replicas_per_shard: The number of replicas to apply to each shard, up to a maximum of 5. Defaults to `1` (i.e. 2 nodes per shard).
        :param pulumi.Input[int] num_shards: The number of shards in the cluster. Defaults to `1`.
        :param pulumi.Input[str] parameter_group_name: The name of the parameter group associated with the cluster.
        :param pulumi.Input[int] port: The port number on which each of the nodes accepts connections. Defaults to `6379`.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: Set of VPC Security Group ID-s to associate with this cluster.
        :param pulumi.Input[Sequence[pulumi.Input['ClusterShardArgs']]] shards: Set of shards in this cluster.
        :param pulumi.Input[str] snapshot_arns: List of ARN-s that uniquely identify RDB snapshot files stored in S3. The snapshot files will be used to populate the new cluster. Object names in the ARN-s cannot contain any commas.
        :param pulumi.Input[str] snapshot_name: The name of a snapshot from which to restore data into the new cluster.
        :param pulumi.Input[int] snapshot_retention_limit: The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled. Defaults to `0`.
        :param pulumi.Input[str] snapshot_window: The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`.
        :param pulumi.Input[str] sns_topic_arn: ARN of the SNS topic to which cluster notifications are sent.
        :param pulumi.Input[str] subnet_group_name: The name of the subnet group to be used for the cluster. Defaults to a subnet group consisting of default VPC subnets.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        :param pulumi.Input[bool] tls_enabled: A flag to enable in-transit encryption on the cluster. When set to `false`, the `acl_name` must be `open-access`. Defaults to `true`.
        """
        # Only non-None values are stored, so "unset" and "set to None" are
        # indistinguishable downstream — this matches pulumi's optional-input
        # convention for state lookup classes.
        if acl_name is not None:
            pulumi.set(__self__, "acl_name", acl_name)
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if auto_minor_version_upgrade is not None:
            pulumi.set(__self__, "auto_minor_version_upgrade", auto_minor_version_upgrade)
        if cluster_endpoints is not None:
            pulumi.set(__self__, "cluster_endpoints", cluster_endpoints)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if engine_patch_version is not None:
            pulumi.set(__self__, "engine_patch_version", engine_patch_version)
        if engine_version is not None:
            pulumi.set(__self__, "engine_version", engine_version)
        if final_snapshot_name is not None:
            pulumi.set(__self__, "final_snapshot_name", final_snapshot_name)
        if kms_key_arn is not None:
            pulumi.set(__self__, "kms_key_arn", kms_key_arn)
        if maintenance_window is not None:
            pulumi.set(__self__, "maintenance_window", maintenance_window)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if name_prefix is not None:
            pulumi.set(__self__, "name_prefix", name_prefix)
        if node_type is not None:
            pulumi.set(__self__, "node_type", node_type)
        if num_replicas_per_shard is not None:
            pulumi.set(__self__, "num_replicas_per_shard", num_replicas_per_shard)
        if num_shards is not None:
            pulumi.set(__self__, "num_shards", num_shards)
        if parameter_group_name is not None:
            pulumi.set(__self__, "parameter_group_name", parameter_group_name)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if security_group_ids is not None:
            pulumi.set(__self__, "security_group_ids", security_group_ids)
        if shards is not None:
            pulumi.set(__self__, "shards", shards)
        if snapshot_arns is not None:
            pulumi.set(__self__, "snapshot_arns", snapshot_arns)
        if snapshot_name is not None:
            pulumi.set(__self__, "snapshot_name", snapshot_name)
        if snapshot_retention_limit is not None:
            pulumi.set(__self__, "snapshot_retention_limit", snapshot_retention_limit)
        if snapshot_window is not None:
            pulumi.set(__self__, "snapshot_window", snapshot_window)
        if sns_topic_arn is not None:
            pulumi.set(__self__, "sns_topic_arn", sns_topic_arn)
        if subnet_group_name is not None:
            pulumi.set(__self__, "subnet_group_name", subnet_group_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)
        if tls_enabled is not None:
            pulumi.set(__self__, "tls_enabled", tls_enabled)
    @property
    @pulumi.getter(name="aclName")
    def acl_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Access Control List to associate with the cluster.
        """
        return pulumi.get(self, "acl_name")
    @acl_name.setter
    def acl_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acl_name", value)
    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        The ARN of the cluster.
        * `cluster_endpoint`
        """
        return pulumi.get(self, "arn")
    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)
    @property
    @pulumi.getter(name="autoMinorVersionUpgrade")
    def auto_minor_version_upgrade(self) -> Optional[pulumi.Input[bool]]:
        """
        When set to `true`, the cluster will automatically receive minor engine version upgrades after launch. Defaults to `true`.
        """
        return pulumi.get(self, "auto_minor_version_upgrade")
    @auto_minor_version_upgrade.setter
    def auto_minor_version_upgrade(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "auto_minor_version_upgrade", value)
    @property
    @pulumi.getter(name="clusterEndpoints")
    def cluster_endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterClusterEndpointArgs']]]]:
        """
        Endpoints of the cluster — presumably the DNS address/port pairs
        reported by MemoryDB; provider-computed (set to None at creation).
        """
        return pulumi.get(self, "cluster_endpoints")
    @cluster_endpoints.setter
    def cluster_endpoints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterClusterEndpointArgs']]]]):
        pulumi.set(self, "cluster_endpoints", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Description for the cluster (free-form text).
        """
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="enginePatchVersion")
    def engine_patch_version(self) -> Optional[pulumi.Input[str]]:
        """
        Patch version number of the Redis engine used by the cluster.
        """
        return pulumi.get(self, "engine_patch_version")
    @engine_patch_version.setter
    def engine_patch_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "engine_patch_version", value)
    @property
    @pulumi.getter(name="engineVersion")
    def engine_version(self) -> Optional[pulumi.Input[str]]:
        """
        Version number of the Redis engine to be used for the cluster. Downgrades are not supported.
        """
        return pulumi.get(self, "engine_version")
    @engine_version.setter
    def engine_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "engine_version", value)
    @property
    @pulumi.getter(name="finalSnapshotName")
    def final_snapshot_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made.
        """
        return pulumi.get(self, "final_snapshot_name")
    @final_snapshot_name.setter
    def final_snapshot_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "final_snapshot_name", value)
    @property
    @pulumi.getter(name="kmsKeyArn")
    def kms_key_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the KMS key used to encrypt the cluster at rest.
        """
        return pulumi.get(self, "kms_key_arn")
    @kms_key_arn.setter
    def kms_key_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kms_key_arn", value)
    @property
    @pulumi.getter(name="maintenanceWindow")
    def maintenance_window(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the weekly time range during which maintenance on the cluster is performed. It is specified as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:23:00-mon:01:30`.
        """
        return pulumi.get(self, "maintenance_window")
    @maintenance_window.setter
    def maintenance_window(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "maintenance_window", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of this node.
        * `endpoint`
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="namePrefix")
    def name_prefix(self) -> Optional[pulumi.Input[str]]:
        """
        Creates a unique name beginning with the specified prefix. Conflicts with `name`.
        """
        return pulumi.get(self, "name_prefix")
    @name_prefix.setter
    def name_prefix(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name_prefix", value)
    @property
    @pulumi.getter(name="nodeType")
    def node_type(self) -> Optional[pulumi.Input[str]]:
        """
        The compute and memory capacity of the nodes in the cluster. See AWS documentation on [supported node types](https://docs.aws.amazon.com/memorydb/latest/devguide/nodes.supportedtypes.html) as well as [vertical scaling](https://docs.aws.amazon.com/memorydb/latest/devguide/cluster-vertical-scaling.html).
        """
        return pulumi.get(self, "node_type")
    @node_type.setter
    def node_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_type", value)
    @property
    @pulumi.getter(name="numReplicasPerShard")
    def num_replicas_per_shard(self) -> Optional[pulumi.Input[int]]:
        """
        The number of replicas to apply to each shard, up to a maximum of 5. Defaults to `1` (i.e. 2 nodes per shard).
        """
        return pulumi.get(self, "num_replicas_per_shard")
    @num_replicas_per_shard.setter
    def num_replicas_per_shard(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "num_replicas_per_shard", value)
    @property
    @pulumi.getter(name="numShards")
    def num_shards(self) -> Optional[pulumi.Input[int]]:
        """
        The number of shards in the cluster. Defaults to `1`.
        """
        return pulumi.get(self, "num_shards")
    @num_shards.setter
    def num_shards(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "num_shards", value)
    @property
    @pulumi.getter(name="parameterGroupName")
    def parameter_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the parameter group associated with the cluster.
        """
        return pulumi.get(self, "parameter_group_name")
    @parameter_group_name.setter
    def parameter_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "parameter_group_name", value)
    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        The port number on which each of the nodes accepts connections. Defaults to `6379`.
        """
        return pulumi.get(self, "port")
    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)
    @property
    @pulumi.getter(name="securityGroupIds")
    def security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Set of VPC Security Group ID-s to associate with this cluster.
        """
        return pulumi.get(self, "security_group_ids")
    @security_group_ids.setter
    def security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "security_group_ids", value)
    @property
    @pulumi.getter
    def shards(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterShardArgs']]]]:
        """
        Set of shards in this cluster.
        """
        return pulumi.get(self, "shards")
    @shards.setter
    def shards(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterShardArgs']]]]):
        pulumi.set(self, "shards", value)
    @property
    @pulumi.getter(name="snapshotArns")
    def snapshot_arns(self) -> Optional[pulumi.Input[str]]:
        """
        List of ARN-s that uniquely identify RDB snapshot files stored in S3. The snapshot files will be used to populate the new cluster. Object names in the ARN-s cannot contain any commas.
        """
        # NOTE(review): documented as a list of ARNs but annotated as a plain
        # str — mismatch inherited from the provider schema; confirm upstream
        # before relying on the annotation.
        return pulumi.get(self, "snapshot_arns")
    @snapshot_arns.setter
    def snapshot_arns(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snapshot_arns", value)
    @property
    @pulumi.getter(name="snapshotName")
    def snapshot_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of a snapshot from which to restore data into the new cluster.
        """
        return pulumi.get(self, "snapshot_name")
    @snapshot_name.setter
    def snapshot_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snapshot_name", value)
    @property
    @pulumi.getter(name="snapshotRetentionLimit")
    def snapshot_retention_limit(self) -> Optional[pulumi.Input[int]]:
        """
        The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled. Defaults to `0`.
        """
        return pulumi.get(self, "snapshot_retention_limit")
    @snapshot_retention_limit.setter
    def snapshot_retention_limit(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "snapshot_retention_limit", value)
    @property
    @pulumi.getter(name="snapshotWindow")
    def snapshot_window(self) -> Optional[pulumi.Input[str]]:
        """
        The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`.
        """
        return pulumi.get(self, "snapshot_window")
    @snapshot_window.setter
    def snapshot_window(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "snapshot_window", value)
    @property
    @pulumi.getter(name="snsTopicArn")
    def sns_topic_arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the SNS topic to which cluster notifications are sent.
        """
        return pulumi.get(self, "sns_topic_arn")
    @sns_topic_arn.setter
    def sns_topic_arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sns_topic_arn", value)
    @property
    @pulumi.getter(name="subnetGroupName")
    def subnet_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the subnet group to be used for the cluster. Defaults to a subnet group consisting of default VPC subnets.
        """
        return pulumi.get(self, "subnet_group_name")
    @subnet_group_name.setter
    def subnet_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_group_name", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
        """
        return pulumi.get(self, "tags_all")
    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)
    @property
    @pulumi.getter(name="tlsEnabled")
    def tls_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        A flag to enable in-transit encryption on the cluster. When set to `false`, the `acl_name` must be `open-access`. Defaults to `true`.
        """
        return pulumi.get(self, "tls_enabled")
    @tls_enabled.setter
    def tls_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "tls_enabled", value)
class Cluster(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
acl_name: Optional[pulumi.Input[str]] = None,
auto_minor_version_upgrade: Optional[pulumi.Input[bool]] = None,
description: Optional[pulumi.Input[str]] = None,
engine_version: Optional[pulumi.Input[str]] = None,
final_snapshot_name: Optional[pulumi.Input[str]] = None,
kms_key_arn: Optional[pulumi.Input[str]] = None,
maintenance_window: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
name_prefix: Optional[pulumi.Input[str]] = None,
node_type: Optional[pulumi.Input[str]] = None,
num_replicas_per_shard: Optional[pulumi.Input[int]] = None,
num_shards: Optional[pulumi.Input[int]] = None,
parameter_group_name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
snapshot_arns: Optional[pulumi.Input[str]] = None,
snapshot_name: Optional[pulumi.Input[str]] = None,
snapshot_retention_limit: Optional[pulumi.Input[int]] = None,
snapshot_window: Optional[pulumi.Input[str]] = None,
sns_topic_arn: Optional[pulumi.Input[str]] = None,
subnet_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tls_enabled: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Provides a MemoryDB Cluster.
More information about MemoryDB can be found in the [Developer Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/what-is-memorydb-for-redis.html).
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.memorydb.Cluster("example",
acl_name="open-access",
node_type="db.t4g.small",
num_shards=2,
security_group_ids=[aws_security_group["example"]["id"]],
snapshot_retention_limit=7,
subnet_group_name=aws_memorydb_subnet_group["example"]["id"])
```
## Import
Use the `name` to import a cluster. For example
```sh
$ pulumi import aws:memorydb/cluster:Cluster example my-cluster
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] acl_name: The name of the Access Control List to associate with the cluster.
:param pulumi.Input[bool] auto_minor_version_upgrade: When set to `true`, the cluster will automatically receive minor engine version upgrades after launch. Defaults to `true`.
:param pulumi.Input[str] engine_version: Version number of the Redis engine to be used for the cluster. Downgrades are not supported.
:param pulumi.Input[str] final_snapshot_name: Name of the final cluster snapshot to be created when this resource is deleted. If omitted, no final snapshot will be made.
:param pulumi.Input[str] kms_key_arn: ARN of the KMS key used to encrypt the cluster at rest.
:param pulumi.Input[str] maintenance_window: Specifies the weekly time range during which maintenance on the cluster is performed. It is specified as a range in the format `ddd:hh24:mi-ddd:hh24:mi` (24H Clock UTC). The minimum maintenance window is a 60 minute period. Example: `sun:23:00-mon:01:30`.
:param pulumi.Input[str] name: Name of this node.
* `endpoint`
:param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.
:param pulumi.Input[str] node_type: The compute and memory capacity of the nodes in the cluster. See AWS documentation on [supported node types](https://docs.aws.amazon.com/memorydb/latest/devguide/nodes.supportedtypes.html) as well as [vertical scaling](https://docs.aws.amazon.com/memorydb/latest/devguide/cluster-vertical-scaling.html).
:param pulumi.Input[int] num_replicas_per_shard: The number of replicas to apply to each shard, up to a maximum of 5. Defaults to `1` (i.e. 2 nodes per shard).
:param pulumi.Input[int] num_shards: The number of shards in the cluster. Defaults to `1`.
:param pulumi.Input[str] parameter_group_name: The name of the parameter group associated with the cluster.
:param pulumi.Input[int] port: The port number on which each of the nodes accepts connections. Defaults to `6379`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] security_group_ids: Set of VPC Security Group ID-s to associate with this cluster.
:param pulumi.Input[str] snapshot_arns: List of ARN-s that uniquely identify RDB snapshot files stored in S3. The snapshot files will be used to populate the new cluster. Object names in the ARN-s cannot contain any commas.
:param pulumi.Input[str] snapshot_name: The name of a snapshot from which to restore data into the new cluster.
:param pulumi.Input[int] snapshot_retention_limit: The number of days for which MemoryDB retains automatic snapshots before deleting them. When set to `0`, automatic backups are disabled. Defaults to `0`.
:param pulumi.Input[str] snapshot_window: The daily time range (in UTC) during which MemoryDB begins taking a daily snapshot of your shard. Example: `05:00-09:00`.
:param pulumi.Input[str] sns_topic_arn: ARN of the SNS topic to which cluster notifications are sent.
:param pulumi.Input[str] subnet_group_name: The name of the subnet group to be used for the cluster. Defaults to a subnet group consisting of default VPC subnets.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider [`default_tags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
:param pulumi.Input[bool] tls_enabled: A flag to enable in-transit encryption on the cluster. When set to `false`, the `acl_name` must be `open-access`. Defaults to `true`.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ClusterArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a MemoryDB Cluster.
More information about MemoryDB can be found in the [Developer Guide](https://docs.aws.amazon.com/memorydb/latest/devguide/what-is-memorydb-for-redis.html).
## Example Usage
```python
import pulumi
import pulumi_aws as aws
example = aws.memorydb.Cluster("example",
acl_name="open-access",
node_type="db.t4g.small",
num_shards=2,
security_group_ids=[aws_security_group["example"]["id"]],
snapshot_retention_limit=7,
subnet_group_name=aws_memorydb_subnet_group["example"]["id"])
```
## Import
Use the `name` to import a cluster. For example
```sh
$ pulumi import aws:memorydb/cluster:Cluster example my-cluster
```
:param str resource_name: The name of the resource.
:param ClusterArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
acl_name: Optional[pulumi.Input[str]] = None,
auto_minor_version_upgrade: Optional[pulumi.Input[bool]] = None,
description: Optional[pulumi.Input[str]] = None,
engine_version: Optional[pulumi.Input[str]] = None,
final_snapshot_name: Optional[pulumi.Input[str]] = None,
kms_key_arn: Optional[pulumi.Input[str]] = None,
maintenance_window: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
name_prefix: Optional[pulumi.Input[str]] = None,
node_type: Optional[pulumi.Input[str]] = None,
num_replicas_per_shard: Optional[pulumi.Input[int]] = None,
num_shards: Optional[pulumi.Input[int]] = None,
parameter_group_name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
snapshot_arns: Optional[pulumi.Input[str]] = None,
snapshot_name: Optional[pulumi.Input[str]] = None,
snapshot_retention_limit: Optional[pulumi.Input[int]] = None,
snapshot_window: Optional[pulumi.Input[str]] = None,
sns_topic_arn: Optional[pulumi.Input[str]] = None,
subnet_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tls_enabled: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ClusterArgs.__new__(ClusterArgs)
if acl_name is None and not opts.urn:
raise TypeError("Missing required property 'acl_name'")
__props__.__dict__["acl_name"] = acl_name
__props__.__dict__["auto_minor_version_upgrade"] = auto_minor_version_upgrade
__props__.__dict__["description"] = description
__props__.__dict__["engine_version"] = engine_version
__props__.__dict__["final_snapshot_name"] = final_snapshot_name
__props__.__dict__["kms_key_arn"] = kms_key_arn
__props__.__dict__["maintenance_window"] = maintenance_window
__props__.__dict__["name"] = name
__props__.__dict__["name_prefix"] = name_prefix
if node_type is None and not opts.urn:
raise TypeError("Missing required property 'node_type'")
__props__.__dict__["node_type"] = node_type
__props__.__dict__["num_replicas_per_shard"] = num_replicas_per_shard
__props__.__dict__["num_shards"] = num_shards
__props__.__dict__["parameter_group_name"] = parameter_group_name
__props__.__dict__["port"] = port
__props__.__dict__["security_group_ids"] = security_group_ids
__props__.__dict__["snapshot_arns"] = snapshot_arns
__props__.__dict__["snapshot_name"] = snapshot_name
__props__.__dict__["snapshot_retention_limit"] = snapshot_retention_limit
__props__.__dict__["snapshot_window"] = snapshot_window
__props__.__dict__["sns_topic_arn"] = sns_topic_arn
__props__.__dict__["subnet_group_name"] = subnet_group_name
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
__props__.__dict__["tls_enabled"] = tls_enabled
__props__.__dict__["arn"] = None
__props__.__dict__["cluster_endpoints"] = None
__props__.__dict__["engine_patch_version"] = None
__props__.__dict__["shards"] = None
super(Cluster, __self__).__init__(
'aws:memorydb/cluster:Cluster',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        acl_name: Optional[pulumi.Input[str]] = None,
        arn: Optional[pulumi.Input[str]] = None,
        auto_minor_version_upgrade: Optional[pulumi.Input[bool]] = None,
        cluster_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterClusterEndpointArgs']]]]] = None,
        description: Optional[pulumi.Input[str]] = None,
        engine_patch_version: Optional[pulumi.Input[str]] = None,
        engine_version: Optional[pulumi.Input[str]] = None,
        final_snapshot_name: Optional[pulumi.Input[str]] = None,
        kms_key_arn: Optional[pulumi.Input[str]] = None,
        maintenance_window: Optional[pulumi.Input[str]] = None,
        name: Optional[pulumi.Input[str]] = None,
        name_prefix: Optional[pulumi.Input[str]] = None,
        node_type: Optional[pulumi.Input[str]] = None,
        num_replicas_per_shard: Optional[pulumi.Input[int]] = None,
        num_shards: Optional[pulumi.Input[int]] = None,
        parameter_group_name: Optional[pulumi.Input[str]] = None,
        port: Optional[pulumi.Input[int]] = None,
        security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
        shards: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterShardArgs']]]]] = None,
        snapshot_arns: Optional[pulumi.Input[str]] = None,
        snapshot_name: Optional[pulumi.Input[str]] = None,
        snapshot_retention_limit: Optional[pulumi.Input[int]] = None,
        snapshot_window: Optional[pulumi.Input[str]] = None,
        sns_topic_arn: Optional[pulumi.Input[str]] = None,
        subnet_group_name: Optional[pulumi.Input[str]] = None,
        tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
        tls_enabled: Optional[pulumi.Input[bool]] = None) -> 'Cluster':
    """
    Look up an existing Cluster resource by name and provider id and
    return it, optionally seeded with extra state properties used to
    qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to look up.
    :param pulumi.ResourceOptions opts: Options for the resource.

    Every remaining keyword argument seeds the state property of the same
    name on the returned resource; see the corresponding `Cluster`
    property docstrings for the meaning of each field.
    """
    # Fold the provider id into the resource options so the engine performs
    # a read of the existing resource instead of a create.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = _ClusterState.__new__(_ClusterState)
    # Populate the fresh state object in a single call; keyword order
    # mirrors the provider schema.
    __props__.__dict__.update(
        acl_name=acl_name,
        arn=arn,
        auto_minor_version_upgrade=auto_minor_version_upgrade,
        cluster_endpoints=cluster_endpoints,
        description=description,
        engine_patch_version=engine_patch_version,
        engine_version=engine_version,
        final_snapshot_name=final_snapshot_name,
        kms_key_arn=kms_key_arn,
        maintenance_window=maintenance_window,
        name=name,
        name_prefix=name_prefix,
        node_type=node_type,
        num_replicas_per_shard=num_replicas_per_shard,
        num_shards=num_shards,
        parameter_group_name=parameter_group_name,
        port=port,
        security_group_ids=security_group_ids,
        shards=shards,
        snapshot_arns=snapshot_arns,
        snapshot_name=snapshot_name,
        snapshot_retention_limit=snapshot_retention_limit,
        snapshot_window=snapshot_window,
        sns_topic_arn=sns_topic_arn,
        subnet_group_name=subnet_group_name,
        tags=tags,
        tags_all=tags_all,
        tls_enabled=tls_enabled,
    )
    return Cluster(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="aclName")
def acl_name(self) -> pulumi.Output[str]:
    """The name of the Access Control List to associate with the cluster."""
    value = pulumi.get(self, "acl_name")
    return value
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
    """The ARN of the cluster."""
    value = pulumi.get(self, "arn")
    return value
@property
@pulumi.getter(name="autoMinorVersionUpgrade")
def auto_minor_version_upgrade(self) -> pulumi.Output[Optional[bool]]:
    """When `true`, the cluster automatically receives minor engine version upgrades after launch. Defaults to `true`."""
    value = pulumi.get(self, "auto_minor_version_upgrade")
    return value
@property
@pulumi.getter(name="clusterEndpoints")
def cluster_endpoints(self) -> pulumi.Output[Sequence['outputs.ClusterClusterEndpoint']]:
    # NOTE(review): presumably the connection endpoints exposed by the
    # cluster (address/port pairs) — confirm against the provider docs.
    return pulumi.get(self, "cluster_endpoints")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
    # Free-form description of the cluster; optional (may be absent).
    return pulumi.get(self, "description")
@property
@pulumi.getter(name="enginePatchVersion")
def engine_patch_version(self) -> pulumi.Output[str]:
    """Patch version number of the Redis engine used by the cluster."""
    value = pulumi.get(self, "engine_patch_version")
    return value
@property
@pulumi.getter(name="engineVersion")
def engine_version(self) -> pulumi.Output[str]:
    """Version number of the Redis engine used by the cluster. Downgrades are not supported."""
    value = pulumi.get(self, "engine_version")
    return value
@property
@pulumi.getter(name="finalSnapshotName")
def final_snapshot_name(self) -> pulumi.Output[Optional[str]]:
    """Name of the final cluster snapshot created when this resource is deleted. If omitted, no final snapshot is made."""
    value = pulumi.get(self, "final_snapshot_name")
    return value
@property
@pulumi.getter(name="kmsKeyArn")
def kms_key_arn(self) -> pulumi.Output[Optional[str]]:
    """ARN of the KMS key used to encrypt the cluster at rest."""
    value = pulumi.get(self, "kms_key_arn")
    return value
@property
@pulumi.getter(name="maintenanceWindow")
def maintenance_window(self) -> pulumi.Output[str]:
    """Weekly time range for cluster maintenance, in the format `ddd:hh24:mi-ddd:hh24:mi` (24H clock, UTC). Minimum window is 60 minutes. Example: `sun:23:00-mon:01:30`."""
    value = pulumi.get(self, "maintenance_window")
    return value
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """Name of this node."""
    value = pulumi.get(self, "name")
    return value
@property
@pulumi.getter(name="namePrefix")
def name_prefix(self) -> pulumi.Output[str]:
    """Creates a unique name beginning with the specified prefix. Conflicts with `name`."""
    value = pulumi.get(self, "name_prefix")
    return value
@property
@pulumi.getter(name="nodeType")
def node_type(self) -> pulumi.Output[str]:
    """Compute and memory capacity of the nodes in the cluster; see the AWS MemoryDB docs on supported node types and vertical scaling."""
    value = pulumi.get(self, "node_type")
    return value
@property
@pulumi.getter(name="numReplicasPerShard")
def num_replicas_per_shard(self) -> pulumi.Output[Optional[int]]:
    """Number of replicas per shard, up to a maximum of 5. Defaults to `1` (i.e. 2 nodes per shard)."""
    value = pulumi.get(self, "num_replicas_per_shard")
    return value
@property
@pulumi.getter(name="numShards")
def num_shards(self) -> pulumi.Output[Optional[int]]:
    """Number of shards in the cluster. Defaults to `1`."""
    value = pulumi.get(self, "num_shards")
    return value
@property
@pulumi.getter(name="parameterGroupName")
def parameter_group_name(self) -> pulumi.Output[str]:
    """Name of the parameter group associated with the cluster."""
    value = pulumi.get(self, "parameter_group_name")
    return value
@property
@pulumi.getter
def port(self) -> pulumi.Output[int]:
    """Port number on which each node accepts connections. Defaults to `6379`."""
    value = pulumi.get(self, "port")
    return value
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
    """Set of VPC Security Group IDs to associate with this cluster."""
    value = pulumi.get(self, "security_group_ids")
    return value
@property
@pulumi.getter
def shards(self) -> pulumi.Output[Sequence['outputs.ClusterShard']]:
    """Set of shards in this cluster."""
    value = pulumi.get(self, "shards")
    return value
@property
@pulumi.getter(name="snapshotArns")
def snapshot_arns(self) -> pulumi.Output[Optional[str]]:
    """List of ARNs identifying RDB snapshot files in S3 used to populate the new cluster. Object names in the ARNs cannot contain commas."""
    value = pulumi.get(self, "snapshot_arns")
    return value
@property
@pulumi.getter(name="snapshotName")
def snapshot_name(self) -> pulumi.Output[Optional[str]]:
    """Name of a snapshot from which to restore data into the new cluster."""
    value = pulumi.get(self, "snapshot_name")
    return value
@property
@pulumi.getter(name="snapshotRetentionLimit")
def snapshot_retention_limit(self) -> pulumi.Output[int]:
    """Number of days MemoryDB retains automatic snapshots before deleting them. `0` disables automatic backups; defaults to `0`."""
    value = pulumi.get(self, "snapshot_retention_limit")
    return value
@property
@pulumi.getter(name="snapshotWindow")
def snapshot_window(self) -> pulumi.Output[str]:
    """Daily time range (UTC) during which MemoryDB takes a daily snapshot of your shard. Example: `05:00-09:00`."""
    value = pulumi.get(self, "snapshot_window")
    return value
@property
@pulumi.getter(name="snsTopicArn")
def sns_topic_arn(self) -> pulumi.Output[Optional[str]]:
    """ARN of the SNS topic to which cluster notifications are sent."""
    value = pulumi.get(self, "sns_topic_arn")
    return value
@property
@pulumi.getter(name="subnetGroupName")
def subnet_group_name(self) -> pulumi.Output[str]:
    """Name of the subnet group used for the cluster. Defaults to a subnet group consisting of default VPC subnets."""
    value = pulumi.get(self, "subnet_group_name")
    return value
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
    """Map of tags assigned to the resource. With a provider `default_tags` configuration block present, tags with matching keys overwrite those defined at the provider level."""
    value = pulumi.get(self, "tags")
    return value
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
    """Map of all tags on the resource, including those inherited from the provider `default_tags` configuration block."""
    value = pulumi.get(self, "tags_all")
    return value
@property
@pulumi.getter(name="tlsEnabled")
def tls_enabled(self) -> pulumi.Output[Optional[bool]]:
    """Whether in-transit encryption is enabled. When `false`, `acl_name` must be `open-access`. Defaults to `true`."""
    value = pulumi.get(self, "tls_enabled")
    return value
| 52.303971
| 348
| 0.672064
| 9,396
| 72,441
| 4.981056
| 0.037995
| 0.090018
| 0.072988
| 0.064399
| 0.960173
| 0.948549
| 0.938827
| 0.927311
| 0.918807
| 0.904983
| 0
| 0.004476
| 0.222802
| 72,441
| 1,384
| 349
| 52.341763
| 0.826806
| 0.375823
| 0
| 0.852121
| 1
| 0
| 0.108187
| 0.022425
| 0
| 0
| 0
| 0
| 0
| 1
| 0.168485
| false
| 0.001212
| 0.008485
| 0.006061
| 0.278788
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2e020b712de72d298eb2562bc120dce5bcb444f6
| 14,792
|
py
|
Python
|
scripts/createGermlineMutations.py
|
leonorss/Bachelor-Thesis
|
c00aa4950ece82215c0af0852793f71a34c93001
|
[
"MIT"
] | null | null | null |
scripts/createGermlineMutations.py
|
leonorss/Bachelor-Thesis
|
c00aa4950ece82215c0af0852793f71a34c93001
|
[
"MIT"
] | null | null | null |
scripts/createGermlineMutations.py
|
leonorss/Bachelor-Thesis
|
c00aa4950ece82215c0af0852793f71a34c93001
|
[
"MIT"
] | null | null | null |
"""Create random germline SNVs and write them to two per-allele VCF files.

Driven by Snakemake: params supply the mutation counts, RNG seed, reference
FASTA and a template VCF (for its header); outputs are one VCF per allele.

Fixes over the previous version:
  * the heterozygous loop drew ``randrange(1, 4)``, so "T" substitutions
    could never be generated; all four bases are now equally likely,
  * the heterozygous loop excluded the last reference position
    (``randrange(0, length - 1)``) while the homozygous loop did not,
  * duplicate detection compared the 0-based insert position against the
    1-based POS column of the output file (an off-by-one that never matched
    the true position) and re-read the growing VCF from disk on every draw
    (quadratic); mutated positions are now tracked in in-memory sets,
  * the lowercase (soft-masked) and 'N' checks are now case-insensitive
    instead of comparing against a single hard-coded lowercase letter,
  * the fourfold copy-pasted A/C/G/T branches are collapsed into helpers.
"""

import random

import vcf
from Bio import SeqIO

# The four possible substitution bases.
NUCLEOTIDES = ("A", "C", "G", "T")


def _choose_snv(reference, reference_length, blocked_positions):
    """Draw random (0-based position, alt base) pairs until one is a real SNV.

    A draw is rejected when the position is already mutated on the target
    allele(s), when the reference base is unsequenced ('N'/'n'), or when the
    drawn base equals the reference base (case-insensitively) and therefore
    would not actually change the genome.
    """
    while True:
        alt = NUCLEOTIDES[random.randrange(0, 4)]
        position = random.randrange(0, reference_length)
        if position in blocked_positions:
            continue
        ref_base = str(reference[position]).upper()
        if ref_base == "N" or ref_base == alt:
            continue
        return position, alt


def _snv_record(chromosome_id, reference, position, alt):
    """Build a minimal PyVCF substitution record for 0-based *position*."""
    return vcf.model._Record(CHROM=chromosome_id, POS=(position + 1), ID='.',
                             REF=vcf.model._Substitution(reference[position]),
                             ALT=[vcf.model._Substitution(alt)], QUAL='.',
                             FILTER='PASS', INFO={},
                             FORMAT=".", sample_indexes=[], samples=None)


def main():
    # Parameters from the config file: total number of germline mutations
    # and the fraction of them that are heterozygous.
    n_g_m = snakemake.params[0]
    f_h_m = snakemake.params[1]

    # Seed the RNG so runs are reproducible.
    random.seed(snakemake.params[2])

    # Split the total into hetero-/homozygous counts as before.
    number_heterozygous = int(n_g_m * f_h_m)
    number_homozygous = n_g_m - number_heterozygous

    # Reference genome, used to reject mutations that would not actually
    # change the sequence.
    reference = SeqIO.read(snakemake.params[3], "fasta")
    reference_length = len(reference)

    # Two VCF files, one per allele, sharing the header of the template file.
    template = vcf.Reader(filename=snakemake.params[4])
    writer1 = vcf.Writer(open(snakemake.output[0], 'w'), template)
    writer2 = vcf.Writer(open(snakemake.output[1], 'w'), template)
    chromosome_id = int(next(iter(template.contigs)))

    # Mutated 0-based positions per allele, tracked in memory.
    mutated1 = set()
    mutated2 = set()

    # Homozygous mutations appear on both alleles, so a position is blocked
    # when it is already mutated on either one.
    for _ in range(number_homozygous):
        position, alt = _choose_snv(reference, reference_length,
                                    mutated1 | mutated2)
        record = _snv_record(chromosome_id, reference, position, alt)
        writer1.write_record(record)
        writer2.write_record(record)
        writer1.flush()
        writer2.flush()
        mutated1.add(position)
        mutated2.add(position)

    # Heterozygous mutations go onto one randomly chosen allele each.
    for _ in range(number_heterozygous):
        allele = random.randint(1, 2)
        if allele == 1:
            writer, mutated = writer1, mutated1
        else:
            writer, mutated = writer2, mutated2
        position, alt = _choose_snv(reference, reference_length, mutated)
        record = _snv_record(chromosome_id, reference, position, alt)
        writer.write_record(record)
        writer.flush()
        mutated.add(position)

    writer1.close()
    writer2.close()


main()
| 51.183391
| 150
| 0.546444
| 1,322
| 14,792
| 6.067322
| 0.109682
| 0.103728
| 0.057848
| 0.042139
| 0.876699
| 0.850019
| 0.850019
| 0.850019
| 0.850019
| 0.850019
| 0
| 0.017482
| 0.369659
| 14,792
| 288
| 151
| 51.361111
| 0.842771
| 0.158599
| 0
| 0.88785
| 0
| 0
| 0.007012
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.037383
| 0.014019
| 0
| 0.014019
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e1f60ba3938bad9cbd645d4ed521b3816f8bf09
| 480,530
|
py
|
Python
|
unittest/test_amin.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | 2
|
2017-08-28T08:41:16.000Z
|
2018-05-29T03:49:36.000Z
|
unittest/test_amin.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
unittest/test_amin.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
##############################################################################
# Project: arrayfunc
# Module: test_amin.py
# Purpose: arrayfunc unit test.
# Language: Python 3.4
# Date: 11-Jun-2014.
# Ver: 06-Mar-2020.
#
###############################################################################
#
# Copyright 2014 - 2020 Michael Griffin <m12.griffin@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
"""This conducts unit tests for amin.
"""
##############################################################################
import sys
import array
import itertools
import math
import operator
import platform
import copy
import unittest
import arrayfunc
##############################################################################
##############################################################################
# The following code is all auto-generated.
##############################################################################
class amin_general_even_arraysize_with_simd_b(unittest.TestCase):
    """General-operation tests for amin on array code 'b' (even length, SIMD).

    op_template_general
    """

    def setUp(self):
        """Build the fixture arrays used by the tests."""
        # Template artefact: 'odd'-length variants extend the array so data
        # spills past the SIMD handler; the 'even' variant adds nothing.
        arraylength = 96 + (5 if 'even' == 'odd' else 0)

        MaxVal = arrayfunc.arraylimits.b_max
        MinVal = arrayfunc.arraylimits.b_min

        # Work in Python ints: for float array codes generated from the same
        # template, double-precision step arithmetic could overflow, while
        # Python ints cannot (and range() needs ints anyway).
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)

        # Evenly spaced data over a range straddling the type's mid point.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100

        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))

        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

        def cycled(data):
            # Repeat *data* cyclically until exactly arraylength items.
            return array.array('b', itertools.islice(itertools.cycle(data), arraylength))

        # Test arrays.
        self.gentest = cycled(gendata)
        self.inctest = cycled(halfrangeinc)
        self.dectest = cycled(halfrangedec)
        self.maxvaltest = cycled(maxvaldata)
        self.minvaltest = cycled(minvaldata)

    def test_amin_general_function_01(self):
        """Test amin - Array code b. General test even length array with SIMD.
        """
        self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

    def test_amin_general_function_02(self):
        """Test amin - Array code b. Test increasing values even length array with SIMD.
        """
        self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

    def test_amin_general_function_03(self):
        """Test amin - Array code b. Test decreasing values even length array with SIMD.
        """
        self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

    def test_amin_general_function_04(self):
        """Test amin - Array code b. Test finding max for data type even length array with SIMD.
        """
        self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

    def test_amin_general_function_05(self):
        """Test amin - Array code b. Test finding value from array that contains min for data type even length array with SIMD.
        """
        self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

    def test_amin_general_function_06(self):
        """Test amin - Array code b. Test optional maxlen parameter even length array with SIMD.
        """
        # maxlen limits the scan to the first five elements only.
        self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5),
                         min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_b(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a constant-valued 'b' test array."""
		# Template artifact: the generator compared the literal strings
		# 'even' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this even-length variant the comparison
		# is constant-false, so the extension is always zero.
		arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.b_max
		self.gentest = array.array('b', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code b. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code b. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code b. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code b. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_b(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the 'b' test data arrays.

		Builds arrays of known content: interleaved increasing/decreasing
		values, purely increasing, purely decreasing, and variants that
		contain the data type's maximum or minimum value.
		"""
		# Template artifact: the generator compared the literal strings
		# 'odd' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this odd-length variant the comparison
		# is constant-true, so the extension is always 5.
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.b_max
		MinVal = arrayfunc.arraylimits.b_min
		# Work in Python ints: range() must not receive floats when the
		# common template is instantiated for float types, and Python ints
		# cannot overflow while calculating the step value.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced data over a range straddling the midpoint of the type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Repeat each data sequence cyclically, truncated to the array length.
		def makearray(data):
			return array.array('b', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = makearray(gendata)
		self.inctest = makearray(incdata)
		self.dectest = makearray(decdata)
		self.maxvaltest = makearray(maxvaldata)
		self.minvaltest = makearray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code b. General test odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.gentest)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code b. Test increasing values odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.inctest)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code b. Test decreasing values odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.dectest)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code b. Test finding max for data type odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code b. Test finding value from array that contains min for data type odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code b. Test optional maxlen parameter odd length array with SIMD.
		"""
		# Only the first five elements should be examined.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_b(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a constant-valued 'b' test array."""
		# Template artifact: the generator compared the literal strings
		# 'odd' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this odd-length variant the comparison
		# is constant-true, so the extension is always 5.
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.b_max
		self.gentest = array.array('b', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code b. Test invalid parameter type odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code b. Test missing parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code b. Test excess parameters odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code b. Test invalid keyword parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_b(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the 'b' test data arrays.

		Builds arrays of known content: interleaved increasing/decreasing
		values, purely increasing, purely decreasing, and variants that
		contain the data type's maximum or minimum value.
		"""
		# Template artifact: the generator compared the literal strings
		# 'even' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this even-length variant the comparison
		# is constant-false, so the extension is always zero.
		arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.b_max
		MinVal = arrayfunc.arraylimits.b_min
		# Work in Python ints: range() must not receive floats when the
		# common template is instantiated for float types, and Python ints
		# cannot overflow while calculating the step value.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced data over a range straddling the midpoint of the type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Repeat each data sequence cyclically, truncated to the array length.
		def makearray(data):
			return array.array('b', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = makearray(gendata)
		self.inctest = makearray(incdata)
		self.dectest = makearray(decdata)
		self.maxvaltest = makearray(maxvaldata)
		self.minvaltest = makearray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code b. General test even length array without SIMD.
		"""
		result = arrayfunc.amin(self.gentest, nosimd=True)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code b. Test increasing values even length array without SIMD.
		"""
		result = arrayfunc.amin(self.inctest, nosimd=True)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code b. Test decreasing values even length array without SIMD.
		"""
		result = arrayfunc.amin(self.dectest, nosimd=True)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code b. Test finding max for data type even length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code b. Test finding value from array that contains min for data type even length array without SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest, nosimd=True)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code b. Test optional maxlen parameter even length array without SIMD.
		"""
		# Only the first five elements should be examined.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_b(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a constant-valued 'b' test array."""
		# Template artifact: the generator compared the literal strings
		# 'even' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this even-length variant the comparison
		# is constant-false, so the extension is always zero.
		arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.b_max
		self.gentest = array.array('b', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code b. Test invalid parameter type even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code b. Test missing parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code b. Test excess parameters even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code b. Test invalid keyword parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_b(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the 'b' test data arrays.

		Builds arrays of known content: interleaved increasing/decreasing
		values, purely increasing, purely decreasing, and variants that
		contain the data type's maximum or minimum value.
		"""
		# Template artifact: the generator compared the literal strings
		# 'odd' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this odd-length variant the comparison
		# is constant-true, so the extension is always 5.
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.b_max
		MinVal = arrayfunc.arraylimits.b_min
		# Work in Python ints: range() must not receive floats when the
		# common template is instantiated for float types, and Python ints
		# cannot overflow while calculating the step value.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced data over a range straddling the midpoint of the type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Repeat each data sequence cyclically, truncated to the array length.
		def makearray(data):
			return array.array('b', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = makearray(gendata)
		self.inctest = makearray(incdata)
		self.dectest = makearray(decdata)
		self.maxvaltest = makearray(maxvaldata)
		self.minvaltest = makearray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code b. General test odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.gentest, nosimd=True)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code b. Test increasing values odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.inctest, nosimd=True)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code b. Test decreasing values odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.dectest, nosimd=True)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code b. Test finding max for data type odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code b. Test finding value from array that contains min for data type odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest, nosimd=True)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code b. Test optional maxlen parameter odd length array without SIMD.
		"""
		# Only the first five elements should be examined.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_b(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a constant-valued 'b' test array."""
		# Template artifact: the generator compared the literal strings
		# 'odd' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this odd-length variant the comparison
		# is constant-true, so the extension is always 5.
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.b_max
		self.gentest = array.array('b', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code b. Test invalid parameter type odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code b. Test missing parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code b. Test excess parameters odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code b. Test invalid keyword parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_B(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the 'B' test data arrays.

		Builds arrays of known content: interleaved increasing/decreasing
		values, purely increasing, purely decreasing, and variants that
		contain the data type's maximum or minimum value.
		"""
		# Template artifact: the generator compared the literal strings
		# 'even' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this even-length variant the comparison
		# is constant-false, so the extension is always zero.
		arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.B_max
		MinVal = arrayfunc.arraylimits.B_min
		# Work in Python ints: range() must not receive floats when the
		# common template is instantiated for float types, and Python ints
		# cannot overflow while calculating the step value.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced data over a range straddling the midpoint of the type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Repeat each data sequence cyclically, truncated to the array length.
		def makearray(data):
			return array.array('B', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = makearray(gendata)
		self.inctest = makearray(incdata)
		self.dectest = makearray(decdata)
		self.maxvaltest = makearray(maxvaldata)
		self.minvaltest = makearray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code B. General test even length array with SIMD.
		"""
		result = arrayfunc.amin(self.gentest)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code B. Test increasing values even length array with SIMD.
		"""
		result = arrayfunc.amin(self.inctest)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code B. Test decreasing values even length array with SIMD.
		"""
		result = arrayfunc.amin(self.dectest)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code B. Test finding max for data type even length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code B. Test finding value from array that contains min for data type even length array with SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code B. Test optional maxlen parameter even length array with SIMD.
		"""
		# Only the first five elements should be examined.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_B(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a constant-valued 'B' test array."""
		# Template artifact: the generator compared the literal strings
		# 'even' == 'odd' to decide whether to pad the length so data spills
		# past the SIMD handler. For this even-length variant the comparison
		# is constant-false, so the extension is always zero.
		arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.B_max
		self.gentest = array.array('B', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code B. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code B. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code B. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code B. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_B(unittest.TestCase):
"""Test amin for basic general function operation.
op_template_general
"""
########################################################
def setUp(self):
"""Initialise.
"""
# We use a template to generate this code, so the following
# compare is inserted into the template to generate code which
# spills over past the SIMD handler.
if 'odd' == 'odd':
arrayextension = 5
else:
arrayextension = 0
arraylength = 96 + arrayextension
MaxVal = arrayfunc.arraylimits.B_max
MinVal = arrayfunc.arraylimits.B_min
# This is generated by a common template, so we need to make
# sure that in cases where we are using floating point values
# we don't pass floating point values for range().
# Plus, double precision calcuations will overflow when calculating
# the step value unless we convert to integer first. Python
# integers have no upper limit, and so will not overflow.
MaxInt = int(MaxVal)
MinInt = int(MinVal)
# Create some arbitrary data over a wide range of the data type. This
# creates evenly spaced data over a range straddling the mid point of the data.
midpoint = (MaxInt + MinInt) // 2
startval = (midpoint + MinInt) // 2
endval = (midpoint + MaxInt) // 2
stepval = (MaxInt - MinInt) // 100
halfrangeinc = list(range(startval, endval, stepval))
halfrangedec = list(range(endval, startval, -stepval))
gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
incdata = halfrangeinc
decdata = halfrangedec
maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
minvaldata = list((itertools.chain(halfrangeinc, [MinVal], halfrangedec)))
# Test arrays.
self.gentest = array.array('B', [x for x,y in zip(itertools.cycle(gendata), range(arraylength))])
self.inctest = array.array('B', [x for x,y in zip(itertools.cycle(incdata), range(arraylength))])
self.dectest = array.array('B', [x for x,y in zip(itertools.cycle(decdata), range(arraylength))])
self.maxvaltest = array.array('B', [x for x,y in zip(itertools.cycle(maxvaldata), range(arraylength))])
self.minvaltest = array.array('B', [x for x,y in zip(itertools.cycle(minvaldata), range(arraylength))])
########################################################
def test_amin_general_function_01(self):
"""Test amin - Array code B. General test odd length array with SIMD.
"""
result = arrayfunc.amin(self.gentest )
self.assertEqual(result, min(self.gentest))
########################################################
def test_amin_general_function_02(self):
"""Test amin - Array code B. Test increasing values odd length array with SIMD.
"""
result = arrayfunc.amin(self.inctest )
self.assertEqual(result, min(self.inctest))
########################################################
def test_amin_general_function_03(self):
"""Test amin - Array code B. Test decreasing values odd length array with SIMD.
"""
result = arrayfunc.amin(self.dectest )
self.assertEqual(result, min(self.dectest))
########################################################
def test_amin_general_function_04(self):
    """Test amin - Array code B. Test finding max for data type odd length array with SIMD.
    """
    # Data set containing the data type's maximum value.
    expected = min(self.maxvaltest)
    self.assertEqual(arrayfunc.amin(self.maxvaltest), expected)
########################################################
def test_amin_general_function_05(self):
    """Test amin - Array code B. Test finding value from array that contains min for data type odd length array with SIMD.
    """
    # Data set containing the data type's minimum value.
    expected = min(self.minvaltest)
    self.assertEqual(arrayfunc.amin(self.minvaltest), expected)
########################################################
def test_amin_general_function_06(self):
    """Test amin - Array code B. Test optional maxlen parameter odd length array with SIMD.
    """
    # With maxlen=5 only the first five elements should be examined.
    expected = min(self.maxvaltest[:5])
    self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), expected)
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_B(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise a 'B' test array of constant mid-range values."""
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the odd variant, so the original constant
        # compare ('odd' == 'odd') always selected an extension of 5.
        arrayextension = 5
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.B_max
        self.gentest = array.array('B', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code B. Test invalid parameter type odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code B. Test missing parameter odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code B. Test excess parameters odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code B. Test invalid keyword parameter odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_B(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the test arrays.

        Builds 'B' arrays with interleaved, increasing and decreasing data
        spread over the middle half of the type's range, plus arrays which
        also contain the type's maximum or minimum value.
        """
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the even variant, so the original constant
        # compare ('even' == 'odd') always selected no extension.
        arrayextension = 0
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.B_max
        MinVal = arrayfunc.arraylimits.B_min
        # Convert to int first: Python ints have no upper limit, so the
        # step calculation cannot overflow, and range() is never handed a
        # floating point value when the template is used for float types.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)
        # Create evenly spaced data over a range straddling the mid point
        # of the data type.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100
        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
        # Test arrays: each data pattern repeated out to the required length.
        self.gentest = array.array('B', itertools.islice(itertools.cycle(gendata), arraylength))
        self.inctest = array.array('B', itertools.islice(itertools.cycle(incdata), arraylength))
        self.dectest = array.array('B', itertools.islice(itertools.cycle(decdata), arraylength))
        self.maxvaltest = array.array('B', itertools.islice(itertools.cycle(maxvaldata), arraylength))
        self.minvaltest = array.array('B', itertools.islice(itertools.cycle(minvaldata), arraylength))

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code B. General test even length array without SIMD.
        """
        result = arrayfunc.amin(self.gentest, nosimd=True)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code B. Test increasing values even length array without SIMD.
        """
        result = arrayfunc.amin(self.inctest, nosimd=True)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code B. Test decreasing values even length array without SIMD.
        """
        result = arrayfunc.amin(self.dectest, nosimd=True)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code B. Test finding max for data type even length array without SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code B. Test finding value from array that contains min for data type even length array without SIMD.
        """
        result = arrayfunc.amin(self.minvaltest, nosimd=True)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code B. Test optional maxlen parameter even length array without SIMD.
        """
        # maxlen limits the search to the first five elements only.
        result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_B(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise a 'B' test array of constant mid-range values."""
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the even variant, so the original constant
        # compare ('even' == 'odd') always selected no extension.
        arrayextension = 0
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.B_max
        self.gentest = array.array('B', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code B. Test invalid parameter type even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code B. Test missing parameter even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code B. Test excess parameters even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code B. Test invalid keyword parameter even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_B(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the test arrays.

        Builds 'B' arrays with interleaved, increasing and decreasing data
        spread over the middle half of the type's range, plus arrays which
        also contain the type's maximum or minimum value.
        """
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the odd variant, so the original constant
        # compare ('odd' == 'odd') always selected an extension of 5.
        arrayextension = 5
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.B_max
        MinVal = arrayfunc.arraylimits.B_min
        # Convert to int first: Python ints have no upper limit, so the
        # step calculation cannot overflow, and range() is never handed a
        # floating point value when the template is used for float types.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)
        # Create evenly spaced data over a range straddling the mid point
        # of the data type.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100
        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
        # Test arrays: each data pattern repeated out to the required length.
        self.gentest = array.array('B', itertools.islice(itertools.cycle(gendata), arraylength))
        self.inctest = array.array('B', itertools.islice(itertools.cycle(incdata), arraylength))
        self.dectest = array.array('B', itertools.islice(itertools.cycle(decdata), arraylength))
        self.maxvaltest = array.array('B', itertools.islice(itertools.cycle(maxvaldata), arraylength))
        self.minvaltest = array.array('B', itertools.islice(itertools.cycle(minvaldata), arraylength))

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code B. General test odd length array without SIMD.
        """
        result = arrayfunc.amin(self.gentest, nosimd=True)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code B. Test increasing values odd length array without SIMD.
        """
        result = arrayfunc.amin(self.inctest, nosimd=True)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code B. Test decreasing values odd length array without SIMD.
        """
        result = arrayfunc.amin(self.dectest, nosimd=True)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code B. Test finding max for data type odd length array without SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code B. Test finding value from array that contains min for data type odd length array without SIMD.
        """
        result = arrayfunc.amin(self.minvaltest, nosimd=True)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code B. Test optional maxlen parameter odd length array without SIMD.
        """
        # maxlen limits the search to the first five elements only.
        result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_B(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise a 'B' test array of constant mid-range values."""
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the odd variant, so the original constant
        # compare ('odd' == 'odd') always selected an extension of 5.
        arrayextension = 5
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.B_max
        self.gentest = array.array('B', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code B. Test invalid parameter type odd length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code B. Test missing parameter odd length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code B. Test excess parameters odd length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code B. Test invalid keyword parameter odd length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_h(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the test arrays.

        Builds 'h' arrays with interleaved, increasing and decreasing data
        spread over the middle half of the type's range, plus arrays which
        also contain the type's maximum or minimum value.
        """
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the even variant, so the original constant
        # compare ('even' == 'odd') always selected no extension.
        arrayextension = 0
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.h_max
        MinVal = arrayfunc.arraylimits.h_min
        # Convert to int first: Python ints have no upper limit, so the
        # step calculation cannot overflow, and range() is never handed a
        # floating point value when the template is used for float types.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)
        # Create evenly spaced data over a range straddling the mid point
        # of the data type.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100
        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
        # Test arrays: each data pattern repeated out to the required length.
        self.gentest = array.array('h', itertools.islice(itertools.cycle(gendata), arraylength))
        self.inctest = array.array('h', itertools.islice(itertools.cycle(incdata), arraylength))
        self.dectest = array.array('h', itertools.islice(itertools.cycle(decdata), arraylength))
        self.maxvaltest = array.array('h', itertools.islice(itertools.cycle(maxvaldata), arraylength))
        self.minvaltest = array.array('h', itertools.islice(itertools.cycle(minvaldata), arraylength))

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code h. General test even length array with SIMD.
        """
        result = arrayfunc.amin(self.gentest)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code h. Test increasing values even length array with SIMD.
        """
        result = arrayfunc.amin(self.inctest)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code h. Test decreasing values even length array with SIMD.
        """
        result = arrayfunc.amin(self.dectest)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code h. Test finding max for data type even length array with SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code h. Test finding value from array that contains min for data type even length array with SIMD.
        """
        result = arrayfunc.amin(self.minvaltest)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code h. Test optional maxlen parameter even length array with SIMD.
        """
        # maxlen limits the search to the first five elements only.
        result = arrayfunc.amin(self.maxvaltest, maxlen=5)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_h(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise an 'h' test array of constant mid-range values."""
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the even variant, so the original constant
        # compare ('even' == 'odd') always selected no extension.
        arrayextension = 0
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.h_max
        self.gentest = array.array('h', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code h. Test invalid parameter type even length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code h. Test missing parameter even length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code h. Test excess parameters even length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code h. Test invalid keyword parameter even length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_h(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the test arrays.

        Builds 'h' arrays with interleaved, increasing and decreasing data
        spread over the middle half of the type's range, plus arrays which
        also contain the type's maximum or minimum value.
        """
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the odd variant, so the original constant
        # compare ('odd' == 'odd') always selected an extension of 5.
        arrayextension = 5
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.h_max
        MinVal = arrayfunc.arraylimits.h_min
        # Convert to int first: Python ints have no upper limit, so the
        # step calculation cannot overflow, and range() is never handed a
        # floating point value when the template is used for float types.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)
        # Create evenly spaced data over a range straddling the mid point
        # of the data type.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100
        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
        # Test arrays: each data pattern repeated out to the required length.
        self.gentest = array.array('h', itertools.islice(itertools.cycle(gendata), arraylength))
        self.inctest = array.array('h', itertools.islice(itertools.cycle(incdata), arraylength))
        self.dectest = array.array('h', itertools.islice(itertools.cycle(decdata), arraylength))
        self.maxvaltest = array.array('h', itertools.islice(itertools.cycle(maxvaldata), arraylength))
        self.minvaltest = array.array('h', itertools.islice(itertools.cycle(minvaldata), arraylength))

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code h. General test odd length array with SIMD.
        """
        result = arrayfunc.amin(self.gentest)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code h. Test increasing values odd length array with SIMD.
        """
        result = arrayfunc.amin(self.inctest)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code h. Test decreasing values odd length array with SIMD.
        """
        result = arrayfunc.amin(self.dectest)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code h. Test finding max for data type odd length array with SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code h. Test finding value from array that contains min for data type odd length array with SIMD.
        """
        result = arrayfunc.amin(self.minvaltest)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code h. Test optional maxlen parameter odd length array with SIMD.
        """
        # maxlen limits the search to the first five elements only.
        result = arrayfunc.amin(self.maxvaltest, maxlen=5)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_h(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise an 'h' test array of constant mid-range values."""
        # This code is produced from a template. The template inserts an
        # 'odd'/'even' compare here so odd-length variants spill past the
        # SIMD handler. This is the odd variant, so the original constant
        # compare ('odd' == 'odd') always selected an extension of 5.
        arrayextension = 5
        arraylength = 96 + arrayextension
        MaxVal = arrayfunc.arraylimits.h_max
        self.gentest = array.array('h', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code h. Test invalid parameter type odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code h. Test missing parameter odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code h. Test excess parameters odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code h. Test invalid keyword parameter odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_h(unittest.TestCase):
"""Test amin for basic general function operation.
op_template_general
"""
########################################################
def setUp(self):
"""Initialise.
"""
# We use a template to generate this code, so the following
# compare is inserted into the template to generate code which
# spills over past the SIMD handler.
if 'even' == 'odd':
arrayextension = 5
else:
arrayextension = 0
arraylength = 96 + arrayextension
MaxVal = arrayfunc.arraylimits.h_max
MinVal = arrayfunc.arraylimits.h_min
# This is generated by a common template, so we need to make
# sure that in cases where we are using floating point values
# we don't pass floating point values for range().
# Plus, double precision calcuations will overflow when calculating
# the step value unless we convert to integer first. Python
# integers have no upper limit, and so will not overflow.
MaxInt = int(MaxVal)
MinInt = int(MinVal)
# Create some arbitrary data over a wide range of the data type. This
# creates evenly spaced data over a range straddling the mid point of the data.
midpoint = (MaxInt + MinInt) // 2
startval = (midpoint + MinInt) // 2
endval = (midpoint + MaxInt) // 2
stepval = (MaxInt - MinInt) // 100
halfrangeinc = list(range(startval, endval, stepval))
halfrangedec = list(range(endval, startval, -stepval))
gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
incdata = halfrangeinc
decdata = halfrangedec
maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
minvaldata = list((itertools.chain(halfrangeinc, [MinVal], halfrangedec)))
# Test arrays.
self.gentest = array.array('h', [x for x,y in zip(itertools.cycle(gendata), range(arraylength))])
self.inctest = array.array('h', [x for x,y in zip(itertools.cycle(incdata), range(arraylength))])
self.dectest = array.array('h', [x for x,y in zip(itertools.cycle(decdata), range(arraylength))])
self.maxvaltest = array.array('h', [x for x,y in zip(itertools.cycle(maxvaldata), range(arraylength))])
self.minvaltest = array.array('h', [x for x,y in zip(itertools.cycle(minvaldata), range(arraylength))])
########################################################
def test_amin_general_function_01(self):
"""Test amin - Array code h. General test even length array without SIMD.
"""
result = arrayfunc.amin(self.gentest , nosimd=True)
self.assertEqual(result, min(self.gentest))
########################################################
def test_amin_general_function_02(self):
"""Test amin - Array code h. Test increasing values even length array without SIMD.
"""
result = arrayfunc.amin(self.inctest , nosimd=True)
self.assertEqual(result, min(self.inctest))
########################################################
def test_amin_general_function_03(self):
"""Test amin - Array code h. Test decreasing values even length array without SIMD.
"""
result = arrayfunc.amin(self.dectest , nosimd=True)
self.assertEqual(result, min(self.dectest))
########################################################
def test_amin_general_function_04(self):
    """Test amin - Array code h. Test finding max for data type even length array without SIMD.
    """
    # maxvaltest contains the maximum value for the data type; amin must
    # still agree with the native Python min().
    expected = min(self.maxvaltest)
    self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), expected)
########################################################
def test_amin_general_function_05(self):
    """Test amin - Array code h. Test finding value from array that contains min for data type even length array without SIMD.
    """
    # minvaltest contains the minimum value for the data type; amin must
    # still agree with the native Python min().
    expected = min(self.minvaltest)
    self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), expected)
########################################################
def test_amin_general_function_06(self):
    """Test amin - Array code h. Test optional maxlen parameter even length array without SIMD.
    """
    # With maxlen=5 only the first five elements should be examined.
    expected = min(self.maxvaltest[:5])
    self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), expected)
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_h(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise a simple even length 'h' test array.

        The array only needs to be a valid argument for the parameter
        tests, so it is filled with a single mid-range value.
        """
        # This code is template generated. The template emits a constant
        # 'even' == 'odd' comparison here; it is always false for this
        # class, so the dead branch (array extension of 5) has been folded
        # away and the base even length is used directly. The unused
        # MinVal local has also been removed.
        arraylength = 96
        MaxVal = arrayfunc.arraylimits.h_max
        self.gentest = array.array('h', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code h. Test invalid parameter type even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code h. Test missing parameter even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code h. Test excess parameters even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code h. Test invalid keyword parameter even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_h(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the 'h' test arrays.

        Builds odd length arrays containing interleaved, increasing,
        decreasing, and extreme-value data patterns spanning a wide range
        of the data type.
        """
        # This code is template generated. The template emits a constant
        # 'odd' == 'odd' comparison here; it is always true for this
        # class, so the dead branch has been folded into a direct
        # assignment. The extra 5 elements make the array length spill
        # over past the SIMD handler.
        arraylength = 96 + 5

        MaxVal = arrayfunc.arraylimits.h_max
        MinVal = arrayfunc.arraylimits.h_min

        # Convert to Python integers first. Python integers have no upper
        # limit, so the step calculation below cannot overflow, and for
        # float array codes range() is never handed floating point values.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)

        # Create some arbitrary data over a wide range of the data type.
        # This creates evenly spaced data over a range straddling the mid
        # point of the data.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100

        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))

        # Interleave rising and falling values for the general data set.
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        # Data sets containing the maximum and minimum values of the type.
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

        def filldata(data):
            # Cycle the data pattern to fill an array of the test length.
            return array.array('h', itertools.islice(itertools.cycle(data), arraylength))

        # Test arrays.
        self.gentest = filldata(gendata)
        self.inctest = filldata(incdata)
        self.dectest = filldata(decdata)
        self.maxvaltest = filldata(maxvaldata)
        self.minvaltest = filldata(minvaldata)

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code h. General test odd length array without SIMD.
        """
        result = arrayfunc.amin(self.gentest, nosimd=True)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code h. Test increasing values odd length array without SIMD.
        """
        result = arrayfunc.amin(self.inctest, nosimd=True)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code h. Test decreasing values odd length array without SIMD.
        """
        result = arrayfunc.amin(self.dectest, nosimd=True)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code h. Test an array containing the max for the data type, odd length array without SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code h. Test finding value from array that contains min for data type odd length array without SIMD.
        """
        result = arrayfunc.amin(self.minvaltest, nosimd=True)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code h. Test optional maxlen parameter odd length array without SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_h(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise a simple odd length 'h' test array.

        The array only needs to be a valid argument for the parameter
        tests, so it is filled with a single mid-range value.
        """
        # This code is template generated. The template emits a constant
        # 'odd' == 'odd' comparison here; it is always true for this
        # class, so the dead branch has been folded into a direct
        # assignment (base length plus an extension of 5). The unused
        # MinVal local has also been removed.
        arraylength = 96 + 5
        MaxVal = arrayfunc.arraylimits.h_max
        self.gentest = array.array('h', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code h. Test invalid parameter type odd length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code h. Test missing parameter odd length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code h. Test excess parameters odd length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code h. Test invalid keyword parameter odd length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_H(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the 'H' test arrays.

        Builds even length arrays containing interleaved, increasing,
        decreasing, and extreme-value data patterns spanning a wide range
        of the data type.
        """
        # This code is template generated. The template emits a constant
        # 'even' == 'odd' comparison here; it is always false for this
        # class, so the dead branch (array extension of 5) has been
        # folded away and the base even length is used directly.
        arraylength = 96

        MaxVal = arrayfunc.arraylimits.H_max
        MinVal = arrayfunc.arraylimits.H_min

        # Convert to Python integers first. Python integers have no upper
        # limit, so the step calculation below cannot overflow, and for
        # float array codes range() is never handed floating point values.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)

        # Create some arbitrary data over a wide range of the data type.
        # This creates evenly spaced data over a range straddling the mid
        # point of the data.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100

        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))

        # Interleave rising and falling values for the general data set.
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        # Data sets containing the maximum and minimum values of the type.
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

        def filldata(data):
            # Cycle the data pattern to fill an array of the test length.
            return array.array('H', itertools.islice(itertools.cycle(data), arraylength))

        # Test arrays.
        self.gentest = filldata(gendata)
        self.inctest = filldata(incdata)
        self.dectest = filldata(decdata)
        self.maxvaltest = filldata(maxvaldata)
        self.minvaltest = filldata(minvaldata)

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code H. General test even length array with SIMD.
        """
        result = arrayfunc.amin(self.gentest)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code H. Test increasing values even length array with SIMD.
        """
        result = arrayfunc.amin(self.inctest)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code H. Test decreasing values even length array with SIMD.
        """
        result = arrayfunc.amin(self.dectest)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code H. Test an array containing the max for the data type, even length array with SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code H. Test finding value from array that contains min for data type even length array with SIMD.
        """
        result = arrayfunc.amin(self.minvaltest)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code H. Test optional maxlen parameter even length array with SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, maxlen=5)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_H(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise a simple even length 'H' test array.

        The array only needs to be a valid argument for the parameter
        tests, so it is filled with a single mid-range value.
        """
        # This code is template generated. The template emits a constant
        # 'even' == 'odd' comparison here; it is always false for this
        # class, so the dead branch (array extension of 5) has been folded
        # away and the base even length is used directly. The unused
        # MinVal local has also been removed.
        arraylength = 96
        MaxVal = arrayfunc.arraylimits.H_max
        self.gentest = array.array('H', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code H. Test invalid parameter type even length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code H. Test missing parameter even length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code H. Test excess parameters even length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code H. Test invalid keyword parameter even length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_H(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the 'H' test arrays.

        Builds odd length arrays containing interleaved, increasing,
        decreasing, and extreme-value data patterns spanning a wide range
        of the data type.
        """
        # This code is template generated. The template emits a constant
        # 'odd' == 'odd' comparison here; it is always true for this
        # class, so the dead branch has been folded into a direct
        # assignment. The extra 5 elements make the array length spill
        # over past the SIMD handler.
        arraylength = 96 + 5

        MaxVal = arrayfunc.arraylimits.H_max
        MinVal = arrayfunc.arraylimits.H_min

        # Convert to Python integers first. Python integers have no upper
        # limit, so the step calculation below cannot overflow, and for
        # float array codes range() is never handed floating point values.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)

        # Create some arbitrary data over a wide range of the data type.
        # This creates evenly spaced data over a range straddling the mid
        # point of the data.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100

        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))

        # Interleave rising and falling values for the general data set.
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        # Data sets containing the maximum and minimum values of the type.
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

        def filldata(data):
            # Cycle the data pattern to fill an array of the test length.
            return array.array('H', itertools.islice(itertools.cycle(data), arraylength))

        # Test arrays.
        self.gentest = filldata(gendata)
        self.inctest = filldata(incdata)
        self.dectest = filldata(decdata)
        self.maxvaltest = filldata(maxvaldata)
        self.minvaltest = filldata(minvaldata)

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code H. General test odd length array with SIMD.
        """
        result = arrayfunc.amin(self.gentest)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code H. Test increasing values odd length array with SIMD.
        """
        result = arrayfunc.amin(self.inctest)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code H. Test decreasing values odd length array with SIMD.
        """
        result = arrayfunc.amin(self.dectest)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code H. Test an array containing the max for the data type, odd length array with SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code H. Test finding value from array that contains min for data type odd length array with SIMD.
        """
        result = arrayfunc.amin(self.minvaltest)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code H. Test optional maxlen parameter odd length array with SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, maxlen=5)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_H(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise a simple odd length 'H' test array.

        The array only needs to be a valid argument for the parameter
        tests, so it is filled with a single mid-range value.
        """
        # This code is template generated. The template emits a constant
        # 'odd' == 'odd' comparison here; it is always true for this
        # class, so the dead branch has been folded into a direct
        # assignment (base length plus an extension of 5). The unused
        # MinVal local has also been removed.
        arraylength = 96 + 5
        MaxVal = arrayfunc.arraylimits.H_max
        self.gentest = array.array('H', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code H. Test invalid parameter type odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code H. Test missing parameter odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code H. Test excess parameters odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code H. Test invalid keyword parameter odd length array with SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_H(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the 'H' test arrays.

        Builds even length arrays containing interleaved, increasing,
        decreasing, and extreme-value data patterns spanning a wide range
        of the data type.
        """
        # This code is template generated. The template emits a constant
        # 'even' == 'odd' comparison here; it is always false for this
        # class, so the dead branch (array extension of 5) has been
        # folded away and the base even length is used directly.
        arraylength = 96

        MaxVal = arrayfunc.arraylimits.H_max
        MinVal = arrayfunc.arraylimits.H_min

        # Convert to Python integers first. Python integers have no upper
        # limit, so the step calculation below cannot overflow, and for
        # float array codes range() is never handed floating point values.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)

        # Create some arbitrary data over a wide range of the data type.
        # This creates evenly spaced data over a range straddling the mid
        # point of the data.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100

        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))

        # Interleave rising and falling values for the general data set.
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        # Data sets containing the maximum and minimum values of the type.
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

        def filldata(data):
            # Cycle the data pattern to fill an array of the test length.
            return array.array('H', itertools.islice(itertools.cycle(data), arraylength))

        # Test arrays.
        self.gentest = filldata(gendata)
        self.inctest = filldata(incdata)
        self.dectest = filldata(decdata)
        self.maxvaltest = filldata(maxvaldata)
        self.minvaltest = filldata(minvaldata)

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code H. General test even length array without SIMD.
        """
        result = arrayfunc.amin(self.gentest, nosimd=True)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code H. Test increasing values even length array without SIMD.
        """
        result = arrayfunc.amin(self.inctest, nosimd=True)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code H. Test decreasing values even length array without SIMD.
        """
        result = arrayfunc.amin(self.dectest, nosimd=True)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code H. Test an array containing the max for the data type, even length array without SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code H. Test finding value from array that contains min for data type even length array without SIMD.
        """
        result = arrayfunc.amin(self.minvaltest, nosimd=True)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code H. Test optional maxlen parameter even length array without SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_H(unittest.TestCase):
    """Test amin for basic parameter tests.
    op_template_params
    """

    ########################################################
    def setUp(self):
        """Initialise a simple even length 'H' test array.

        The array only needs to be a valid argument for the parameter
        tests, so it is filled with a single mid-range value.
        """
        # This code is template generated. The template emits a constant
        # 'even' == 'odd' comparison here; it is always false for this
        # class, so the dead branch (array extension of 5) has been folded
        # away and the base even length is used directly. The unused
        # MinVal local has also been removed.
        arraylength = 96
        MaxVal = arrayfunc.arraylimits.H_max
        self.gentest = array.array('H', [MaxVal // 2] * arraylength)

    ########################################################
    def test_amin_param_function_01(self):
        """Test amin - Array code H. Test invalid parameter type even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(1, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(1)

    ########################################################
    def test_amin_param_function_02(self):
        """Test amin - Array code H. Test missing parameter even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin()
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min()

    ########################################################
    def test_amin_param_function_03(self):
        """Test amin - Array code H. Test excess parameters even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, 2)

    ########################################################
    def test_amin_param_function_04(self):
        """Test amin - Array code H. Test invalid keyword parameter even length array without SIMD.
        """
        with self.assertRaises(TypeError):
            result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
        # Check that the exception raised corresponds to the native Python behaviour.
        with self.assertRaises(TypeError):
            result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_H(unittest.TestCase):
    """Test amin for basic general function operation.
    op_template_general
    """

    ########################################################
    def setUp(self):
        """Initialise the 'H' test arrays.

        Builds odd length arrays containing interleaved, increasing,
        decreasing, and extreme-value data patterns spanning a wide range
        of the data type.
        """
        # This code is template generated. The template emits a constant
        # 'odd' == 'odd' comparison here; it is always true for this
        # class, so the dead branch has been folded into a direct
        # assignment. The extra 5 elements make the array length spill
        # over past the SIMD handler.
        arraylength = 96 + 5

        MaxVal = arrayfunc.arraylimits.H_max
        MinVal = arrayfunc.arraylimits.H_min

        # Convert to Python integers first. Python integers have no upper
        # limit, so the step calculation below cannot overflow, and for
        # float array codes range() is never handed floating point values.
        MaxInt = int(MaxVal)
        MinInt = int(MinVal)

        # Create some arbitrary data over a wide range of the data type.
        # This creates evenly spaced data over a range straddling the mid
        # point of the data.
        midpoint = (MaxInt + MinInt) // 2
        startval = (midpoint + MinInt) // 2
        endval = (midpoint + MaxInt) // 2
        stepval = (MaxInt - MinInt) // 100

        halfrangeinc = list(range(startval, endval, stepval))
        halfrangedec = list(range(endval, startval, -stepval))

        # Interleave rising and falling values for the general data set.
        gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
        incdata = halfrangeinc
        decdata = halfrangedec
        # Data sets containing the maximum and minimum values of the type.
        maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
        minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

        def filldata(data):
            # Cycle the data pattern to fill an array of the test length.
            return array.array('H', itertools.islice(itertools.cycle(data), arraylength))

        # Test arrays.
        self.gentest = filldata(gendata)
        self.inctest = filldata(incdata)
        self.dectest = filldata(decdata)
        self.maxvaltest = filldata(maxvaldata)
        self.minvaltest = filldata(minvaldata)

    ########################################################
    def test_amin_general_function_01(self):
        """Test amin - Array code H. General test odd length array without SIMD.
        """
        result = arrayfunc.amin(self.gentest, nosimd=True)
        self.assertEqual(result, min(self.gentest))

    ########################################################
    def test_amin_general_function_02(self):
        """Test amin - Array code H. Test increasing values odd length array without SIMD.
        """
        result = arrayfunc.amin(self.inctest, nosimd=True)
        self.assertEqual(result, min(self.inctest))

    ########################################################
    def test_amin_general_function_03(self):
        """Test amin - Array code H. Test decreasing values odd length array without SIMD.
        """
        result = arrayfunc.amin(self.dectest, nosimd=True)
        self.assertEqual(result, min(self.dectest))

    ########################################################
    def test_amin_general_function_04(self):
        """Test amin - Array code H. Test an array containing the max for the data type, odd length array without SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest))

    ########################################################
    def test_amin_general_function_05(self):
        """Test amin - Array code H. Test finding value from array that contains min for data type odd length array without SIMD.
        """
        result = arrayfunc.amin(self.minvaltest, nosimd=True)
        self.assertEqual(result, min(self.minvaltest))

    ########################################################
    def test_amin_general_function_06(self):
        """Test amin - Array code H. Test optional maxlen parameter odd length array without SIMD.
        """
        result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
        self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_H(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data array.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.H_max
		MinVal = arrayfunc.arraylimits.H_min

		self.gentest = array.array('H', itertools.repeat(MaxVal // 2, arraylength))

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code H. Test invalid parameter type odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code H. Test missing parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code H. Test excess parameters odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code H. Test invalid keyword parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_i(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.i_max
		MinVal = arrayfunc.arraylimits.i_min

		# Work in Python ints: range() requires integers, and Python ints
		# cannot overflow while computing the step value below.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced data straddling the mid point of the type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Cycle each data pattern out to exactly the required array length.
		def filledarray(data):
			return array.array('i', itertools.islice(itertools.cycle(data), arraylength))

		self.gentest = filledarray(gendata)
		self.inctest = filledarray(halfrangeinc)
		self.dectest = filledarray(halfrangedec)
		self.maxvaltest = filledarray(maxvaldata)
		self.minvaltest = filledarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code i. General test even length array with SIMD.
		"""
		expected = min(self.gentest)
		self.assertEqual(arrayfunc.amin(self.gentest), expected)

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code i. Test increasing values even length array with SIMD.
		"""
		expected = min(self.inctest)
		self.assertEqual(arrayfunc.amin(self.inctest), expected)

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code i. Test decreasing values even length array with SIMD.
		"""
		expected = min(self.dectest)
		self.assertEqual(arrayfunc.amin(self.dectest), expected)

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code i. Test finding max for data type even length array with SIMD.
		"""
		expected = min(self.maxvaltest)
		self.assertEqual(arrayfunc.amin(self.maxvaltest), expected)

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code i. Test finding value from array that contains min for data type even length array with SIMD.
		"""
		expected = min(self.minvaltest)
		self.assertEqual(arrayfunc.amin(self.minvaltest), expected)

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code i. Test optional maxlen parameter even length array with SIMD.
		"""
		# Only the first 5 elements should be examined when maxlen=5.
		expected = min(self.maxvaltest[:5])
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), expected)
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_i(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data array.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.i_max
		MinVal = arrayfunc.arraylimits.i_min

		self.gentest = array.array('i', itertools.repeat(MaxVal // 2, arraylength))

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code i. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code i. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code i. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code i. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_i(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.i_max
		MinVal = arrayfunc.arraylimits.i_min

		# Work in Python ints: range() requires integers, and Python ints
		# cannot overflow while computing the step value below.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced data straddling the mid point of the type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Cycle each data pattern out to exactly the required array length.
		def filledarray(data):
			return array.array('i', itertools.islice(itertools.cycle(data), arraylength))

		self.gentest = filledarray(gendata)
		self.inctest = filledarray(halfrangeinc)
		self.dectest = filledarray(halfrangedec)
		self.maxvaltest = filledarray(maxvaldata)
		self.minvaltest = filledarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code i. General test odd length array with SIMD.
		"""
		expected = min(self.gentest)
		self.assertEqual(arrayfunc.amin(self.gentest), expected)

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code i. Test increasing values odd length array with SIMD.
		"""
		expected = min(self.inctest)
		self.assertEqual(arrayfunc.amin(self.inctest), expected)

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code i. Test decreasing values odd length array with SIMD.
		"""
		expected = min(self.dectest)
		self.assertEqual(arrayfunc.amin(self.dectest), expected)

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code i. Test finding max for data type odd length array with SIMD.
		"""
		expected = min(self.maxvaltest)
		self.assertEqual(arrayfunc.amin(self.maxvaltest), expected)

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code i. Test finding value from array that contains min for data type odd length array with SIMD.
		"""
		expected = min(self.minvaltest)
		self.assertEqual(arrayfunc.amin(self.minvaltest), expected)

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code i. Test optional maxlen parameter odd length array with SIMD.
		"""
		# Only the first 5 elements should be examined when maxlen=5.
		expected = min(self.maxvaltest[:5])
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), expected)
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_i(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data array.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.i_max
		MinVal = arrayfunc.arraylimits.i_min

		self.gentest = array.array('i', itertools.repeat(MaxVal // 2, arraylength))

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code i. Test invalid parameter type odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code i. Test missing parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code i. Test excess parameters odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code i. Test invalid keyword parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_i(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.i_max
		MinVal = arrayfunc.arraylimits.i_min

		# Work in Python ints: range() requires integers, and Python ints
		# cannot overflow while computing the step value below.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced data straddling the mid point of the type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Cycle each data pattern out to exactly the required array length.
		def filledarray(data):
			return array.array('i', itertools.islice(itertools.cycle(data), arraylength))

		self.gentest = filledarray(gendata)
		self.inctest = filledarray(halfrangeinc)
		self.dectest = filledarray(halfrangedec)
		self.maxvaltest = filledarray(maxvaldata)
		self.minvaltest = filledarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code i. General test even length array without SIMD.
		"""
		expected = min(self.gentest)
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code i. Test increasing values even length array without SIMD.
		"""
		expected = min(self.inctest)
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code i. Test decreasing values even length array without SIMD.
		"""
		expected = min(self.dectest)
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code i. Test finding max for data type even length array without SIMD.
		"""
		expected = min(self.maxvaltest)
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code i. Test finding value from array that contains min for data type even length array without SIMD.
		"""
		expected = min(self.minvaltest)
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code i. Test optional maxlen parameter even length array without SIMD.
		"""
		# Only the first 5 elements should be examined when maxlen=5.
		expected = min(self.maxvaltest[:5])
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), expected)
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_i(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data array.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.i_max
		MinVal = arrayfunc.arraylimits.i_min

		self.gentest = array.array('i', itertools.repeat(MaxVal // 2, arraylength))

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code i. Test invalid parameter type even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code i. Test missing parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code i. Test excess parameters even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code i. Test invalid keyword parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_i(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.i_max
		MinVal = arrayfunc.arraylimits.i_min

		# Work in Python ints: range() requires integers, and Python ints
		# cannot overflow while computing the step value below.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced data straddling the mid point of the type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Cycle each data pattern out to exactly the required array length.
		def filledarray(data):
			return array.array('i', itertools.islice(itertools.cycle(data), arraylength))

		self.gentest = filledarray(gendata)
		self.inctest = filledarray(halfrangeinc)
		self.dectest = filledarray(halfrangedec)
		self.maxvaltest = filledarray(maxvaldata)
		self.minvaltest = filledarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code i. General test odd length array without SIMD.
		"""
		expected = min(self.gentest)
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code i. Test increasing values odd length array without SIMD.
		"""
		expected = min(self.inctest)
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code i. Test decreasing values odd length array without SIMD.
		"""
		expected = min(self.dectest)
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code i. Test finding max for data type odd length array without SIMD.
		"""
		expected = min(self.maxvaltest)
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code i. Test finding value from array that contains min for data type odd length array without SIMD.
		"""
		expected = min(self.minvaltest)
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), expected)

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code i. Test optional maxlen parameter odd length array without SIMD.
		"""
		# Only the first 5 elements should be examined when maxlen=5.
		expected = min(self.maxvaltest[:5])
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), expected)
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_i(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data array.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.i_max
		MinVal = arrayfunc.arraylimits.i_min

		self.gentest = array.array('i', itertools.repeat(MaxVal // 2, arraylength))

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code i. Test invalid parameter type odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code i. Test missing parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code i. Test excess parameters odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code i. Test invalid keyword parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_I(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays.
		"""
		# Template-generated compare: 'odd' sized arrays get extra elements
		# so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.I_max
		MinVal = arrayfunc.arraylimits.I_min

		# Work in Python ints: range() requires integers, and Python ints
		# cannot overflow while computing the step value below.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced data straddling the mid point of the type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Cycle each data pattern out to exactly the required array length.
		def filledarray(data):
			return array.array('I', itertools.islice(itertools.cycle(data), arraylength))

		self.gentest = filledarray(gendata)
		self.inctest = filledarray(halfrangeinc)
		self.dectest = filledarray(halfrangedec)
		self.maxvaltest = filledarray(maxvaldata)
		self.minvaltest = filledarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code I. General test even length array with SIMD.
		"""
		expected = min(self.gentest)
		self.assertEqual(arrayfunc.amin(self.gentest), expected)

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code I. Test increasing values even length array with SIMD.
		"""
		expected = min(self.inctest)
		self.assertEqual(arrayfunc.amin(self.inctest), expected)

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code I. Test decreasing values even length array with SIMD.
		"""
		expected = min(self.dectest)
		self.assertEqual(arrayfunc.amin(self.dectest), expected)

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code I. Test finding max for data type even length array with SIMD.
		"""
		expected = min(self.maxvaltest)
		self.assertEqual(arrayfunc.amin(self.maxvaltest), expected)

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code I. Test finding value from array that contains min for data type even length array with SIMD.
		"""
		expected = min(self.minvaltest)
		self.assertEqual(arrayfunc.amin(self.minvaltest), expected)

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code I. Test optional maxlen parameter even length array with SIMD.
		"""
		# Only the first 5 elements should be examined when maxlen=5.
		expected = min(self.maxvaltest[:5])
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), expected)
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_I(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a uniform test array of array code 'I'.
		"""
		# This is the 'even' template variant: the array length is left as
		# an exact multiple suited to the SIMD handler, with no spill-over
		# extension. (The template's constant parity comparison has been
		# folded away.)
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.I_max
		self.gentest = array.array('I', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code I. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code I. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code I. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code I. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_I(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise test arrays spanning a wide range of array code 'I'.
		"""
		# This is the 'odd' template variant: extend the array by 5 elements
		# so processing spills past the SIMD register width. (The template's
		# constant parity comparison has been folded away.)
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.I_max
		MinVal = arrayfunc.arraylimits.I_min
		# Convert to Python ints first: range() needs integer arguments when
		# the limits come from a floating point type, and double precision
		# would overflow when calculating the step value. Python integers
		# have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Create evenly spaced data over a range straddling the mid point
		# of the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing patterns.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		def fillarray(data):
			# Repeat the pattern cyclically, truncated to the array length.
			return array.array('I', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = fillarray(gendata)
		self.inctest = fillarray(incdata)
		self.dectest = fillarray(decdata)
		self.maxvaltest = fillarray(maxvaldata)
		self.minvaltest = fillarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code I. General test odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.gentest)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code I. Test increasing values odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.inctest)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code I. Test decreasing values odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.dectest)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code I. Test finding max for data type odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code I. Test finding value from array that contains min for data type odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code I. Test optional maxlen parameter odd length array with SIMD.
		"""
		# maxlen=5 must restrict the search to the first five elements only.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_I(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a uniform test array of array code 'I'.
		"""
		# This is the 'odd' template variant: extend the array by 5 elements
		# so processing spills past the SIMD register width. (The template's
		# constant parity comparison has been folded away.)
		arraylength = 96 + 5
		MaxVal = arrayfunc.arraylimits.I_max
		self.gentest = array.array('I', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code I. Test invalid parameter type odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code I. Test missing parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code I. Test excess parameters odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code I. Test invalid keyword parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_I(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise test arrays spanning a wide range of array code 'I'.
		"""
		# This is the 'even' template variant: the array length is left as
		# an exact multiple suited to the SIMD handler, with no spill-over
		# extension. (The template's constant parity comparison has been
		# folded away.)
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.I_max
		MinVal = arrayfunc.arraylimits.I_min
		# Convert to Python ints first: range() needs integer arguments when
		# the limits come from a floating point type, and double precision
		# would overflow when calculating the step value. Python integers
		# have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Create evenly spaced data over a range straddling the mid point
		# of the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing patterns.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		def fillarray(data):
			# Repeat the pattern cyclically, truncated to the array length.
			return array.array('I', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = fillarray(gendata)
		self.inctest = fillarray(incdata)
		self.dectest = fillarray(decdata)
		self.maxvaltest = fillarray(maxvaldata)
		self.minvaltest = fillarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code I. General test even length array without SIMD.
		"""
		result = arrayfunc.amin(self.gentest, nosimd=True)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code I. Test increasing values even length array without SIMD.
		"""
		result = arrayfunc.amin(self.inctest, nosimd=True)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code I. Test decreasing values even length array without SIMD.
		"""
		result = arrayfunc.amin(self.dectest, nosimd=True)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code I. Test finding max for data type even length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code I. Test finding value from array that contains min for data type even length array without SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest, nosimd=True)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code I. Test optional maxlen parameter even length array without SIMD.
		"""
		# maxlen=5 must restrict the search to the first five elements only.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_I(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a uniform test array of array code 'I'.
		"""
		# This is the 'even' template variant: the array length is left as
		# an exact multiple suited to the SIMD handler, with no spill-over
		# extension. (The template's constant parity comparison has been
		# folded away.)
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.I_max
		self.gentest = array.array('I', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code I. Test invalid parameter type even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code I. Test missing parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code I. Test excess parameters even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code I. Test invalid keyword parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_I(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise test arrays spanning a wide range of array code 'I'.
		"""
		# This is the 'odd' template variant: extend the array by 5 elements
		# so processing spills past the SIMD register width. (The template's
		# constant parity comparison has been folded away.)
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.I_max
		MinVal = arrayfunc.arraylimits.I_min
		# Convert to Python ints first: range() needs integer arguments when
		# the limits come from a floating point type, and double precision
		# would overflow when calculating the step value. Python integers
		# have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Create evenly spaced data over a range straddling the mid point
		# of the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing patterns.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		def fillarray(data):
			# Repeat the pattern cyclically, truncated to the array length.
			return array.array('I', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = fillarray(gendata)
		self.inctest = fillarray(incdata)
		self.dectest = fillarray(decdata)
		self.maxvaltest = fillarray(maxvaldata)
		self.minvaltest = fillarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code I. General test odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.gentest, nosimd=True)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code I. Test increasing values odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.inctest, nosimd=True)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code I. Test decreasing values odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.dectest, nosimd=True)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code I. Test finding max for data type odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code I. Test finding value from array that contains min for data type odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest, nosimd=True)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code I. Test optional maxlen parameter odd length array without SIMD.
		"""
		# maxlen=5 must restrict the search to the first five elements only.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_I(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a uniform test array of array code 'I'.
		"""
		# This is the 'odd' template variant: extend the array by 5 elements
		# so processing spills past the SIMD register width. (The template's
		# constant parity comparison has been folded away.)
		arraylength = 96 + 5
		MaxVal = arrayfunc.arraylimits.I_max
		self.gentest = array.array('I', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code I. Test invalid parameter type odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code I. Test missing parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code I. Test excess parameters odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code I. Test invalid keyword parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_l(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise test arrays spanning a wide range of array code 'l'.
		"""
		# This is the 'even' template variant: the array length is left as
		# an exact multiple suited to the SIMD handler, with no spill-over
		# extension. (The template's constant parity comparison has been
		# folded away.)
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.l_max
		MinVal = arrayfunc.arraylimits.l_min
		# Convert to Python ints first: range() needs integer arguments when
		# the limits come from a floating point type, and double precision
		# would overflow when calculating the step value. Python integers
		# have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Create evenly spaced data over a range straddling the mid point
		# of the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing patterns.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		def fillarray(data):
			# Repeat the pattern cyclically, truncated to the array length.
			return array.array('l', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = fillarray(gendata)
		self.inctest = fillarray(incdata)
		self.dectest = fillarray(decdata)
		self.maxvaltest = fillarray(maxvaldata)
		self.minvaltest = fillarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code l. General test even length array with SIMD.
		"""
		result = arrayfunc.amin(self.gentest)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code l. Test increasing values even length array with SIMD.
		"""
		result = arrayfunc.amin(self.inctest)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code l. Test decreasing values even length array with SIMD.
		"""
		result = arrayfunc.amin(self.dectest)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code l. Test finding max for data type even length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code l. Test finding value from array that contains min for data type even length array with SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code l. Test optional maxlen parameter even length array with SIMD.
		"""
		# maxlen=5 must restrict the search to the first five elements only.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_l(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a uniform test array of array code 'l'.
		"""
		# This is the 'even' template variant: the array length is left as
		# an exact multiple suited to the SIMD handler, with no spill-over
		# extension. (The template's constant parity comparison has been
		# folded away.)
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.l_max
		self.gentest = array.array('l', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code l. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code l. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code l. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code l. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_l(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise test arrays spanning a wide range of array code 'l'.
		"""
		# This is the 'odd' template variant: extend the array by 5 elements
		# so processing spills past the SIMD register width. (The template's
		# constant parity comparison has been folded away.)
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.l_max
		MinVal = arrayfunc.arraylimits.l_min
		# Convert to Python ints first: range() needs integer arguments when
		# the limits come from a floating point type, and double precision
		# would overflow when calculating the step value. Python integers
		# have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Create evenly spaced data over a range straddling the mid point
		# of the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing patterns.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		def fillarray(data):
			# Repeat the pattern cyclically, truncated to the array length.
			return array.array('l', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = fillarray(gendata)
		self.inctest = fillarray(incdata)
		self.dectest = fillarray(decdata)
		self.maxvaltest = fillarray(maxvaldata)
		self.minvaltest = fillarray(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code l. General test odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.gentest)
		self.assertEqual(result, min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code l. Test increasing values odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.inctest)
		self.assertEqual(result, min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code l. Test decreasing values odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.dectest)
		self.assertEqual(result, min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code l. Test finding max for data type odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest)
		self.assertEqual(result, min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code l. Test finding value from array that contains min for data type odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest)
		self.assertEqual(result, min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code l. Test optional maxlen parameter odd length array with SIMD.
		"""
		# maxlen=5 must restrict the search to the first five elements only.
		result = arrayfunc.amin(self.maxvaltest, maxlen=5)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_l(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'odd' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.l_max
		MinVal = arrayfunc.arraylimits.l_min
		# A simple constant-filled array is all the parameter tests need.
		self.gentest = array.array('l', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code l. Test invalid parameter type odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code l. Test missing parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code l. Test excess parameters odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code l. Test invalid keyword parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_l(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'even' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.l_max
		MinVal = arrayfunc.arraylimits.l_min

		# Convert to Python ints first. This template is shared with the
		# floating point versions, where range() would reject float
		# arguments and the step calculation could overflow in double
		# precision. Python ints have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced values over a range straddling the mid point of
		# the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		# Interleaved, increasing, decreasing, and max/min-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays: cycle the source data to fill the required length.
		self.gentest = array.array('l', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('l', itertools.islice(itertools.cycle(incdata), arraylength))
		self.dectest = array.array('l', itertools.islice(itertools.cycle(decdata), arraylength))
		self.maxvaltest = array.array('l', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('l', itertools.islice(itertools.cycle(minvaldata), arraylength))

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code l. General test even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code l. Test increasing values even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code l. Test decreasing values even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code l. Test finding max for data type even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code l. Test finding value from array that contains min for data type even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code l. Test optional maxlen parameter even length array without SIMD.
		"""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_l(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'even' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.l_max
		MinVal = arrayfunc.arraylimits.l_min
		# A simple constant-filled array is all the parameter tests need.
		self.gentest = array.array('l', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code l. Test invalid parameter type even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(1, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code l. Test missing parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code l. Test excess parameters even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code l. Test invalid keyword parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_l(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'odd' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.l_max
		MinVal = arrayfunc.arraylimits.l_min

		# Convert to Python ints first. This template is shared with the
		# floating point versions, where range() would reject float
		# arguments and the step calculation could overflow in double
		# precision. Python ints have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced values over a range straddling the mid point of
		# the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		# Interleaved, increasing, decreasing, and max/min-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays: cycle the source data to fill the required length.
		self.gentest = array.array('l', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('l', itertools.islice(itertools.cycle(incdata), arraylength))
		self.dectest = array.array('l', itertools.islice(itertools.cycle(decdata), arraylength))
		self.maxvaltest = array.array('l', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('l', itertools.islice(itertools.cycle(minvaldata), arraylength))

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code l. General test odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code l. Test increasing values odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code l. Test decreasing values odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code l. Test finding max for data type odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code l. Test finding value from array that contains min for data type odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code l. Test optional maxlen parameter odd length array without SIMD.
		"""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_l(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'odd' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.l_max
		MinVal = arrayfunc.arraylimits.l_min
		# A simple constant-filled array is all the parameter tests need.
		self.gentest = array.array('l', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code l. Test invalid parameter type odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(1, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code l. Test missing parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code l. Test excess parameters odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code l. Test invalid keyword parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_L(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'even' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.L_max
		MinVal = arrayfunc.arraylimits.L_min

		# Convert to Python ints first. This template is shared with the
		# floating point versions, where range() would reject float
		# arguments and the step calculation could overflow in double
		# precision. Python ints have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced values over a range straddling the mid point of
		# the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		# Interleaved, increasing, decreasing, and max/min-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays: cycle the source data to fill the required length.
		self.gentest = array.array('L', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('L', itertools.islice(itertools.cycle(incdata), arraylength))
		self.dectest = array.array('L', itertools.islice(itertools.cycle(decdata), arraylength))
		self.maxvaltest = array.array('L', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('L', itertools.islice(itertools.cycle(minvaldata), arraylength))

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code L. General test even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code L. Test increasing values even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code L. Test decreasing values even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code L. Test finding max for data type even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code L. Test finding value from array that contains min for data type even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code L. Test optional maxlen parameter even length array with SIMD.
		"""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_L(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'even' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.L_max
		MinVal = arrayfunc.arraylimits.L_min
		# A simple constant-filled array is all the parameter tests need.
		self.gentest = array.array('L', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code L. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code L. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code L. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code L. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_L(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'odd' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.L_max
		MinVal = arrayfunc.arraylimits.L_min

		# Convert to Python ints first. This template is shared with the
		# floating point versions, where range() would reject float
		# arguments and the step calculation could overflow in double
		# precision. Python ints have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Evenly spaced values over a range straddling the mid point of
		# the data type.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100

		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))

		# Interleaved, increasing, decreasing, and max/min-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays: cycle the source data to fill the required length.
		self.gentest = array.array('L', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('L', itertools.islice(itertools.cycle(incdata), arraylength))
		self.dectest = array.array('L', itertools.islice(itertools.cycle(decdata), arraylength))
		self.maxvaltest = array.array('L', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('L', itertools.islice(itertools.cycle(minvaldata), arraylength))

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code L. General test odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code L. Test increasing values odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code L. Test decreasing values odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code L. Test finding max for data type odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code L. Test finding value from array that contains min for data type odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code L. Test optional maxlen parameter odd length array with SIMD.
		"""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_L(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# Template-generated: the 'odd' literal selects whether the array is
		# extended so the data spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.L_max
		MinVal = arrayfunc.arraylimits.L_min
		# A simple constant-filled array is all the parameter tests need.
		self.gentest = array.array('L', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code L. Test invalid parameter type odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code L. Test missing parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code L. Test excess parameters odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code L. Test invalid keyword parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			_ = arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			_ = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_L(unittest.TestCase):
"""Test amin for basic general function operation.
op_template_general
"""
########################################################
def setUp(self):
"""Initialise.
"""
# We use a template to generate this code, so the following
# compare is inserted into the template to generate code which
# spills over past the SIMD handler.
if 'even' == 'odd':
arrayextension = 5
else:
arrayextension = 0
arraylength = 96 + arrayextension
MaxVal = arrayfunc.arraylimits.L_max
MinVal = arrayfunc.arraylimits.L_min
# This is generated by a common template, so we need to make
# sure that in cases where we are using floating point values
# we don't pass floating point values for range().
# Plus, double precision calcuations will overflow when calculating
# the step value unless we convert to integer first. Python
# integers have no upper limit, and so will not overflow.
MaxInt = int(MaxVal)
MinInt = int(MinVal)
# Create some arbitrary data over a wide range of the data type. This
# creates evenly spaced data over a range straddling the mid point of the data.
midpoint = (MaxInt + MinInt) // 2
startval = (midpoint + MinInt) // 2
endval = (midpoint + MaxInt) // 2
stepval = (MaxInt - MinInt) // 100
halfrangeinc = list(range(startval, endval, stepval))
halfrangedec = list(range(endval, startval, -stepval))
gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
incdata = halfrangeinc
decdata = halfrangedec
maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
minvaldata = list((itertools.chain(halfrangeinc, [MinVal], halfrangedec)))
# Test arrays.
self.gentest = array.array('L', [x for x,y in zip(itertools.cycle(gendata), range(arraylength))])
self.inctest = array.array('L', [x for x,y in zip(itertools.cycle(incdata), range(arraylength))])
self.dectest = array.array('L', [x for x,y in zip(itertools.cycle(decdata), range(arraylength))])
self.maxvaltest = array.array('L', [x for x,y in zip(itertools.cycle(maxvaldata), range(arraylength))])
self.minvaltest = array.array('L', [x for x,y in zip(itertools.cycle(minvaldata), range(arraylength))])
########################################################
def test_amin_general_function_01(self):
"""Test amin - Array code L. General test even length array without SIMD.
"""
result = arrayfunc.amin(self.gentest , nosimd=True)
self.assertEqual(result, min(self.gentest))
########################################################
def test_amin_general_function_02(self):
"""Test amin - Array code L. Test increasing values even length array without SIMD.
"""
result = arrayfunc.amin(self.inctest , nosimd=True)
self.assertEqual(result, min(self.inctest))
########################################################
def test_amin_general_function_03(self):
"""Test amin - Array code L. Test decreasing values even length array without SIMD.
"""
result = arrayfunc.amin(self.dectest , nosimd=True)
self.assertEqual(result, min(self.dectest))
########################################################
def test_amin_general_function_04(self):
	"""Test amin - Array code L. Test finding max for data type even length array without SIMD.
	"""
	# The array contains the data type's maximum value; the result
	# must still match Python's builtin min().
	self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))
########################################################
def test_amin_general_function_05(self):
	"""Test amin - Array code L. Test finding value from array that contains min for data type even length array without SIMD.
	"""
	# The array contains the data type's minimum value; the result
	# must still match Python's builtin min().
	self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))
########################################################
def test_amin_general_function_06(self):
	"""Test amin - Array code L. Test optional maxlen parameter even length array without SIMD.
	"""
	# With maxlen=5 only the first five elements are searched.
	self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_L(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a test array of array code 'L'.
		"""
		# This is template generated code. This variant uses an even
		# array length, so no extra elements are needed to spill past
		# the SIMD register boundary.
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.L_max
		self.gentest = array.array('L', [MaxVal // 2] * arraylength)


	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code L. Test invalid parameter type even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)


	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code L. Test missing parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()


	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code L. Test excess parameters even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)


	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code L. Test invalid keyword parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_L(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays for array code 'L'.

		Builds five arrays covering a wide range of the data type:
		mixed values, monotonic increasing, monotonic decreasing, and
		arrays containing the type's maximum and minimum values.
		"""
		# This is template generated code. This variant uses an odd
		# array length: add extra elements so the data spills past the
		# SIMD register boundary.
		arrayextension = 5
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.L_max
		MinVal = arrayfunc.arraylimits.L_min

		# Convert to Python integers first. For floating point types
		# this keeps range() arguments integral, and because Python
		# ints have no upper limit the step calculation cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Create some arbitrary data over a wide range of the data type. This
		# creates evenly spaced data over a range straddling the mid point of the data.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays. Each repeats its source data cyclically until
		# the required array length is reached.
		self.gentest = array.array('L', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('L', itertools.islice(itertools.cycle(halfrangeinc), arraylength))
		self.dectest = array.array('L', itertools.islice(itertools.cycle(halfrangedec), arraylength))
		self.maxvaltest = array.array('L', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('L', itertools.islice(itertools.cycle(minvaldata), arraylength))


	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code L. General test odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.gentest, nosimd=True)
		self.assertEqual(result, min(self.gentest))


	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code L. Test increasing values odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.inctest, nosimd=True)
		self.assertEqual(result, min(self.inctest))


	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code L. Test decreasing values odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.dectest, nosimd=True)
		self.assertEqual(result, min(self.dectest))


	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code L. Test finding max for data type odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest))


	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code L. Test finding value from array that contains min for data type odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest, nosimd=True)
		self.assertEqual(result, min(self.minvaltest))


	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code L. Test optional maxlen parameter odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_L(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a test array of array code 'L'.
		"""
		# This is template generated code. This variant uses an odd
		# array length: add extra elements so the data spills past the
		# SIMD register boundary.
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.L_max
		self.gentest = array.array('L', [MaxVal // 2] * arraylength)


	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code L. Test invalid parameter type odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)


	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code L. Test missing parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()


	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code L. Test excess parameters odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)


	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code L. Test invalid keyword parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_q(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays for array code 'q'.

		Builds five arrays covering a wide range of the data type:
		mixed values, monotonic increasing, monotonic decreasing, and
		arrays containing the type's maximum and minimum values.
		"""
		# This is template generated code. This variant uses an even
		# array length, so no extra elements are needed to spill past
		# the SIMD register boundary.
		arraylength = 96

		MaxVal = arrayfunc.arraylimits.q_max
		MinVal = arrayfunc.arraylimits.q_min

		# Convert to Python integers first. For floating point types
		# this keeps range() arguments integral, and because Python
		# ints have no upper limit the step calculation cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Create some arbitrary data over a wide range of the data type. This
		# creates evenly spaced data over a range straddling the mid point of the data.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays. Each repeats its source data cyclically until
		# the required array length is reached.
		self.gentest = array.array('q', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('q', itertools.islice(itertools.cycle(halfrangeinc), arraylength))
		self.dectest = array.array('q', itertools.islice(itertools.cycle(halfrangedec), arraylength))
		self.maxvaltest = array.array('q', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('q', itertools.islice(itertools.cycle(minvaldata), arraylength))


	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code q. General test even length array with SIMD.
		"""
		result = arrayfunc.amin(self.gentest)
		self.assertEqual(result, min(self.gentest))


	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code q. Test increasing values even length array with SIMD.
		"""
		result = arrayfunc.amin(self.inctest)
		self.assertEqual(result, min(self.inctest))


	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code q. Test decreasing values even length array with SIMD.
		"""
		result = arrayfunc.amin(self.dectest)
		self.assertEqual(result, min(self.dectest))


	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code q. Test finding max for data type even length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest)
		self.assertEqual(result, min(self.maxvaltest))


	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code q. Test finding value from array that contains min for data type even length array with SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest)
		self.assertEqual(result, min(self.minvaltest))


	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code q. Test optional maxlen parameter even length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, maxlen=5)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_q(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a test array of array code 'q'.
		"""
		# This is template generated code. This variant uses an even
		# array length, so no extra elements are needed to spill past
		# the SIMD register boundary.
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.q_max
		self.gentest = array.array('q', [MaxVal // 2] * arraylength)


	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code q. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)


	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code q. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()


	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code q. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)


	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code q. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_q(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays for array code 'q'.

		Builds five arrays covering a wide range of the data type:
		mixed values, monotonic increasing, monotonic decreasing, and
		arrays containing the type's maximum and minimum values.
		"""
		# This is template generated code. This variant uses an odd
		# array length: add extra elements so the data spills past the
		# SIMD register boundary.
		arrayextension = 5
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.q_max
		MinVal = arrayfunc.arraylimits.q_min

		# Convert to Python integers first. For floating point types
		# this keeps range() arguments integral, and because Python
		# ints have no upper limit the step calculation cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Create some arbitrary data over a wide range of the data type. This
		# creates evenly spaced data over a range straddling the mid point of the data.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays. Each repeats its source data cyclically until
		# the required array length is reached.
		self.gentest = array.array('q', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('q', itertools.islice(itertools.cycle(halfrangeinc), arraylength))
		self.dectest = array.array('q', itertools.islice(itertools.cycle(halfrangedec), arraylength))
		self.maxvaltest = array.array('q', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('q', itertools.islice(itertools.cycle(minvaldata), arraylength))


	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code q. General test odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.gentest)
		self.assertEqual(result, min(self.gentest))


	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code q. Test increasing values odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.inctest)
		self.assertEqual(result, min(self.inctest))


	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code q. Test decreasing values odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.dectest)
		self.assertEqual(result, min(self.dectest))


	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code q. Test finding max for data type odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest)
		self.assertEqual(result, min(self.maxvaltest))


	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code q. Test finding value from array that contains min for data type odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest)
		self.assertEqual(result, min(self.minvaltest))


	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code q. Test optional maxlen parameter odd length array with SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, maxlen=5)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_q(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a test array of array code 'q'.
		"""
		# This is template generated code. This variant uses an odd
		# array length: add extra elements so the data spills past the
		# SIMD register boundary.
		arrayextension = 5
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.q_max
		self.gentest = array.array('q', [MaxVal // 2] * arraylength)


	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code q. Test invalid parameter type odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)


	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code q. Test missing parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()


	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code q. Test excess parameters odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)


	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code q. Test invalid keyword parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_q(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays for array code 'q'.

		Builds five arrays covering a wide range of the data type:
		mixed values, monotonic increasing, monotonic decreasing, and
		arrays containing the type's maximum and minimum values.
		"""
		# This is template generated code. This variant uses an even
		# array length, so no extra elements are needed to spill past
		# the SIMD register boundary.
		arraylength = 96

		MaxVal = arrayfunc.arraylimits.q_max
		MinVal = arrayfunc.arraylimits.q_min

		# Convert to Python integers first. For floating point types
		# this keeps range() arguments integral, and because Python
		# ints have no upper limit the step calculation cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Create some arbitrary data over a wide range of the data type. This
		# creates evenly spaced data over a range straddling the mid point of the data.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays. Each repeats its source data cyclically until
		# the required array length is reached.
		self.gentest = array.array('q', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('q', itertools.islice(itertools.cycle(halfrangeinc), arraylength))
		self.dectest = array.array('q', itertools.islice(itertools.cycle(halfrangedec), arraylength))
		self.maxvaltest = array.array('q', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('q', itertools.islice(itertools.cycle(minvaldata), arraylength))


	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code q. General test even length array without SIMD.
		"""
		result = arrayfunc.amin(self.gentest, nosimd=True)
		self.assertEqual(result, min(self.gentest))


	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code q. Test increasing values even length array without SIMD.
		"""
		result = arrayfunc.amin(self.inctest, nosimd=True)
		self.assertEqual(result, min(self.inctest))


	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code q. Test decreasing values even length array without SIMD.
		"""
		result = arrayfunc.amin(self.dectest, nosimd=True)
		self.assertEqual(result, min(self.dectest))


	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code q. Test finding max for data type even length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest))


	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code q. Test finding value from array that contains min for data type even length array without SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest, nosimd=True)
		self.assertEqual(result, min(self.minvaltest))


	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code q. Test optional maxlen parameter even length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_q(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise a test array of array code 'q'.
		"""
		# This is template generated code. This variant uses an even
		# array length, so no extra elements are needed to spill past
		# the SIMD register boundary.
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.q_max
		self.gentest = array.array('q', [MaxVal // 2] * arraylength)


	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code q. Test invalid parameter type even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(1, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(1)


	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code q. Test missing parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin()

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min()


	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code q. Test excess parameters even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, 2)


	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code q. Test invalid keyword parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			result = arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)

		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			result = min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_q(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays for array code 'q'.

		Builds five arrays covering a wide range of the data type:
		mixed values, monotonic increasing, monotonic decreasing, and
		arrays containing the type's maximum and minimum values.
		"""
		# This is template generated code. This variant uses an odd
		# array length: add extra elements so the data spills past the
		# SIMD register boundary.
		arrayextension = 5
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.q_max
		MinVal = arrayfunc.arraylimits.q_min

		# Convert to Python integers first. For floating point types
		# this keeps range() arguments integral, and because Python
		# ints have no upper limit the step calculation cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)

		# Create some arbitrary data over a wide range of the data type. This
		# creates evenly spaced data over a range straddling the mid point of the data.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))

		# Test arrays. Each repeats its source data cyclically until
		# the required array length is reached.
		self.gentest = array.array('q', itertools.islice(itertools.cycle(gendata), arraylength))
		self.inctest = array.array('q', itertools.islice(itertools.cycle(halfrangeinc), arraylength))
		self.dectest = array.array('q', itertools.islice(itertools.cycle(halfrangedec), arraylength))
		self.maxvaltest = array.array('q', itertools.islice(itertools.cycle(maxvaldata), arraylength))
		self.minvaltest = array.array('q', itertools.islice(itertools.cycle(minvaldata), arraylength))


	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code q. General test odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.gentest, nosimd=True)
		self.assertEqual(result, min(self.gentest))


	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code q. Test increasing values odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.inctest, nosimd=True)
		self.assertEqual(result, min(self.inctest))


	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code q. Test decreasing values odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.dectest, nosimd=True)
		self.assertEqual(result, min(self.dectest))


	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code q. Test finding max for data type odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest))


	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code q. Test finding value from array that contains min for data type odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.minvaltest, nosimd=True)
		self.assertEqual(result, min(self.minvaltest))


	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code q. Test optional maxlen parameter odd length array without SIMD.
		"""
		result = arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True)
		self.assertEqual(result, min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_q(unittest.TestCase):
	"""Test amin for basic parameter tests (op_template_params)."""

	########################################################
	def setUp(self):
		"""Initialise the test data array."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.q_max
		MinVal = arrayfunc.arraylimits.q_min
		self.gentest = array.array('q', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code q. Test invalid parameter type odd length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code q. Test missing parameter odd length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code q. Test excess parameters odd length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code q. Test invalid keyword parameter odd length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_Q(unittest.TestCase):
	"""Test amin for basic general function operation (op_template_general)."""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.Q_max
		MinVal = arrayfunc.arraylimits.Q_min
		# Convert to Python ints first. This template is shared with the
		# floating point cases, where the limits would be invalid for
		# range() and double precision would overflow when calculating the
		# step value. Python ints have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Arbitrary, evenly spaced data straddling the mid point of the
		# data type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = halfrangeinc + [MaxVal] + halfrangedec
		minvaldata = halfrangeinc + [MinVal] + halfrangedec

		def filled(data):
			# Repeat 'data' cyclically out to the required array length.
			return array.array('Q', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code Q. General test even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code Q. Test increasing values even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code Q. Test decreasing values even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code Q. Test finding max for data type even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code Q. Test finding value from array that contains min for data type even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code Q. Test optional maxlen parameter even length array with SIMD."""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_Q(unittest.TestCase):
	"""Test amin for basic parameter tests (op_template_params)."""

	########################################################
	def setUp(self):
		"""Initialise the test data array."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.Q_max
		MinVal = arrayfunc.arraylimits.Q_min
		self.gentest = array.array('Q', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code Q. Test invalid parameter type even length array with SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code Q. Test missing parameter even length array with SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code Q. Test excess parameters even length array with SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code Q. Test invalid keyword parameter even length array with SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_Q(unittest.TestCase):
	"""Test amin for basic general function operation (op_template_general)."""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.Q_max
		MinVal = arrayfunc.arraylimits.Q_min
		# Convert to Python ints first. This template is shared with the
		# floating point cases, where the limits would be invalid for
		# range() and double precision would overflow when calculating the
		# step value. Python ints have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Arbitrary, evenly spaced data straddling the mid point of the
		# data type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = halfrangeinc + [MaxVal] + halfrangedec
		minvaldata = halfrangeinc + [MinVal] + halfrangedec

		def filled(data):
			# Repeat 'data' cyclically out to the required array length.
			return array.array('Q', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code Q. General test odd length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code Q. Test increasing values odd length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code Q. Test decreasing values odd length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code Q. Test finding max for data type odd length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code Q. Test finding value from array that contains min for data type odd length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code Q. Test optional maxlen parameter odd length array with SIMD."""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_Q(unittest.TestCase):
	"""Test amin for basic parameter tests (op_template_params)."""

	########################################################
	def setUp(self):
		"""Initialise the test data array."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.Q_max
		MinVal = arrayfunc.arraylimits.Q_min
		self.gentest = array.array('Q', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code Q. Test invalid parameter type odd length array with SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code Q. Test missing parameter odd length array with SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code Q. Test excess parameters odd length array with SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code Q. Test invalid keyword parameter odd length array with SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_Q(unittest.TestCase):
	"""Test amin for basic general function operation (op_template_general)."""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.Q_max
		MinVal = arrayfunc.arraylimits.Q_min
		# Convert to Python ints first. This template is shared with the
		# floating point cases, where the limits would be invalid for
		# range() and double precision would overflow when calculating the
		# step value. Python ints have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Arbitrary, evenly spaced data straddling the mid point of the
		# data type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = halfrangeinc + [MaxVal] + halfrangedec
		minvaldata = halfrangeinc + [MinVal] + halfrangedec

		def filled(data):
			# Repeat 'data' cyclically out to the required array length.
			return array.array('Q', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code Q. General test even length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code Q. Test increasing values even length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code Q. Test decreasing values even length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code Q. Test finding max for data type even length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code Q. Test finding value from array that contains min for data type even length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code Q. Test optional maxlen parameter even length array without SIMD."""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_Q(unittest.TestCase):
	"""Test amin for basic parameter tests (op_template_params)."""

	########################################################
	def setUp(self):
		"""Initialise the test data array."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.Q_max
		MinVal = arrayfunc.arraylimits.Q_min
		self.gentest = array.array('Q', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code Q. Test invalid parameter type even length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code Q. Test missing parameter even length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code Q. Test excess parameters even length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code Q. Test invalid keyword parameter even length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_Q(unittest.TestCase):
	"""Test amin for basic general function operation (op_template_general)."""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.Q_max
		MinVal = arrayfunc.arraylimits.Q_min
		# Convert to Python ints first. This template is shared with the
		# floating point cases, where the limits would be invalid for
		# range() and double precision would overflow when calculating the
		# step value. Python ints have no upper limit and cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Arbitrary, evenly spaced data straddling the mid point of the
		# data type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = halfrangeinc + [MaxVal] + halfrangedec
		minvaldata = halfrangeinc + [MinVal] + halfrangedec

		def filled(data):
			# Repeat 'data' cyclically out to the required array length.
			return array.array('Q', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code Q. General test odd length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code Q. Test increasing values odd length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code Q. Test decreasing values odd length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code Q. Test finding max for data type odd length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code Q. Test finding value from array that contains min for data type odd length array without SIMD."""
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code Q. Test optional maxlen parameter odd length array without SIMD."""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_Q(unittest.TestCase):
	"""Test amin for basic parameter tests (op_template_params)."""

	########################################################
	def setUp(self):
		"""Initialise the test data array."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.Q_max
		MinVal = arrayfunc.arraylimits.Q_min
		self.gentest = array.array('Q', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code Q. Test invalid parameter type odd length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code Q. Test missing parameter odd length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code Q. Test excess parameters odd length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code Q. Test invalid keyword parameter odd length array without SIMD."""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# The raised exception must correspond to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_f(unittest.TestCase):
	"""Test amin for basic general function operation (op_template_general)."""

	########################################################
	def setUp(self):
		"""Initialise the test data arrays."""
		# This compare is inserted by the code generation template so the
		# array length spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		# Convert to Python ints first. The floating point limits would be
		# invalid for range(), and double precision would overflow when
		# calculating the step value. Python ints have no upper limit and
		# cannot overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Arbitrary, evenly spaced data straddling the mid point of the
		# data type's range.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-containing data.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = halfrangeinc + [MaxVal] + halfrangedec
		minvaldata = halfrangeinc + [MinVal] + halfrangedec

		def filled(data):
			# Repeat 'data' cyclically out to the required array length.
			return array.array('f', itertools.islice(itertools.cycle(data), arraylength))

		# Test arrays.
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code f. General test even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code f. Test increasing values even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code f. Test decreasing values even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code f. Test finding max for data type even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code f. Test finding value from array that contains min for data type even length array with SIMD."""
		self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code f. Test optional maxlen parameter even length array with SIMD."""
		# Only the first maxlen elements should be examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_f(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Build a simple constant-valued test array."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		self.gentest = array.array('f', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code f. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code f. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code f. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code f. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_f(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Build the test data arrays used by each test case."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		# range() will not accept floats, and double precision arithmetic
		# can overflow when computing the step, so lay out the data using
		# Python integers, which have no upper limit.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced values over a range straddling the type's mid point.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-bearing data sets.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
		# Repeat each data set cyclically out to the target array length.
		def filled(data):
			return array.array('f', itertools.islice(itertools.cycle(data), arraylength))
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code f. General test odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code f. Test increasing values odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code f. Test decreasing values odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code f. Test finding max for data type odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code f. Test finding value from array that contains min for data type odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code f. Test optional maxlen parameter odd length array with SIMD.
		"""
		# maxlen=5 restricts the search to the first five elements.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_f(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Build a simple constant-valued test array."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		self.gentest = array.array('f', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code f. Test invalid parameter type odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code f. Test missing parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code f. Test excess parameters odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code f. Test invalid keyword parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_f(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Build the test data arrays used by each test case."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		# range() will not accept floats, and double precision arithmetic
		# can overflow when computing the step, so lay out the data using
		# Python integers, which have no upper limit.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced values over a range straddling the type's mid point.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-bearing data sets.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
		# Repeat each data set cyclically out to the target array length.
		def filled(data):
			return array.array('f', itertools.islice(itertools.cycle(data), arraylength))
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code f. General test even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code f. Test increasing values even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code f. Test decreasing values even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code f. Test finding max for data type even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code f. Test finding value from array that contains min for data type even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code f. Test optional maxlen parameter even length array without SIMD.
		"""
		# maxlen=5 restricts the search to the first five elements.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_f(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Build a simple constant-valued test array."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		self.gentest = array.array('f', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code f. Test invalid parameter type even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code f. Test missing parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code f. Test excess parameters even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code f. Test invalid keyword parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_f(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Build the test data arrays used by each test case."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		# range() will not accept floats, and double precision arithmetic
		# can overflow when computing the step, so lay out the data using
		# Python integers, which have no upper limit.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced values over a range straddling the type's mid point.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-bearing data sets.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
		# Repeat each data set cyclically out to the target array length.
		def filled(data):
			return array.array('f', itertools.islice(itertools.cycle(data), arraylength))
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code f. General test odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code f. Test increasing values odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code f. Test decreasing values odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code f. Test finding max for data type odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code f. Test finding value from array that contains min for data type odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code f. Test optional maxlen parameter odd length array without SIMD.
		"""
		# maxlen=5 restricts the search to the first five elements.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_f(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Build a simple constant-valued test array."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		self.gentest = array.array('f', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code f. Test invalid parameter type odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code f. Test missing parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code f. Test excess parameters odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code f. Test invalid keyword parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_with_simd_d(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Build the test data arrays used by each test case."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# range() will not accept floats, and double precision arithmetic
		# can overflow when computing the step, so lay out the data using
		# Python integers, which have no upper limit.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced values over a range straddling the type's mid point.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-bearing data sets.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
		# Repeat each data set cyclically out to the target array length.
		def filled(data):
			return array.array('d', itertools.islice(itertools.cycle(data), arraylength))
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code d. General test even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code d. Test increasing values even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code d. Test decreasing values even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code d. Test finding max for data type even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code d. Test finding value from array that contains min for data type even length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code d. Test optional maxlen parameter even length array with SIMD.
		"""
		# maxlen=5 restricts the search to the first five elements.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_with_simd_d(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Build a simple constant-valued test array."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		self.gentest = array.array('d', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code d. Test invalid parameter type even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code d. Test missing parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code d. Test excess parameters even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code d. Test invalid keyword parameter even length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# The native Python min() raises the same exception type.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_with_simd_d(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Build the test data arrays used by each test case."""
		# Template-generated compare: odd-length test data gets a few extra
		# elements so that it spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# range() will not accept floats, and double precision arithmetic
		# can overflow when computing the step, so lay out the data using
		# Python integers, which have no upper limit.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Evenly spaced values over a range straddling the type's mid point.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleaved, increasing, decreasing, and limit-bearing data sets.
		gendata = list(itertools.chain.from_iterable(zip(halfrangeinc, halfrangedec)))
		incdata = halfrangeinc
		decdata = halfrangedec
		maxvaldata = list(itertools.chain(halfrangeinc, [MaxVal], halfrangedec))
		minvaldata = list(itertools.chain(halfrangeinc, [MinVal], halfrangedec))
		# Repeat each data set cyclically out to the target array length.
		def filled(data):
			return array.array('d', itertools.islice(itertools.cycle(data), arraylength))
		self.gentest = filled(gendata)
		self.inctest = filled(incdata)
		self.dectest = filled(decdata)
		self.maxvaltest = filled(maxvaldata)
		self.minvaltest = filled(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code d. General test odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code d. Test increasing values odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code d. Test decreasing values odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code d. Test finding max for data type odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code d. Test finding value from array that contains min for data type odd length array with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code d. Test optional maxlen parameter odd length array with SIMD.
		"""
		# maxlen=5 restricts the search to the first five elements.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_with_simd_d(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# A simple constant-valued array is enough for parameter checks.
		self.gentest = array.array('d', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code d. Test invalid parameter type odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code d. Test missing parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code d. Test excess parameters odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code d. Test invalid keyword parameter odd length array with SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_even_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# This is generated by a common template, so we need to make
		# sure that in cases where we are using floating point values
		# we don't pass floating point values for range().
		# Plus, double precision calculations will overflow when calculating
		# the step value unless we convert to integer first. Python
		# integers have no upper limit, and so will not overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Create some arbitrary data over a wide range of the data type. This
		# creates evenly spaced data over a range straddling the mid point of the data.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleave the rising and falling sequences.
		gendata = [val for pair in zip(halfrangeinc, halfrangedec) for val in pair]
		maxvaldata = halfrangeinc + [MaxVal] + halfrangedec
		minvaldata = halfrangeinc + [MinVal] + halfrangedec

		# Cycle the source sequence and truncate to exactly arraylength elements.
		def filltolength(seq):
			return array.array('d', itertools.islice(itertools.cycle(seq), arraylength))

		# Test arrays.
		self.gentest = filltolength(gendata)
		self.inctest = filltolength(halfrangeinc)
		self.dectest = filltolength(halfrangedec)
		self.maxvaltest = filltolength(maxvaldata)
		self.minvaltest = filltolength(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code d. General test even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code d. Test increasing values even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code d. Test decreasing values even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code d. Test finding max for data type even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code d. Test finding value from array that contains min for data type even length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code d. Test optional maxlen parameter even length array without SIMD.
		"""
		# With maxlen, only the first 5 elements are examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_even_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# A simple constant-valued array is enough for parameter checks.
		self.gentest = array.array('d', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code d. Test invalid parameter type even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code d. Test missing parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code d. Test excess parameters even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code d. Test invalid keyword parameter even length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_general_odd_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin for basic general function operation.
	op_template_general
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# This is generated by a common template, so we need to make
		# sure that in cases where we are using floating point values
		# we don't pass floating point values for range().
		# Plus, double precision calculations will overflow when calculating
		# the step value unless we convert to integer first. Python
		# integers have no upper limit, and so will not overflow.
		MaxInt = int(MaxVal)
		MinInt = int(MinVal)
		# Create some arbitrary data over a wide range of the data type. This
		# creates evenly spaced data over a range straddling the mid point of the data.
		midpoint = (MaxInt + MinInt) // 2
		startval = (midpoint + MinInt) // 2
		endval = (midpoint + MaxInt) // 2
		stepval = (MaxInt - MinInt) // 100
		halfrangeinc = list(range(startval, endval, stepval))
		halfrangedec = list(range(endval, startval, -stepval))
		# Interleave the rising and falling sequences.
		gendata = [val for pair in zip(halfrangeinc, halfrangedec) for val in pair]
		maxvaldata = halfrangeinc + [MaxVal] + halfrangedec
		minvaldata = halfrangeinc + [MinVal] + halfrangedec

		# Cycle the source sequence and truncate to exactly arraylength elements.
		def filltolength(seq):
			return array.array('d', itertools.islice(itertools.cycle(seq), arraylength))

		# Test arrays.
		self.gentest = filltolength(gendata)
		self.inctest = filltolength(halfrangeinc)
		self.dectest = filltolength(halfrangedec)
		self.maxvaltest = filltolength(maxvaldata)
		self.minvaltest = filltolength(minvaldata)

	########################################################
	def test_amin_general_function_01(self):
		"""Test amin - Array code d. General test odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.gentest, nosimd=True), min(self.gentest))

	########################################################
	def test_amin_general_function_02(self):
		"""Test amin - Array code d. Test increasing values odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.inctest, nosimd=True), min(self.inctest))

	########################################################
	def test_amin_general_function_03(self):
		"""Test amin - Array code d. Test decreasing values odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.dectest, nosimd=True), min(self.dectest))

	########################################################
	def test_amin_general_function_04(self):
		"""Test amin - Array code d. Test finding max for data type odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.maxvaltest, nosimd=True), min(self.maxvaltest))

	########################################################
	def test_amin_general_function_05(self):
		"""Test amin - Array code d. Test finding value from array that contains min for data type odd length array without SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.minvaltest, nosimd=True), min(self.minvaltest))

	########################################################
	def test_amin_general_function_06(self):
		"""Test amin - Array code d. Test optional maxlen parameter odd length array without SIMD.
		"""
		# With maxlen, only the first 5 elements are examined.
		self.assertEqual(arrayfunc.amin(self.maxvaltest, maxlen=5, nosimd=True), min(self.maxvaltest[:5]))
##############################################################################
##############################################################################
class amin_parameter_odd_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin for basic parameter tests.
	op_template_params
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# A simple constant-valued array is enough for parameter checks.
		self.gentest = array.array('d', [MaxVal // 2] * arraylength)

	########################################################
	def test_amin_param_function_01(self):
		"""Test amin - Array code d. Test invalid parameter type odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(1, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(1)

	########################################################
	def test_amin_param_function_02(self):
		"""Test amin - Array code d. Test missing parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin()
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min()

	########################################################
	def test_amin_param_function_03(self):
		"""Test amin - Array code d. Test excess parameters odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, 5, 2, 2, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, 2)

	########################################################
	def test_amin_param_function_04(self):
		"""Test amin - Array code d. Test invalid keyword parameter odd length array without SIMD.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.amin(self.gentest, xxxx=5, nosimd=True)
		# Check that the exception raised corresponds to the native Python behaviour.
		with self.assertRaises(TypeError):
			min(self.gentest, xxxx=5)
##############################################################################
##############################################################################
class amin_nonfinite_0_even_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.

		Builds four 'f' test arrays seeded with nan, inf, -inf, and a
		mixture of inf and -inf respectively, each rotated by 0 places.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		if 'even' == 'odd':
			arrayextension = 5
		else:
			arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		# Create some test data containing a mixture of values.
		# FIX: the step is one hundredth of the full data range. The original
		# expression "int(MaxVal) - int(MinVal) // 100" was missing
		# parentheses, which made the step larger than the range and
		# collapsed halfrangedata to a single element.
		stepval = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), stepval))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values in place in order to create different combinations.
		# This is being generated through a template to allow us to create
		# different combinations to help test the effects of having the
		# special values in various locations. This is primarily of use
		# for the SIMD tests which do operations in parallel.
		rotplaces = 0
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests. Each source sequence is
		# cycled and truncated to exactly arraylength elements.
		self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with even length array data shifted 0 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN. This only checks that both calls complete.
		arrayfunc.amin(self.data_nan)
		min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with even length array data shifted 0 with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.data_inf), min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with even length array data shifted 0 with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.data_ninf), min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with even length array data shifted 0 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		self.assertEqual(arrayfunc.amin(self.data_mixed), min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_1_even_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.

		Builds four 'f' test arrays seeded with nan, inf, -inf, and a
		mixture of inf and -inf respectively, each rotated by 1 place.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		if 'even' == 'odd':
			arrayextension = 5
		else:
			arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		# Create some test data containing a mixture of values.
		# FIX: the step is one hundredth of the full data range. The original
		# expression "int(MaxVal) - int(MinVal) // 100" was missing
		# parentheses, which made the step larger than the range and
		# collapsed halfrangedata to a single element.
		stepval = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), stepval))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values in place in order to create different combinations.
		# This is being generated through a template to allow us to create
		# different combinations to help test the effects of having the
		# special values in various locations. This is primarily of use
		# for the SIMD tests which do operations in parallel.
		rotplaces = 1
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests. Each source sequence is
		# cycled and truncated to exactly arraylength elements.
		self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with even length array data shifted 1 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN. This only checks that both calls complete.
		arrayfunc.amin(self.data_nan)
		min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with even length array data shifted 1 with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.data_inf), min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with even length array data shifted 1 with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.data_ninf), min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with even length array data shifted 1 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		self.assertEqual(arrayfunc.amin(self.data_mixed), min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_2_even_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.

		Builds four 'f' test arrays seeded with nan, inf, -inf, and a
		mixture of inf and -inf respectively, each rotated by 2 places.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		if 'even' == 'odd':
			arrayextension = 5
		else:
			arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		# Create some test data containing a mixture of values.
		# FIX: the step is one hundredth of the full data range. The original
		# expression "int(MaxVal) - int(MinVal) // 100" was missing
		# parentheses, which made the step larger than the range and
		# collapsed halfrangedata to a single element.
		stepval = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), stepval))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values in place in order to create different combinations.
		# This is being generated through a template to allow us to create
		# different combinations to help test the effects of having the
		# special values in various locations. This is primarily of use
		# for the SIMD tests which do operations in parallel.
		rotplaces = 2
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests. Each source sequence is
		# cycled and truncated to exactly arraylength elements.
		self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with even length array data shifted 2 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN. This only checks that both calls complete.
		arrayfunc.amin(self.data_nan)
		min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with even length array data shifted 2 with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.data_inf), min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with even length array data shifted 2 with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.data_ninf), min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with even length array data shifted 2 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		self.assertEqual(arrayfunc.amin(self.data_mixed), min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_3_even_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.

		Builds four 'f' test arrays seeded with nan, inf, -inf, and a
		mixture of inf and -inf respectively, each rotated by 3 places.
		"""
		# We use a template to generate this code, so the following
		# compare is inserted into the template to generate code which
		# spills over past the SIMD handler.
		if 'even' == 'odd':
			arrayextension = 5
		else:
			arrayextension = 0
		arraylength = 96 + arrayextension
		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min
		# Create some test data containing a mixture of values.
		# FIX: the step is one hundredth of the full data range. The original
		# expression "int(MaxVal) - int(MinVal) // 100" was missing
		# parentheses, which made the step larger than the range and
		# collapsed halfrangedata to a single element.
		stepval = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), stepval))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values in place in order to create different combinations.
		# This is being generated through a template to allow us to create
		# different combinations to help test the effects of having the
		# special values in various locations. This is primarily of use
		# for the SIMD tests which do operations in parallel.
		rotplaces = 3
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests. Each source sequence is
		# cycled and truncated to exactly arraylength elements.
		self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with even length array data shifted 3 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN. This only checks that both calls complete.
		arrayfunc.amin(self.data_nan)
		min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with even length array data shifted 3 with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.data_inf), min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with even length array data shifted 3 with SIMD.
		"""
		self.assertEqual(arrayfunc.amin(self.data_ninf), min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with even length array data shifted 3 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		self.assertEqual(arrayfunc.amin(self.data_mixed), min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_4_even_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Build float 'f' arrays seeded with nan / inf / -inf / mixed values."""
		# Template-generated compare: odd-length variants get 5 extra
		# elements so the data spills past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min

		# A spread of ordinary finite values placed on both sides of the
		# special values.
		# NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
		# (int(MaxVal) - int(MinVal)) // 100 may have been intended — kept as-is.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

		# Rotating the base data moves the special values to different
		# positions, which matters for SIMD code working on parallel lanes.
		rotplaces = 4

		def buildarray(specials):
			# Finite spread, special values in the middle, finite spread again.
			base = halfrangedata + specials + halfrangedata
			rotated = base[rotplaces:] + base[:rotplaces]
			return array.array('f', itertools.islice(itertools.cycle(rotated), arraylength))

		self.data_nan = buildarray([math.nan, -10.0, MaxVal, 10.0, MinVal])
		self.data_inf = buildarray([math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_ninf = buildarray([-math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_mixed = buildarray([math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal])

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with even length array data shifted 4 with SIMD.
		"""
		# No assertion: there is no meaningful order comparison with NaN,
		# so both calls only need to complete without error.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with even length array data shifted 4 with SIMD.
		"""
		data = self.data_inf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with even length array data shifted 4 with SIMD.
		"""
		data = self.data_ninf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with even length array data shifted 4 with SIMD.
		"""
		# The mixed data deliberately excludes NaN: no meaningful order
		# comparison with NaN.
		data = self.data_mixed
		self.assertEqual(arrayfunc.amin(data), min(data))
##############################################################################
##############################################################################
class amin_nonfinite_0_odd_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Build float 'f' arrays seeded with nan / inf / -inf / mixed values."""
		# Template-generated compare: odd-length variants get 5 extra
		# elements so the data spills past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min

		# A spread of ordinary finite values placed on both sides of the
		# special values.
		# NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
		# (int(MaxVal) - int(MinVal)) // 100 may have been intended — kept as-is.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

		# Rotating the base data moves the special values to different
		# positions, which matters for SIMD code working on parallel lanes.
		rotplaces = 0

		def buildarray(specials):
			# Finite spread, special values in the middle, finite spread again.
			base = halfrangedata + specials + halfrangedata
			rotated = base[rotplaces:] + base[:rotplaces]
			return array.array('f', itertools.islice(itertools.cycle(rotated), arraylength))

		self.data_nan = buildarray([math.nan, -10.0, MaxVal, 10.0, MinVal])
		self.data_inf = buildarray([math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_ninf = buildarray([-math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_mixed = buildarray([math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal])

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with odd length array data shifted 0 with SIMD.
		"""
		# No assertion: there is no meaningful order comparison with NaN,
		# so both calls only need to complete without error.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with odd length array data shifted 0 with SIMD.
		"""
		data = self.data_inf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with odd length array data shifted 0 with SIMD.
		"""
		data = self.data_ninf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 0 with SIMD.
		"""
		# The mixed data deliberately excludes NaN: no meaningful order
		# comparison with NaN.
		data = self.data_mixed
		self.assertEqual(arrayfunc.amin(data), min(data))
##############################################################################
##############################################################################
class amin_nonfinite_1_odd_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Build float 'f' arrays seeded with nan / inf / -inf / mixed values."""
		# Template-generated compare: odd-length variants get 5 extra
		# elements so the data spills past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min

		# A spread of ordinary finite values placed on both sides of the
		# special values.
		# NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
		# (int(MaxVal) - int(MinVal)) // 100 may have been intended — kept as-is.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

		# Rotating the base data moves the special values to different
		# positions, which matters for SIMD code working on parallel lanes.
		rotplaces = 1

		def buildarray(specials):
			# Finite spread, special values in the middle, finite spread again.
			base = halfrangedata + specials + halfrangedata
			rotated = base[rotplaces:] + base[:rotplaces]
			return array.array('f', itertools.islice(itertools.cycle(rotated), arraylength))

		self.data_nan = buildarray([math.nan, -10.0, MaxVal, 10.0, MinVal])
		self.data_inf = buildarray([math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_ninf = buildarray([-math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_mixed = buildarray([math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal])

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with odd length array data shifted 1 with SIMD.
		"""
		# No assertion: there is no meaningful order comparison with NaN,
		# so both calls only need to complete without error.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with odd length array data shifted 1 with SIMD.
		"""
		data = self.data_inf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with odd length array data shifted 1 with SIMD.
		"""
		data = self.data_ninf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 1 with SIMD.
		"""
		# The mixed data deliberately excludes NaN: no meaningful order
		# comparison with NaN.
		data = self.data_mixed
		self.assertEqual(arrayfunc.amin(data), min(data))
##############################################################################
##############################################################################
class amin_nonfinite_2_odd_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Build float 'f' arrays seeded with nan / inf / -inf / mixed values."""
		# Template-generated compare: odd-length variants get 5 extra
		# elements so the data spills past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min

		# A spread of ordinary finite values placed on both sides of the
		# special values.
		# NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
		# (int(MaxVal) - int(MinVal)) // 100 may have been intended — kept as-is.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

		# Rotating the base data moves the special values to different
		# positions, which matters for SIMD code working on parallel lanes.
		rotplaces = 2

		def buildarray(specials):
			# Finite spread, special values in the middle, finite spread again.
			base = halfrangedata + specials + halfrangedata
			rotated = base[rotplaces:] + base[:rotplaces]
			return array.array('f', itertools.islice(itertools.cycle(rotated), arraylength))

		self.data_nan = buildarray([math.nan, -10.0, MaxVal, 10.0, MinVal])
		self.data_inf = buildarray([math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_ninf = buildarray([-math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_mixed = buildarray([math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal])

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with odd length array data shifted 2 with SIMD.
		"""
		# No assertion: there is no meaningful order comparison with NaN,
		# so both calls only need to complete without error.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with odd length array data shifted 2 with SIMD.
		"""
		data = self.data_inf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with odd length array data shifted 2 with SIMD.
		"""
		data = self.data_ninf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 2 with SIMD.
		"""
		# The mixed data deliberately excludes NaN: no meaningful order
		# comparison with NaN.
		data = self.data_mixed
		self.assertEqual(arrayfunc.amin(data), min(data))
##############################################################################
##############################################################################
class amin_nonfinite_3_odd_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Build float 'f' arrays seeded with nan / inf / -inf / mixed values."""
		# Template-generated compare: odd-length variants get 5 extra
		# elements so the data spills past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min

		# A spread of ordinary finite values placed on both sides of the
		# special values.
		# NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
		# (int(MaxVal) - int(MinVal)) // 100 may have been intended — kept as-is.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

		# Rotating the base data moves the special values to different
		# positions, which matters for SIMD code working on parallel lanes.
		rotplaces = 3

		def buildarray(specials):
			# Finite spread, special values in the middle, finite spread again.
			base = halfrangedata + specials + halfrangedata
			rotated = base[rotplaces:] + base[:rotplaces]
			return array.array('f', itertools.islice(itertools.cycle(rotated), arraylength))

		self.data_nan = buildarray([math.nan, -10.0, MaxVal, 10.0, MinVal])
		self.data_inf = buildarray([math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_ninf = buildarray([-math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_mixed = buildarray([math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal])

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with odd length array data shifted 3 with SIMD.
		"""
		# No assertion: there is no meaningful order comparison with NaN,
		# so both calls only need to complete without error.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with odd length array data shifted 3 with SIMD.
		"""
		data = self.data_inf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with odd length array data shifted 3 with SIMD.
		"""
		data = self.data_ninf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 3 with SIMD.
		"""
		# The mixed data deliberately excludes NaN: no meaningful order
		# comparison with NaN.
		data = self.data_mixed
		self.assertEqual(arrayfunc.amin(data), min(data))
##############################################################################
##############################################################################
class amin_nonfinite_4_odd_arraysize_with_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Build float 'f' arrays seeded with nan / inf / -inf / mixed values."""
		# Template-generated compare: odd-length variants get 5 extra
		# elements so the data spills past the SIMD handler.
		arrayextension = 5 if 'odd' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min

		# A spread of ordinary finite values placed on both sides of the
		# special values.
		# NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
		# (int(MaxVal) - int(MinVal)) // 100 may have been intended — kept as-is.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

		# Rotating the base data moves the special values to different
		# positions, which matters for SIMD code working on parallel lanes.
		rotplaces = 4

		def buildarray(specials):
			# Finite spread, special values in the middle, finite spread again.
			base = halfrangedata + specials + halfrangedata
			rotated = base[rotplaces:] + base[:rotplaces]
			return array.array('f', itertools.islice(itertools.cycle(rotated), arraylength))

		self.data_nan = buildarray([math.nan, -10.0, MaxVal, 10.0, MinVal])
		self.data_inf = buildarray([math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_ninf = buildarray([-math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_mixed = buildarray([math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal])

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with odd length array data shifted 4 with SIMD.
		"""
		# No assertion: there is no meaningful order comparison with NaN,
		# so both calls only need to complete without error.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with odd length array data shifted 4 with SIMD.
		"""
		data = self.data_inf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with odd length array data shifted 4 with SIMD.
		"""
		data = self.data_ninf
		self.assertEqual(arrayfunc.amin(data), min(data))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 4 with SIMD.
		"""
		# The mixed data deliberately excludes NaN: no meaningful order
		# comparison with NaN.
		data = self.data_mixed
		self.assertEqual(arrayfunc.amin(data), min(data))
##############################################################################
##############################################################################
class amin_nonfinite_0_even_arraysize_without_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Build float 'f' arrays seeded with nan / inf / -inf / mixed values."""
		# Template-generated compare: odd-length variants get 5 extra
		# elements so the data spills past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min

		# A spread of ordinary finite values placed on both sides of the
		# special values.
		# NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
		# (int(MaxVal) - int(MinVal)) // 100 may have been intended — kept as-is.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

		# Rotating the base data moves the special values to different
		# positions, which matters for SIMD code working on parallel lanes.
		rotplaces = 0

		def buildarray(specials):
			# Finite spread, special values in the middle, finite spread again.
			base = halfrangedata + specials + halfrangedata
			rotated = base[rotplaces:] + base[:rotplaces]
			return array.array('f', itertools.islice(itertools.cycle(rotated), arraylength))

		self.data_nan = buildarray([math.nan, -10.0, MaxVal, 10.0, MinVal])
		self.data_inf = buildarray([math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_ninf = buildarray([-math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_mixed = buildarray([math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal])

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with even length array data shifted 0 without SIMD.
		"""
		# No assertion: there is no meaningful order comparison with NaN,
		# so both calls only need to complete without error.
		result = arrayfunc.amin(self.data_nan, nosimd=True)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with even length array data shifted 0 without SIMD.
		"""
		data = self.data_inf
		self.assertEqual(arrayfunc.amin(data, nosimd=True), min(data))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with even length array data shifted 0 without SIMD.
		"""
		data = self.data_ninf
		self.assertEqual(arrayfunc.amin(data, nosimd=True), min(data))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with even length array data shifted 0 without SIMD.
		"""
		# The mixed data deliberately excludes NaN: no meaningful order
		# comparison with NaN.
		data = self.data_mixed
		self.assertEqual(arrayfunc.amin(data, nosimd=True), min(data))
##############################################################################
##############################################################################
class amin_nonfinite_1_even_arraysize_without_simd_f(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Build float 'f' arrays seeded with nan / inf / -inf / mixed values."""
		# Template-generated compare: odd-length variants get 5 extra
		# elements so the data spills past the SIMD handler.
		arrayextension = 5 if 'even' == 'odd' else 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.f_max
		MinVal = arrayfunc.arraylimits.f_min

		# A spread of ordinary finite values placed on both sides of the
		# special values.
		# NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
		# (int(MaxVal) - int(MinVal)) // 100 may have been intended — kept as-is.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

		# Rotating the base data moves the special values to different
		# positions, which matters for SIMD code working on parallel lanes.
		rotplaces = 1

		def buildarray(specials):
			# Finite spread, special values in the middle, finite spread again.
			base = halfrangedata + specials + halfrangedata
			rotated = base[rotplaces:] + base[:rotplaces]
			return array.array('f', itertools.islice(itertools.cycle(rotated), arraylength))

		self.data_nan = buildarray([math.nan, -10.0, MaxVal, 10.0, MinVal])
		self.data_inf = buildarray([math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_ninf = buildarray([-math.inf, -10.0, MaxVal, 10.0, MinVal])
		self.data_mixed = buildarray([math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal])

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code f. Test NaN data with even length array data shifted 1 without SIMD.
		"""
		# No assertion: there is no meaningful order comparison with NaN,
		# so both calls only need to complete without error.
		result = arrayfunc.amin(self.data_nan, nosimd=True)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code f. Test Inf data with even length array data shifted 1 without SIMD.
		"""
		data = self.data_inf
		self.assertEqual(arrayfunc.amin(data, nosimd=True), min(data))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code f. Test Negative Inf data with even length array data shifted 1 without SIMD.
		"""
		data = self.data_ninf
		self.assertEqual(arrayfunc.amin(data, nosimd=True), min(data))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code f. Test mixed non-finite data with even length array data shifted 1 without SIMD.
		"""
		# The mixed data deliberately excludes NaN: no meaningful order
		# comparison with NaN.
		data = self.data_mixed
		self.assertEqual(arrayfunc.amin(data, nosimd=True), min(data))
##############################################################################
##############################################################################
class amin_nonfinite_2_even_arraysize_without_simd_f(unittest.TestCase):
"""Test with floating point nan, inf -inf.
nonfinite_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
# We use a template to generate this code, so the following
# compare is inserted into the template to generate code which
# spills over past the SIMD handler.
if 'even' == 'odd':
arrayextension = 5
else:
arrayextension = 0
arraylength = 96 + arrayextension
MaxVal = arrayfunc.arraylimits.f_max
MinVal = arrayfunc.arraylimits.f_min
# Create some test data containing a mixture of values.
halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))
nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
# Rotate the values in place in order to create different combinations.
# This is being generated through a template to allow us to create
# different combinations to help test the effects of having the
# special values in various locations. This is primarily of use
# for the SIMD tests which do operations in parallel.
rotplaces = 2
nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
# Now create the arrays used in the tests.
self.data_nan = array.array('f', [x for x,y in zip(itertools.cycle(nanvaldata), range(arraylength))])
self.data_inf = array.array('f', [x for x,y in zip(itertools.cycle(infvaldata), range(arraylength))])
self.data_ninf = array.array('f', [x for x,y in zip(itertools.cycle(ninfvaldata), range(arraylength))])
self.data_mixed = array.array('f', [x for x,y in zip(itertools.cycle(mixedvaldata), range(arraylength))])
########################################################
def test_amin_nonfinite_nan_01(self):
    """Test amin - Array code f. Test NaN data with even length array data shifted 2 without SIMD.
    """
    # NaN admits no meaningful ordering, so no result comparison is
    # made; this only checks that both calls complete without error.
    af_result = arrayfunc.amin(self.data_nan, nosimd=True)
    builtin_result = min(self.data_nan)
########################################################
def test_amin_nonfinite_inf_02(self):
    """Test amin - Array code f. Test Inf data with even length array data shifted 2 without SIMD.
    """
    # The builtin min() provides the reference value for comparison.
    expected = min(self.data_inf)
    actual = arrayfunc.amin(self.data_inf, nosimd=True)
    self.assertEqual(actual, expected)
########################################################
def test_amin_nonfinite_ninf_03(self):
    """Test amin - Array code f. Test Negative Inf data with even length array data shifted 2 without SIMD.
    """
    # The builtin min() provides the reference value for comparison.
    expected = min(self.data_ninf)
    actual = arrayfunc.amin(self.data_ninf, nosimd=True)
    self.assertEqual(actual, expected)
########################################################
def test_amin_nonfinite_mixed_04(self):
    """Test amin - Array code f. Test mixed non-finite data with even length array data shifted 2 without SIMD.
    """
    # The mixed data omits NaN, since NaN has no meaningful ordering;
    # builtin min() provides the reference value.
    expected = min(self.data_mixed)
    actual = arrayfunc.amin(self.data_mixed, nosimd=True)
    self.assertEqual(actual, expected)
##############################################################################
##############################################################################
class amin_nonfinite_3_even_arraysize_without_simd_f(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise.

        Build four 'f' test arrays (self.data_nan, self.data_inf,
        self.data_ninf, self.data_mixed) with the non-finite values
        rotated 3 places so they land at varying positions relative to
        SIMD lane boundaries.
        """
        # This is the 'even' array size variant: no extra elements are
        # added. (Odd variants add 5 so data spills past the SIMD handler.)
        arrayextension = 0
        arraylength = 96 + arrayextension

        MaxVal = arrayfunc.arraylimits.f_max
        MinVal = arrayfunc.arraylimits.f_min

        # Create some test data containing a mixture of values.
        # BUG FIX: the step was previously "int(MaxVal) - int(MinVal) // 100",
        # which by operator precedence exceeds the whole range and produced a
        # single-element list; the intended step is 1/100 of the full range.
        step = (int(MaxVal) - int(MinVal)) // 100
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))

        nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

        # Rotate the values in place to vary where the special values
        # appear. This matters mainly for the SIMD tests, which operate
        # on several elements in parallel.
        rotplaces = 3
        nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
        infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
        ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
        mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

        # Now create the arrays used in the tests. islice trims the
        # cycled data to exactly the required array length.
        self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
        self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
        self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
        self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code f. Test NaN data with even length array data shifted 3 without SIMD.
        """
        # We don't actually test the result as there is no meaningful
        # order comparison with NaN; this only checks amin runs cleanly.
        result = arrayfunc.amin(self.data_nan, nosimd=True)
        result2 = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code f. Test Inf data with even length array data shifted 3 without SIMD.
        """
        result = arrayfunc.amin(self.data_inf, nosimd=True)
        self.assertEqual(result, min(self.data_inf))

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code f. Test Negative Inf data with even length array data shifted 3 without SIMD.
        """
        result = arrayfunc.amin(self.data_ninf, nosimd=True)
        self.assertEqual(result, min(self.data_ninf))

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code f. Test mixed non-finite data with even length array data shifted 3 without SIMD.
        """
        # The mixed test does not include NaN, as there is no meaningful
        # order comparison with NaN.
        result = arrayfunc.amin(self.data_mixed, nosimd=True)
        self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_4_even_arraysize_without_simd_f(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise.

        Build four 'f' test arrays (self.data_nan, self.data_inf,
        self.data_ninf, self.data_mixed) with the non-finite values
        rotated 4 places so they land at varying positions relative to
        SIMD lane boundaries.
        """
        # This is the 'even' array size variant: no extra elements are
        # added. (Odd variants add 5 so data spills past the SIMD handler.)
        arrayextension = 0
        arraylength = 96 + arrayextension

        MaxVal = arrayfunc.arraylimits.f_max
        MinVal = arrayfunc.arraylimits.f_min

        # Create some test data containing a mixture of values.
        # BUG FIX: the step was previously "int(MaxVal) - int(MinVal) // 100",
        # which by operator precedence exceeds the whole range and produced a
        # single-element list; the intended step is 1/100 of the full range.
        step = (int(MaxVal) - int(MinVal)) // 100
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))

        nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

        # Rotate the values in place to vary where the special values
        # appear. This matters mainly for the SIMD tests, which operate
        # on several elements in parallel.
        rotplaces = 4
        nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
        infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
        ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
        mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

        # Now create the arrays used in the tests. islice trims the
        # cycled data to exactly the required array length.
        self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
        self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
        self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
        self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code f. Test NaN data with even length array data shifted 4 without SIMD.
        """
        # We don't actually test the result as there is no meaningful
        # order comparison with NaN; this only checks amin runs cleanly.
        result = arrayfunc.amin(self.data_nan, nosimd=True)
        result2 = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code f. Test Inf data with even length array data shifted 4 without SIMD.
        """
        result = arrayfunc.amin(self.data_inf, nosimd=True)
        self.assertEqual(result, min(self.data_inf))

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code f. Test Negative Inf data with even length array data shifted 4 without SIMD.
        """
        result = arrayfunc.amin(self.data_ninf, nosimd=True)
        self.assertEqual(result, min(self.data_ninf))

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code f. Test mixed non-finite data with even length array data shifted 4 without SIMD.
        """
        # The mixed test does not include NaN, as there is no meaningful
        # order comparison with NaN.
        result = arrayfunc.amin(self.data_mixed, nosimd=True)
        self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_0_odd_arraysize_without_simd_f(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise.

        Build four 'f' test arrays (self.data_nan, self.data_inf,
        self.data_ninf, self.data_mixed) of odd length, with the
        non-finite values unrotated (shift 0).
        """
        # This is the 'odd' array size variant: 5 extra elements are
        # added so the data spills over past the SIMD handler.
        arrayextension = 5
        arraylength = 96 + arrayextension

        MaxVal = arrayfunc.arraylimits.f_max
        MinVal = arrayfunc.arraylimits.f_min

        # Create some test data containing a mixture of values.
        # BUG FIX: the step was previously "int(MaxVal) - int(MinVal) // 100",
        # which by operator precedence exceeds the whole range and produced a
        # single-element list; the intended step is 1/100 of the full range.
        step = (int(MaxVal) - int(MinVal)) // 100
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))

        nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

        # Rotate the values in place to vary where the special values
        # appear. A shift of 0 leaves the data unrotated; the slicing is
        # kept for consistency with the other template variants.
        rotplaces = 0
        nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
        infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
        ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
        mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

        # Now create the arrays used in the tests. islice trims the
        # cycled data to exactly the required array length.
        self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
        self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
        self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
        self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code f. Test NaN data with odd length array data shifted 0 without SIMD.
        """
        # We don't actually test the result as there is no meaningful
        # order comparison with NaN; this only checks amin runs cleanly.
        result = arrayfunc.amin(self.data_nan, nosimd=True)
        result2 = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code f. Test Inf data with odd length array data shifted 0 without SIMD.
        """
        result = arrayfunc.amin(self.data_inf, nosimd=True)
        self.assertEqual(result, min(self.data_inf))

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code f. Test Negative Inf data with odd length array data shifted 0 without SIMD.
        """
        result = arrayfunc.amin(self.data_ninf, nosimd=True)
        self.assertEqual(result, min(self.data_ninf))

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 0 without SIMD.
        """
        # The mixed test does not include NaN, as there is no meaningful
        # order comparison with NaN.
        result = arrayfunc.amin(self.data_mixed, nosimd=True)
        self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_1_odd_arraysize_without_simd_f(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise.

        Build four 'f' test arrays (self.data_nan, self.data_inf,
        self.data_ninf, self.data_mixed) of odd length, with the
        non-finite values rotated 1 place.
        """
        # This is the 'odd' array size variant: 5 extra elements are
        # added so the data spills over past the SIMD handler.
        arrayextension = 5
        arraylength = 96 + arrayextension

        MaxVal = arrayfunc.arraylimits.f_max
        MinVal = arrayfunc.arraylimits.f_min

        # Create some test data containing a mixture of values.
        # BUG FIX: the step was previously "int(MaxVal) - int(MinVal) // 100",
        # which by operator precedence exceeds the whole range and produced a
        # single-element list; the intended step is 1/100 of the full range.
        step = (int(MaxVal) - int(MinVal)) // 100
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))

        nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

        # Rotate the values in place to vary where the special values
        # appear. This matters mainly for the SIMD tests, which operate
        # on several elements in parallel.
        rotplaces = 1
        nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
        infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
        ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
        mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

        # Now create the arrays used in the tests. islice trims the
        # cycled data to exactly the required array length.
        self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
        self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
        self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
        self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code f. Test NaN data with odd length array data shifted 1 without SIMD.
        """
        # We don't actually test the result as there is no meaningful
        # order comparison with NaN; this only checks amin runs cleanly.
        result = arrayfunc.amin(self.data_nan, nosimd=True)
        result2 = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code f. Test Inf data with odd length array data shifted 1 without SIMD.
        """
        result = arrayfunc.amin(self.data_inf, nosimd=True)
        self.assertEqual(result, min(self.data_inf))

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code f. Test Negative Inf data with odd length array data shifted 1 without SIMD.
        """
        result = arrayfunc.amin(self.data_ninf, nosimd=True)
        self.assertEqual(result, min(self.data_ninf))

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 1 without SIMD.
        """
        # The mixed test does not include NaN, as there is no meaningful
        # order comparison with NaN.
        result = arrayfunc.amin(self.data_mixed, nosimd=True)
        self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_2_odd_arraysize_without_simd_f(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise.

        Build four 'f' test arrays (self.data_nan, self.data_inf,
        self.data_ninf, self.data_mixed) of odd length, with the
        non-finite values rotated 2 places.
        """
        # This is the 'odd' array size variant: 5 extra elements are
        # added so the data spills over past the SIMD handler.
        arrayextension = 5
        arraylength = 96 + arrayextension

        MaxVal = arrayfunc.arraylimits.f_max
        MinVal = arrayfunc.arraylimits.f_min

        # Create some test data containing a mixture of values.
        # BUG FIX: the step was previously "int(MaxVal) - int(MinVal) // 100",
        # which by operator precedence exceeds the whole range and produced a
        # single-element list; the intended step is 1/100 of the full range.
        step = (int(MaxVal) - int(MinVal)) // 100
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))

        nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

        # Rotate the values in place to vary where the special values
        # appear. This matters mainly for the SIMD tests, which operate
        # on several elements in parallel.
        rotplaces = 2
        nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
        infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
        ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
        mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

        # Now create the arrays used in the tests. islice trims the
        # cycled data to exactly the required array length.
        self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
        self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
        self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
        self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code f. Test NaN data with odd length array data shifted 2 without SIMD.
        """
        # We don't actually test the result as there is no meaningful
        # order comparison with NaN; this only checks amin runs cleanly.
        result = arrayfunc.amin(self.data_nan, nosimd=True)
        result2 = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code f. Test Inf data with odd length array data shifted 2 without SIMD.
        """
        result = arrayfunc.amin(self.data_inf, nosimd=True)
        self.assertEqual(result, min(self.data_inf))

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code f. Test Negative Inf data with odd length array data shifted 2 without SIMD.
        """
        result = arrayfunc.amin(self.data_ninf, nosimd=True)
        self.assertEqual(result, min(self.data_ninf))

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 2 without SIMD.
        """
        # The mixed test does not include NaN, as there is no meaningful
        # order comparison with NaN.
        result = arrayfunc.amin(self.data_mixed, nosimd=True)
        self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_3_odd_arraysize_without_simd_f(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise.

        Build four 'f' test arrays (self.data_nan, self.data_inf,
        self.data_ninf, self.data_mixed) of odd length, with the
        non-finite values rotated 3 places.
        """
        # This is the 'odd' array size variant: 5 extra elements are
        # added so the data spills over past the SIMD handler.
        arrayextension = 5
        arraylength = 96 + arrayextension

        MaxVal = arrayfunc.arraylimits.f_max
        MinVal = arrayfunc.arraylimits.f_min

        # Create some test data containing a mixture of values.
        # BUG FIX: the step was previously "int(MaxVal) - int(MinVal) // 100",
        # which by operator precedence exceeds the whole range and produced a
        # single-element list; the intended step is 1/100 of the full range.
        step = (int(MaxVal) - int(MinVal)) // 100
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))

        nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

        # Rotate the values in place to vary where the special values
        # appear. This matters mainly for the SIMD tests, which operate
        # on several elements in parallel.
        rotplaces = 3
        nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
        infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
        ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
        mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

        # Now create the arrays used in the tests. islice trims the
        # cycled data to exactly the required array length.
        self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
        self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
        self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
        self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code f. Test NaN data with odd length array data shifted 3 without SIMD.
        """
        # We don't actually test the result as there is no meaningful
        # order comparison with NaN; this only checks amin runs cleanly.
        result = arrayfunc.amin(self.data_nan, nosimd=True)
        result2 = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code f. Test Inf data with odd length array data shifted 3 without SIMD.
        """
        result = arrayfunc.amin(self.data_inf, nosimd=True)
        self.assertEqual(result, min(self.data_inf))

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code f. Test Negative Inf data with odd length array data shifted 3 without SIMD.
        """
        result = arrayfunc.amin(self.data_ninf, nosimd=True)
        self.assertEqual(result, min(self.data_ninf))

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 3 without SIMD.
        """
        # The mixed test does not include NaN, as there is no meaningful
        # order comparison with NaN.
        result = arrayfunc.amin(self.data_mixed, nosimd=True)
        self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_4_odd_arraysize_without_simd_f(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise.

        Build four 'f' test arrays (self.data_nan, self.data_inf,
        self.data_ninf, self.data_mixed) of odd length, with the
        non-finite values rotated 4 places.
        """
        # This is the 'odd' array size variant: 5 extra elements are
        # added so the data spills over past the SIMD handler.
        arrayextension = 5
        arraylength = 96 + arrayextension

        MaxVal = arrayfunc.arraylimits.f_max
        MinVal = arrayfunc.arraylimits.f_min

        # Create some test data containing a mixture of values.
        # BUG FIX: the step was previously "int(MaxVal) - int(MinVal) // 100",
        # which by operator precedence exceeds the whole range and produced a
        # single-element list; the intended step is 1/100 of the full range.
        step = (int(MaxVal) - int(MinVal)) // 100
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))

        nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

        # Rotate the values in place to vary where the special values
        # appear. This matters mainly for the SIMD tests, which operate
        # on several elements in parallel.
        rotplaces = 4
        nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
        infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
        ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
        mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

        # Now create the arrays used in the tests. islice trims the
        # cycled data to exactly the required array length.
        self.data_nan = array.array('f', itertools.islice(itertools.cycle(nanvaldata), arraylength))
        self.data_inf = array.array('f', itertools.islice(itertools.cycle(infvaldata), arraylength))
        self.data_ninf = array.array('f', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
        self.data_mixed = array.array('f', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code f. Test NaN data with odd length array data shifted 4 without SIMD.
        """
        # We don't actually test the result as there is no meaningful
        # order comparison with NaN; this only checks amin runs cleanly.
        result = arrayfunc.amin(self.data_nan, nosimd=True)
        result2 = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code f. Test Inf data with odd length array data shifted 4 without SIMD.
        """
        result = arrayfunc.amin(self.data_inf, nosimd=True)
        self.assertEqual(result, min(self.data_inf))

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code f. Test Negative Inf data with odd length array data shifted 4 without SIMD.
        """
        result = arrayfunc.amin(self.data_ninf, nosimd=True)
        self.assertEqual(result, min(self.data_ninf))

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code f. Test mixed non-finite data with odd length array data shifted 4 without SIMD.
        """
        # The mixed test does not include NaN, as there is no meaningful
        # order comparison with NaN.
        result = arrayfunc.amin(self.data_mixed, nosimd=True)
        self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_0_even_arraysize_with_simd_d(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise.

        Build four 'd' test arrays (self.data_nan, self.data_inf,
        self.data_ninf, self.data_mixed) of even length, with the
        non-finite values unrotated (shift 0). Tests here run with
        SIMD acceleration enabled (no nosimd flag).
        """
        # This is the 'even' array size variant: no extra elements are
        # added. (Odd variants add 5 so data spills past the SIMD handler.)
        arrayextension = 0
        arraylength = 96 + arrayextension

        MaxVal = arrayfunc.arraylimits.d_max
        MinVal = arrayfunc.arraylimits.d_min

        # Create some test data containing a mixture of values.
        # BUG FIX: the step was previously "int(MaxVal) - int(MinVal) // 100",
        # which by operator precedence exceeds the whole range and produced a
        # single-element list; the intended step is 1/100 of the full range.
        step = (int(MaxVal) - int(MinVal)) // 100
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))

        nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
        mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

        # Rotate the values in place to vary where the special values
        # appear. A shift of 0 leaves the data unrotated; the slicing is
        # kept for consistency with the other template variants.
        rotplaces = 0
        nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
        infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
        ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
        mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

        # Now create the arrays used in the tests. islice trims the
        # cycled data to exactly the required array length.
        self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
        self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
        self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
        self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code d. Test NaN data with even length array data shifted 0 with SIMD.
        """
        # We don't actually test the result as there is no meaningful
        # order comparison with NaN; this only checks amin runs cleanly.
        result = arrayfunc.amin(self.data_nan)
        result2 = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code d. Test Inf data with even length array data shifted 0 with SIMD.
        """
        result = arrayfunc.amin(self.data_inf)
        self.assertEqual(result, min(self.data_inf))

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code d. Test Negative Inf data with even length array data shifted 0 with SIMD.
        """
        result = arrayfunc.amin(self.data_ninf)
        self.assertEqual(result, min(self.data_ninf))

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code d. Test mixed non-finite data with even length array data shifted 0 with SIMD.
        """
        # The mixed test does not include NaN, as there is no meaningful
        # order comparison with NaN.
        result = arrayfunc.amin(self.data_mixed)
        self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_1_even_arraysize_with_simd_d(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays.

		Builds four double ('d') arrays seeded with NaN, +inf, -inf,
		and mixed +/-inf values. The special values are rotated 1
		place so SIMD code paths see them at varying offsets.
		"""
		# Even length data: no extension, so the array length stays an
		# exact multiple suited to the SIMD handler.
		arrayextension = 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min

		# Create some test data containing a mixture of values.
		# The subtraction is parenthesised so the step is one hundredth
		# of the full value range; without parentheses the step exceeded
		# the range and yielded only a single value.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), (int(MaxVal) - int(MinVal)) // 100))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

		# Rotate the values in place in order to create different combinations.
		# Moving the special values to different offsets matters for SIMD
		# implementations which process several elements in parallel.
		rotplaces = 1
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

		# Now create the arrays used in the tests, cycling the source
		# data to fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 1 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN; this only checks that neither call raises.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 1 with SIMD.
		"""
		result = arrayfunc.amin(self.data_inf)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 1 with SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 1 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		result = arrayfunc.amin(self.data_mixed)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_2_even_arraysize_with_simd_d(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays.

		Builds four double ('d') arrays seeded with NaN, +inf, -inf,
		and mixed +/-inf values. The special values are rotated 2
		places so SIMD code paths see them at varying offsets.
		"""
		# Even length data: no extension, so the array length stays an
		# exact multiple suited to the SIMD handler.
		arrayextension = 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min

		# Create some test data containing a mixture of values.
		# The subtraction is parenthesised so the step is one hundredth
		# of the full value range; without parentheses the step exceeded
		# the range and yielded only a single value.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), (int(MaxVal) - int(MinVal)) // 100))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

		# Rotate the values in place in order to create different combinations.
		# Moving the special values to different offsets matters for SIMD
		# implementations which process several elements in parallel.
		rotplaces = 2
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

		# Now create the arrays used in the tests, cycling the source
		# data to fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 2 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN; this only checks that neither call raises.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 2 with SIMD.
		"""
		result = arrayfunc.amin(self.data_inf)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 2 with SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 2 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		result = arrayfunc.amin(self.data_mixed)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_3_even_arraysize_with_simd_d(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays.

		Builds four double ('d') arrays seeded with NaN, +inf, -inf,
		and mixed +/-inf values. The special values are rotated 3
		places so SIMD code paths see them at varying offsets.
		"""
		# Even length data: no extension, so the array length stays an
		# exact multiple suited to the SIMD handler.
		arrayextension = 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min

		# Create some test data containing a mixture of values.
		# The subtraction is parenthesised so the step is one hundredth
		# of the full value range; without parentheses the step exceeded
		# the range and yielded only a single value.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), (int(MaxVal) - int(MinVal)) // 100))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

		# Rotate the values in place in order to create different combinations.
		# Moving the special values to different offsets matters for SIMD
		# implementations which process several elements in parallel.
		rotplaces = 3
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

		# Now create the arrays used in the tests, cycling the source
		# data to fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 3 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN; this only checks that neither call raises.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 3 with SIMD.
		"""
		result = arrayfunc.amin(self.data_inf)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 3 with SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 3 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		result = arrayfunc.amin(self.data_mixed)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_4_even_arraysize_with_simd_d(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays.

		Builds four double ('d') arrays seeded with NaN, +inf, -inf,
		and mixed +/-inf values. The special values are rotated 4
		places so SIMD code paths see them at varying offsets.
		"""
		# Even length data: no extension, so the array length stays an
		# exact multiple suited to the SIMD handler.
		arrayextension = 0
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min

		# Create some test data containing a mixture of values.
		# The subtraction is parenthesised so the step is one hundredth
		# of the full value range; without parentheses the step exceeded
		# the range and yielded only a single value.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), (int(MaxVal) - int(MinVal)) // 100))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

		# Rotate the values in place in order to create different combinations.
		# Moving the special values to different offsets matters for SIMD
		# implementations which process several elements in parallel.
		rotplaces = 4
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

		# Now create the arrays used in the tests, cycling the source
		# data to fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 4 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN; this only checks that neither call raises.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 4 with SIMD.
		"""
		result = arrayfunc.amin(self.data_inf)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 4 with SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 4 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		result = arrayfunc.amin(self.data_mixed)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_0_odd_arraysize_with_simd_d(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays.

		Builds four double ('d') arrays seeded with NaN, +inf, -inf,
		and mixed +/-inf values. The special values are rotated 0
		places so SIMD code paths see them at varying offsets.
		"""
		# Odd length data: extend by 5 so the data spills past the
		# SIMD handler and exercises the scalar tail-handling code.
		arrayextension = 5
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min

		# Create some test data containing a mixture of values.
		# The subtraction is parenthesised so the step is one hundredth
		# of the full value range; without parentheses the step exceeded
		# the range and yielded only a single value.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), (int(MaxVal) - int(MinVal)) // 100))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

		# Rotate the values in place in order to create different combinations.
		# Moving the special values to different offsets matters for SIMD
		# implementations which process several elements in parallel.
		rotplaces = 0
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

		# Now create the arrays used in the tests, cycling the source
		# data to fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with odd length array data shifted 0 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN; this only checks that neither call raises.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with odd length array data shifted 0 with SIMD.
		"""
		result = arrayfunc.amin(self.data_inf)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with odd length array data shifted 0 with SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 0 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		result = arrayfunc.amin(self.data_mixed)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_1_odd_arraysize_with_simd_d(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays.

		Builds four double ('d') arrays seeded with NaN, +inf, -inf,
		and mixed +/-inf values. The special values are rotated 1
		place so SIMD code paths see them at varying offsets.
		"""
		# Odd length data: extend by 5 so the data spills past the
		# SIMD handler and exercises the scalar tail-handling code.
		arrayextension = 5
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min

		# Create some test data containing a mixture of values.
		# The subtraction is parenthesised so the step is one hundredth
		# of the full value range; without parentheses the step exceeded
		# the range and yielded only a single value.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), (int(MaxVal) - int(MinVal)) // 100))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

		# Rotate the values in place in order to create different combinations.
		# Moving the special values to different offsets matters for SIMD
		# implementations which process several elements in parallel.
		rotplaces = 1
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

		# Now create the arrays used in the tests, cycling the source
		# data to fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with odd length array data shifted 1 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN; this only checks that neither call raises.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with odd length array data shifted 1 with SIMD.
		"""
		result = arrayfunc.amin(self.data_inf)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with odd length array data shifted 1 with SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 1 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		result = arrayfunc.amin(self.data_mixed)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_2_odd_arraysize_with_simd_d(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays.

		Builds four double ('d') arrays seeded with NaN, +inf, -inf,
		and mixed +/-inf values. The special values are rotated 2
		places so SIMD code paths see them at varying offsets.
		"""
		# Odd length data: extend by 5 so the data spills past the
		# SIMD handler and exercises the scalar tail-handling code.
		arrayextension = 5
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min

		# Create some test data containing a mixture of values.
		# The subtraction is parenthesised so the step is one hundredth
		# of the full value range; without parentheses the step exceeded
		# the range and yielded only a single value.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), (int(MaxVal) - int(MinVal)) // 100))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

		# Rotate the values in place in order to create different combinations.
		# Moving the special values to different offsets matters for SIMD
		# implementations which process several elements in parallel.
		rotplaces = 2
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

		# Now create the arrays used in the tests, cycling the source
		# data to fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with odd length array data shifted 2 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN; this only checks that neither call raises.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with odd length array data shifted 2 with SIMD.
		"""
		result = arrayfunc.amin(self.data_inf)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with odd length array data shifted 2 with SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 2 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		result = arrayfunc.amin(self.data_mixed)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_3_odd_arraysize_with_simd_d(unittest.TestCase):
	"""Test with floating point nan, inf -inf.
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Initialise the test arrays.

		Builds four double ('d') arrays seeded with NaN, +inf, -inf,
		and mixed +/-inf values. The special values are rotated 3
		places so SIMD code paths see them at varying offsets.
		"""
		# Odd length data: extend by 5 so the data spills past the
		# SIMD handler and exercises the scalar tail-handling code.
		arrayextension = 5
		arraylength = 96 + arrayextension

		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min

		# Create some test data containing a mixture of values.
		# The subtraction is parenthesised so the step is one hundredth
		# of the full value range; without parentheses the step exceeded
		# the range and yielded only a single value.
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), (int(MaxVal) - int(MinVal)) // 100))
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))

		# Rotate the values in place in order to create different combinations.
		# Moving the special values to different offsets matters for SIMD
		# implementations which process several elements in parallel.
		rotplaces = 3
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]

		# Now create the arrays used in the tests, cycling the source
		# data to fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with odd length array data shifted 3 with SIMD.
		"""
		# We don't actually test the result as there is no meaningful order
		# comparison with NaN; this only checks that neither call raises.
		result = arrayfunc.amin(self.data_nan)
		result2 = min(self.data_nan)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with odd length array data shifted 3 with SIMD.
		"""
		result = arrayfunc.amin(self.data_inf)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with odd length array data shifted 3 with SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 3 with SIMD.
		"""
		# The mixed test does not include NaN, as there is no meaningful order
		# comparison with NaN.
		result = arrayfunc.amin(self.data_mixed)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_4_odd_arraysize_with_simd_d(unittest.TestCase):
"""Test with floating point nan, inf -inf.
nonfinite_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
# We use a template to generate this code, so the following
# compare is inserted into the template to generate code which
# spills over past the SIMD handler.
if 'odd' == 'odd':
arrayextension = 5
else:
arrayextension = 0
arraylength = 96 + arrayextension
MaxVal = arrayfunc.arraylimits.d_max
MinVal = arrayfunc.arraylimits.d_min
# Create some test data containing a mixture of values.
halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))
nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
# Rotate the values in place in order to create different combinations.
# This is being generated through a template to allow us to create
# different combinations to help test the effects of having the
# special values in various locations. This is primarily of use
# for the SIMD tests which do operations in parallel.
rotplaces = 4
nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
# Now create the arrays used in the tests.
self.data_nan = array.array('d', [x for x,y in zip(itertools.cycle(nanvaldata), range(arraylength))])
self.data_inf = array.array('d', [x for x,y in zip(itertools.cycle(infvaldata), range(arraylength))])
self.data_ninf = array.array('d', [x for x,y in zip(itertools.cycle(ninfvaldata), range(arraylength))])
self.data_mixed = array.array('d', [x for x,y in zip(itertools.cycle(mixedvaldata), range(arraylength))])
########################################################
def test_amin_nonfinite_nan_01(self):
"""Test amin - Array code d. Test NaN data with odd length array data shifted 4 with SIMD.
"""
result = arrayfunc.amin(self.data_nan )
result2 = min(self.data_nan)
# We don't actually test the result as there is no meaningful order
# comparison with NaN.
########################################################
def test_amin_nonfinite_inf_02(self):
	"""Test amin - Array code d. Test Inf data with odd length array data shifted 4 with SIMD.
	"""
	# amin must agree with the builtin min() on data containing +inf.
	expected = min(self.data_inf)
	self.assertEqual(arrayfunc.amin(self.data_inf), expected)
########################################################
def test_amin_nonfinite_ninf_03(self):
	"""Test amin - Array code d. Test Negative Inf data with odd length array data shifted 4 with SIMD.
	"""
	# amin must agree with the builtin min() on data containing -inf.
	expected = min(self.data_ninf)
	self.assertEqual(arrayfunc.amin(self.data_ninf), expected)
########################################################
def test_amin_nonfinite_mixed_04(self):
	"""Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 4 with SIMD.
	"""
	# NaN is deliberately absent from the mixed data since it has no
	# meaningful ordering; inf and -inf compare normally.
	expected = min(self.data_mixed)
	self.assertEqual(arrayfunc.amin(self.data_mixed), expected)
##############################################################################
##############################################################################
class amin_nonfinite_0_even_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin with non-finite floating point data (nan, inf, -inf).
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Create 'd' type test arrays seeded with non-finite values.

		The arrays are even length and the source data is rotated 0
		places so the special values fall at different offsets relative
		to any SIMD block boundaries.
		"""
		# Even length: the array length is a multiple of the SIMD block
		# size (the template's constant 'even' == 'odd' branch folded away).
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# Create some test data containing a mixture of values.
		# The step is parenthesised so it is (max - min) // 100; the
		# original template computed max - (min // 100), which made the
		# step larger than the whole range and collapsed halfrangedata
		# to a single element.
		step = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))
		# Each data set embeds its special values in the middle of the
		# ordinary range data.
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values to move the special values into different
		# positions. This is primarily of use for the SIMD tests which
		# do operations in parallel.
		rotplaces = 0
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests, cycling the data to
		# fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 0 without SIMD.
		"""
		# NaN has no meaningful ordering, so the result is not checked;
		# this only verifies that amin completes without raising.
		result = arrayfunc.amin(self.data_nan, nosimd=True)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 0 without SIMD.
		"""
		result = arrayfunc.amin(self.data_inf, nosimd=True)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 0 without SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf, nosimd=True)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 0 without SIMD.
		"""
		# NaN is excluded from the mixed data, as there is no meaningful
		# order comparison with NaN.
		result = arrayfunc.amin(self.data_mixed, nosimd=True)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_1_even_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin with non-finite floating point data (nan, inf, -inf).
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Create 'd' type test arrays seeded with non-finite values.

		The arrays are even length and the source data is rotated 1
		place so the special values fall at different offsets relative
		to any SIMD block boundaries.
		"""
		# Even length: the array length is a multiple of the SIMD block
		# size (the template's constant 'even' == 'odd' branch folded away).
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# Create some test data containing a mixture of values.
		# The step is parenthesised so it is (max - min) // 100; the
		# original template computed max - (min // 100), which made the
		# step larger than the whole range and collapsed halfrangedata
		# to a single element.
		step = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))
		# Each data set embeds its special values in the middle of the
		# ordinary range data.
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values to move the special values into different
		# positions. This is primarily of use for the SIMD tests which
		# do operations in parallel.
		rotplaces = 1
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests, cycling the data to
		# fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 1 without SIMD.
		"""
		# NaN has no meaningful ordering, so the result is not checked;
		# this only verifies that amin completes without raising.
		result = arrayfunc.amin(self.data_nan, nosimd=True)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 1 without SIMD.
		"""
		result = arrayfunc.amin(self.data_inf, nosimd=True)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 1 without SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf, nosimd=True)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 1 without SIMD.
		"""
		# NaN is excluded from the mixed data, as there is no meaningful
		# order comparison with NaN.
		result = arrayfunc.amin(self.data_mixed, nosimd=True)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_2_even_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin with non-finite floating point data (nan, inf, -inf).
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Create 'd' type test arrays seeded with non-finite values.

		The arrays are even length and the source data is rotated 2
		places so the special values fall at different offsets relative
		to any SIMD block boundaries.
		"""
		# Even length: the array length is a multiple of the SIMD block
		# size (the template's constant 'even' == 'odd' branch folded away).
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# Create some test data containing a mixture of values.
		# The step is parenthesised so it is (max - min) // 100; the
		# original template computed max - (min // 100), which made the
		# step larger than the whole range and collapsed halfrangedata
		# to a single element.
		step = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))
		# Each data set embeds its special values in the middle of the
		# ordinary range data.
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values to move the special values into different
		# positions. This is primarily of use for the SIMD tests which
		# do operations in parallel.
		rotplaces = 2
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests, cycling the data to
		# fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 2 without SIMD.
		"""
		# NaN has no meaningful ordering, so the result is not checked;
		# this only verifies that amin completes without raising.
		result = arrayfunc.amin(self.data_nan, nosimd=True)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 2 without SIMD.
		"""
		result = arrayfunc.amin(self.data_inf, nosimd=True)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 2 without SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf, nosimd=True)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 2 without SIMD.
		"""
		# NaN is excluded from the mixed data, as there is no meaningful
		# order comparison with NaN.
		result = arrayfunc.amin(self.data_mixed, nosimd=True)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_3_even_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin with non-finite floating point data (nan, inf, -inf).
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Create 'd' type test arrays seeded with non-finite values.

		The arrays are even length and the source data is rotated 3
		places so the special values fall at different offsets relative
		to any SIMD block boundaries.
		"""
		# Even length: the array length is a multiple of the SIMD block
		# size (the template's constant 'even' == 'odd' branch folded away).
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# Create some test data containing a mixture of values.
		# The step is parenthesised so it is (max - min) // 100; the
		# original template computed max - (min // 100), which made the
		# step larger than the whole range and collapsed halfrangedata
		# to a single element.
		step = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))
		# Each data set embeds its special values in the middle of the
		# ordinary range data.
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values to move the special values into different
		# positions. This is primarily of use for the SIMD tests which
		# do operations in parallel.
		rotplaces = 3
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests, cycling the data to
		# fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 3 without SIMD.
		"""
		# NaN has no meaningful ordering, so the result is not checked;
		# this only verifies that amin completes without raising.
		result = arrayfunc.amin(self.data_nan, nosimd=True)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 3 without SIMD.
		"""
		result = arrayfunc.amin(self.data_inf, nosimd=True)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 3 without SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf, nosimd=True)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 3 without SIMD.
		"""
		# NaN is excluded from the mixed data, as there is no meaningful
		# order comparison with NaN.
		result = arrayfunc.amin(self.data_mixed, nosimd=True)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_4_even_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin with non-finite floating point data (nan, inf, -inf).
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Create 'd' type test arrays seeded with non-finite values.

		The arrays are even length and the source data is rotated 4
		places so the special values fall at different offsets relative
		to any SIMD block boundaries.
		"""
		# Even length: the array length is a multiple of the SIMD block
		# size (the template's constant 'even' == 'odd' branch folded away).
		arraylength = 96
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# Create some test data containing a mixture of values.
		# The step is parenthesised so it is (max - min) // 100; the
		# original template computed max - (min // 100), which made the
		# step larger than the whole range and collapsed halfrangedata
		# to a single element.
		step = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))
		# Each data set embeds its special values in the middle of the
		# ordinary range data.
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values to move the special values into different
		# positions. This is primarily of use for the SIMD tests which
		# do operations in parallel.
		rotplaces = 4
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests, cycling the data to
		# fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with even length array data shifted 4 without SIMD.
		"""
		# NaN has no meaningful ordering, so the result is not checked;
		# this only verifies that amin completes without raising.
		result = arrayfunc.amin(self.data_nan, nosimd=True)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with even length array data shifted 4 without SIMD.
		"""
		result = arrayfunc.amin(self.data_inf, nosimd=True)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with even length array data shifted 4 without SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf, nosimd=True)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with even length array data shifted 4 without SIMD.
		"""
		# NaN is excluded from the mixed data, as there is no meaningful
		# order comparison with NaN.
		result = arrayfunc.amin(self.data_mixed, nosimd=True)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_0_odd_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin with non-finite floating point data (nan, inf, -inf).
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Create 'd' type test arrays seeded with non-finite values.

		The arrays are odd length and the source data is rotated 0
		places so the special values fall at different offsets relative
		to any SIMD block boundaries.
		"""
		# Odd length: the extra 5 elements spill past the SIMD handler
		# into the non-SIMD tail loop (the template's constant
		# 'odd' == 'odd' branch folded away).
		arraylength = 96 + 5
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# Create some test data containing a mixture of values.
		# The step is parenthesised so it is (max - min) // 100; the
		# original template computed max - (min // 100), which made the
		# step larger than the whole range and collapsed halfrangedata
		# to a single element.
		step = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))
		# Each data set embeds its special values in the middle of the
		# ordinary range data.
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values to move the special values into different
		# positions. This is primarily of use for the SIMD tests which
		# do operations in parallel.
		rotplaces = 0
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests, cycling the data to
		# fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with odd length array data shifted 0 without SIMD.
		"""
		# NaN has no meaningful ordering, so the result is not checked;
		# this only verifies that amin completes without raising.
		result = arrayfunc.amin(self.data_nan, nosimd=True)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with odd length array data shifted 0 without SIMD.
		"""
		result = arrayfunc.amin(self.data_inf, nosimd=True)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with odd length array data shifted 0 without SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf, nosimd=True)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 0 without SIMD.
		"""
		# NaN is excluded from the mixed data, as there is no meaningful
		# order comparison with NaN.
		result = arrayfunc.amin(self.data_mixed, nosimd=True)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_1_odd_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin with non-finite floating point data (nan, inf, -inf).
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Create 'd' type test arrays seeded with non-finite values.

		The arrays are odd length and the source data is rotated 1
		place so the special values fall at different offsets relative
		to any SIMD block boundaries.
		"""
		# Odd length: the extra 5 elements spill past the SIMD handler
		# into the non-SIMD tail loop (the template's constant
		# 'odd' == 'odd' branch folded away).
		arraylength = 96 + 5
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# Create some test data containing a mixture of values.
		# The step is parenthesised so it is (max - min) // 100; the
		# original template computed max - (min // 100), which made the
		# step larger than the whole range and collapsed halfrangedata
		# to a single element.
		step = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))
		# Each data set embeds its special values in the middle of the
		# ordinary range data.
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values to move the special values into different
		# positions. This is primarily of use for the SIMD tests which
		# do operations in parallel.
		rotplaces = 1
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests, cycling the data to
		# fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with odd length array data shifted 1 without SIMD.
		"""
		# NaN has no meaningful ordering, so the result is not checked;
		# this only verifies that amin completes without raising.
		result = arrayfunc.amin(self.data_nan, nosimd=True)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with odd length array data shifted 1 without SIMD.
		"""
		result = arrayfunc.amin(self.data_inf, nosimd=True)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with odd length array data shifted 1 without SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf, nosimd=True)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 1 without SIMD.
		"""
		# NaN is excluded from the mixed data, as there is no meaningful
		# order comparison with NaN.
		result = arrayfunc.amin(self.data_mixed, nosimd=True)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_2_odd_arraysize_without_simd_d(unittest.TestCase):
	"""Test amin with non-finite floating point data (nan, inf, -inf).
	nonfinite_template
	"""

	########################################################
	def setUp(self):
		"""Create 'd' type test arrays seeded with non-finite values.

		The arrays are odd length and the source data is rotated 2
		places so the special values fall at different offsets relative
		to any SIMD block boundaries.
		"""
		# Odd length: the extra 5 elements spill past the SIMD handler
		# into the non-SIMD tail loop (the template's constant
		# 'odd' == 'odd' branch folded away).
		arraylength = 96 + 5
		MaxVal = arrayfunc.arraylimits.d_max
		MinVal = arrayfunc.arraylimits.d_min
		# Create some test data containing a mixture of values.
		# The step is parenthesised so it is (max - min) // 100; the
		# original template computed max - (min // 100), which made the
		# step larger than the whole range and collapsed halfrangedata
		# to a single element.
		step = (int(MaxVal) - int(MinVal)) // 100
		halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), step))
		# Each data set embeds its special values in the middle of the
		# ordinary range data.
		nanvaldatabase = list(itertools.chain(halfrangedata, [math.nan, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		infvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		ninfvaldatabase = list(itertools.chain(halfrangedata, [-math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		mixedvaldatabase = list(itertools.chain(halfrangedata, [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal], halfrangedata))
		# Rotate the values to move the special values into different
		# positions. This is primarily of use for the SIMD tests which
		# do operations in parallel.
		rotplaces = 2
		nanvaldata = nanvaldatabase[rotplaces:] + nanvaldatabase[:rotplaces]
		infvaldata = infvaldatabase[rotplaces:] + infvaldatabase[:rotplaces]
		ninfvaldata = ninfvaldatabase[rotplaces:] + ninfvaldatabase[:rotplaces]
		mixedvaldata = mixedvaldatabase[rotplaces:] + mixedvaldatabase[:rotplaces]
		# Now create the arrays used in the tests, cycling the data to
		# fill the required length.
		self.data_nan = array.array('d', itertools.islice(itertools.cycle(nanvaldata), arraylength))
		self.data_inf = array.array('d', itertools.islice(itertools.cycle(infvaldata), arraylength))
		self.data_ninf = array.array('d', itertools.islice(itertools.cycle(ninfvaldata), arraylength))
		self.data_mixed = array.array('d', itertools.islice(itertools.cycle(mixedvaldata), arraylength))

	########################################################
	def test_amin_nonfinite_nan_01(self):
		"""Test amin - Array code d. Test NaN data with odd length array data shifted 2 without SIMD.
		"""
		# NaN has no meaningful ordering, so the result is not checked;
		# this only verifies that amin completes without raising.
		result = arrayfunc.amin(self.data_nan, nosimd=True)

	########################################################
	def test_amin_nonfinite_inf_02(self):
		"""Test amin - Array code d. Test Inf data with odd length array data shifted 2 without SIMD.
		"""
		result = arrayfunc.amin(self.data_inf, nosimd=True)
		self.assertEqual(result, min(self.data_inf))

	########################################################
	def test_amin_nonfinite_ninf_03(self):
		"""Test amin - Array code d. Test Negative Inf data with odd length array data shifted 2 without SIMD.
		"""
		result = arrayfunc.amin(self.data_ninf, nosimd=True)
		self.assertEqual(result, min(self.data_ninf))

	########################################################
	def test_amin_nonfinite_mixed_04(self):
		"""Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 2 without SIMD.
		"""
		# NaN is excluded from the mixed data, as there is no meaningful
		# order comparison with NaN.
		result = arrayfunc.amin(self.data_mixed, nosimd=True)
		self.assertEqual(result, min(self.data_mixed))
##############################################################################
##############################################################################
class amin_nonfinite_3_odd_arraysize_without_simd_d(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise the non-finite test arrays (rotation = 3, odd length)."""
        # Template artifact: the 'odd' variant always extends the base
        # length by 5 so the data spills past the SIMD block handler.
        arraylength = 96 + 5

        MaxVal = arrayfunc.arraylimits.d_max
        MinVal = arrayfunc.arraylimits.d_min

        # Finite filler values drawn from half the type's range.
        # NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
        # (int(MaxVal) - int(MinVal)) // 100 may have been intended — confirm
        # against the template before changing.
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

        # Each data set embeds its special values between two copies of the
        # finite filler data.
        specials = (
            ('data_nan', [math.nan, -10.0, MaxVal, 10.0, MinVal]),
            ('data_inf', [math.inf, -10.0, MaxVal, 10.0, MinVal]),
            ('data_ninf', [-math.inf, -10.0, MaxVal, 10.0, MinVal]),
            ('data_mixed', [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal]),
        )

        # Rotate each sequence to move the special values into different
        # positions (relevant for SIMD code paths that work on fixed-width
        # groups), then tile it out to the required array length.
        rotplaces = 3
        for attrname, middle in specials:
            base = list(itertools.chain(halfrangedata, middle, halfrangedata))
            rotated = base[rotplaces:] + base[:rotplaces]
            filled = array.array('d', itertools.islice(itertools.cycle(rotated), arraylength))
            setattr(self, attrname, filled)

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code d. Test NaN data with odd length array data shifted 3 without SIMD.
        """
        # Nothing is asserted: ordering comparisons involving NaN have no
        # meaningful result.
        af_result = arrayfunc.amin(self.data_nan, nosimd=True)
        py_result = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code d. Test Inf data with odd length array data shifted 3 without SIMD.
        """
        expected = min(self.data_inf)
        self.assertEqual(arrayfunc.amin(self.data_inf, nosimd=True), expected)

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code d. Test Negative Inf data with odd length array data shifted 3 without SIMD.
        """
        expected = min(self.data_ninf)
        self.assertEqual(arrayfunc.amin(self.data_ninf, nosimd=True), expected)

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 3 without SIMD.
        """
        # NaN is deliberately absent from the mixed data set, since ordering
        # comparisons with NaN are not meaningful.
        expected = min(self.data_mixed)
        self.assertEqual(arrayfunc.amin(self.data_mixed, nosimd=True), expected)
##############################################################################
##############################################################################
class amin_nonfinite_4_odd_arraysize_without_simd_d(unittest.TestCase):
    """Test with floating point nan, inf -inf.
    nonfinite_template
    """

    ########################################################
    def setUp(self):
        """Initialise the non-finite test arrays (rotation = 4, odd length)."""
        # Template artifact: the 'odd' variant always extends the base
        # length by 5 so the data spills past the SIMD block handler.
        arraylength = 96 + 5

        MaxVal = arrayfunc.arraylimits.d_max
        MinVal = arrayfunc.arraylimits.d_min

        # Finite filler values drawn from half the type's range.
        # NOTE(review): precedence makes the step int(MaxVal) - (int(MinVal) // 100);
        # (int(MaxVal) - int(MinVal)) // 100 may have been intended — confirm
        # against the template before changing.
        halfrangedata = list(range(int(MinVal / 2), int(MaxVal / 2), int(MaxVal) - int(MinVal) // 100))

        # Each data set embeds its special values between two copies of the
        # finite filler data.
        specials = (
            ('data_nan', [math.nan, -10.0, MaxVal, 10.0, MinVal]),
            ('data_inf', [math.inf, -10.0, MaxVal, 10.0, MinVal]),
            ('data_ninf', [-math.inf, -10.0, MaxVal, 10.0, MinVal]),
            ('data_mixed', [math.inf, -math.inf, -10.0, MaxVal, 10.0, MinVal]),
        )

        # Rotate each sequence to move the special values into different
        # positions (relevant for SIMD code paths that work on fixed-width
        # groups), then tile it out to the required array length.
        rotplaces = 4
        for attrname, middle in specials:
            base = list(itertools.chain(halfrangedata, middle, halfrangedata))
            rotated = base[rotplaces:] + base[:rotplaces]
            filled = array.array('d', itertools.islice(itertools.cycle(rotated), arraylength))
            setattr(self, attrname, filled)

    ########################################################
    def test_amin_nonfinite_nan_01(self):
        """Test amin - Array code d. Test NaN data with odd length array data shifted 4 without SIMD.
        """
        # Nothing is asserted: ordering comparisons involving NaN have no
        # meaningful result.
        af_result = arrayfunc.amin(self.data_nan, nosimd=True)
        py_result = min(self.data_nan)

    ########################################################
    def test_amin_nonfinite_inf_02(self):
        """Test amin - Array code d. Test Inf data with odd length array data shifted 4 without SIMD.
        """
        expected = min(self.data_inf)
        self.assertEqual(arrayfunc.amin(self.data_inf, nosimd=True), expected)

    ########################################################
    def test_amin_nonfinite_ninf_03(self):
        """Test amin - Array code d. Test Negative Inf data with odd length array data shifted 4 without SIMD.
        """
        expected = min(self.data_ninf)
        self.assertEqual(arrayfunc.amin(self.data_ninf, nosimd=True), expected)

    ########################################################
    def test_amin_nonfinite_mixed_04(self):
        """Test amin - Array code d. Test mixed non-finite data with odd length array data shifted 4 without SIMD.
        """
        # NaN is deliberately absent from the mixed data set, since ordering
        # comparisons with NaN are not meaningful.
        expected = min(self.data_mixed)
        self.assertEqual(arrayfunc.amin(self.data_mixed, nosimd=True), expected)
##############################################################################
##############################################################################
if __name__ == '__main__':
    # Optional '-l' flag redirects the unittest report to a log file instead
    # of the console.
    if '-l' not in sys.argv:
        unittest.main()
    else:
        # Strip our private option so "unittest" does not reject it as an
        # unknown argument.
        sys.argv.remove('-l')
        with open('af_unittest.txt', 'a') as f:
            f.write('\n\n')
            f.write('amin\n\n')
            trun = unittest.TextTestRunner(f)
            unittest.main(testRunner=trun)
##############################################################################
| 37.012247
| 125
| 0.627567
| 57,484
| 480,530
| 5.168516
| 0.005776
| 0.037078
| 0.023695
| 0.03662
| 0.996314
| 0.996314
| 0.996314
| 0.996314
| 0.996281
| 0.996281
| 0
| 0.010029
| 0.147398
| 480,530
| 12,982
| 126
| 37.015098
| 0.715151
| 0.342347
| 0
| 0.969317
| 0
| 0
| 0.005378
| 0
| 0
| 0
| 0
| 0
| 0.1578
| 1
| 0.154612
| false
| 0
| 0.001793
| 0
| 0.183503
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e2ac36b0ca3e23db0a91d45b7b3309c5e79f1d7
| 4,522
|
py
|
Python
|
metrics.py
|
Rango-dz/Chefmate
|
ffff9c492e26089a7d7726b44e3d78394037d3fe
|
[
"MIT"
] | 7
|
2020-02-06T23:14:58.000Z
|
2020-05-08T23:39:35.000Z
|
metrics.py
|
Rango-dz/Chefmate
|
ffff9c492e26089a7d7726b44e3d78394037d3fe
|
[
"MIT"
] | 166
|
2020-02-12T23:24:32.000Z
|
2021-12-13T20:34:13.000Z
|
metrics.py
|
Rango-dz/Chefmate
|
ffff9c492e26089a7d7726b44e3d78394037d3fe
|
[
"MIT"
] | 1
|
2020-05-08T23:43:26.000Z
|
2020-05-08T23:43:26.000Z
|
#!/usr/bin/python
import sys
import datetime
def main(argv):
    """Report search metrics read from ./client/logs/searchTime.txt.

    argv[0] selects the report: '1' prints the average search time,
    '2' prints the number of searches.  Optional argv[1] and argv[2]
    bound the report to an inclusive date/time range given as
    'YYYY/MM/DD:HH:MM:SS'.
    """
    print "\n"
    if str(argv[0]) == '1':
        # AVG SEARCH TIME
        usingDateRange = 0
        if (len(argv) == 3):
            usingDateRange = 1
            try:
                # Split 'YYYY/MM/DD:HH:MM:SS' into a date object plus a
                # seconds-since-midnight integer for each range endpoint.
                startDateObj = datetime.datetime.strptime(argv[1].strip().split(":")[0], '%Y/%m/%d')
                startTimeInt = (int(argv[1].strip().split(":")[1]) * 60 * 60) + (int(argv[1].strip().split(":")[2]) * 60) + (int(argv[1].strip().split(":")[3]))
                endDateObj = datetime.datetime.strptime(argv[2].strip().split(":")[0], '%Y/%m/%d')
                endTimeInt = (int(argv[2].strip().split(":")[1]) * 60 * 60) + (int(argv[2].strip().split(":")[2]) * 60) + (int(argv[2].strip().split(":")[3]))
            except (IndexError, ValueError):
                print "Malformed Date Argument(s), use './metrics help' to see proper format\n"
                return
        # NOTE(review): the file handle is never closed; consider a
        # with-statement if this script is ever modernised.
        file1 = open('./client/logs/searchTime.txt', 'r')
        Lines = file1.readlines()
        count = 0
        total = 0
        for line in Lines:
            # Each log line looks like '<YYYY/MM/DD HH:MM:SS> - <duration>'.
            dateStr = line.strip().split(" - ")[0]
            timeStr = dateStr.strip().split(" ")[1]
            try:
                currDateObj = datetime.datetime.strptime(dateStr.strip().split(" ")[0], '%Y/%m/%d')
                currTimeInt = (int(timeStr.strip().split(":")[0]) * 60 * 60) + (int(timeStr.strip().split(":")[1]) * 60) + (int(timeStr.strip().split(":")[2]))
            except (IndexError, ValueError):
                print "Malformed entry in searchTime.txt, exitting..."
                return
            if (usingDateRange == 1):
                # NOTE(review): '|' is the bitwise operator applied to bools
                # here; it behaves like 'or' but does not short-circuit.
                if ((currDateObj < startDateObj) | (currDateObj > endDateObj)):
                    continue
                # On the boundary days, also compare the time of day.
                if (currDateObj == startDateObj):
                    if (currTimeInt < startTimeInt):
                        continue
                if (currDateObj == endDateObj):
                    if (currTimeInt > endTimeInt):
                        continue
            total = total + float(line.strip().split(" - ")[1])
            count = count + 1
        if (count == 0):
            print "No data available in the described date/time range"
        else :
            # The '(s)' label implies logged durations are milliseconds and
            # the /1000 converts the mean to seconds — TODO confirm against
            # the writer of searchTime.txt.
            print 'avg search time (s): ', (total/float(count))/1000
            print 'over ', count, ' datapoints'
    elif (argv[0] == '2'):
        # NUM OF SEARCHES
        usingDateRange = 0
        if (len(argv) == 3):
            usingDateRange = 1
            try:
                # Same endpoint parsing as the '1' report above.
                startDateObj = datetime.datetime.strptime(argv[1].strip().split(":")[0], '%Y/%m/%d')
                startTimeInt = (int(argv[1].strip().split(":")[1]) * 60 * 60) + (int(argv[1].strip().split(":")[2]) * 60) + (int(argv[1].strip().split(":")[3]))
                endDateObj = datetime.datetime.strptime(argv[2].strip().split(":")[0], '%Y/%m/%d')
                endTimeInt = (int(argv[2].strip().split(":")[1]) * 60 * 60) + (int(argv[2].strip().split(":")[2]) * 60) + (int(argv[2].strip().split(":")[3]))
            except (IndexError, ValueError):
                print "Malformed Date Argument(s), use './metrics help' to see proper format\n"
                return
        file1 = open('./client/logs/searchTime.txt', 'r')
        Lines = file1.readlines()
        count = 0
        for line in Lines:
            dateStr = line.strip().split(" - ")[0]
            timeStr = dateStr.strip().split(" ")[1]
            try:
                currDateObj = datetime.datetime.strptime(dateStr.strip().split(" ")[0], '%Y/%m/%d')
                currTimeInt = (int(timeStr.strip().split(":")[0]) * 60 * 60) + (int(timeStr.strip().split(":")[1]) * 60) + (int(timeStr.strip().split(":")[2]))
            except (IndexError, ValueError):
                print "Malformed entry in searchTime.txt, exitting..."
                return
            if (usingDateRange == 1):
                if ((currDateObj < startDateObj) | (currDateObj > endDateObj)):
                    continue
                if (currDateObj == startDateObj):
                    if (currTimeInt < startTimeInt):
                        continue
                if (currDateObj == endDateObj):
                    if (currTimeInt > endTimeInt):
                        continue
            # Only count entries inside the range; durations are ignored.
            count = count + 1
        if (count == 0):
            print "No data available in the described date/time range"
        else :
            print 'number of searches: ', (count)
    # Trailing blank line after whichever report ran.
    print "\n"
# Entry point: forward the command-line arguments (minus the program name).
if __name__ == "__main__":
    main(sys.argv[1:])
| 45.22
| 160
| 0.488501
| 467
| 4,522
| 4.713062
| 0.188437
| 0.131758
| 0.049977
| 0.054521
| 0.890959
| 0.890959
| 0.890959
| 0.890959
| 0.890959
| 0.890959
| 0
| 0.036117
| 0.332596
| 4,522
| 100
| 161
| 45.22
| 0.693174
| 0.010615
| 0
| 0.860465
| 0
| 0
| 0.122093
| 0.012522
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.023256
| null | null | 0.127907
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2e3d1c70e97da4324b262fa012ab22f2dfcd4bdc
| 17
|
py
|
Python
|
test/2.py
|
nus-cs4215/x-slang-t3-ss-zlx
|
389ae814703dc41f89b8d28d557dffff8b83109f
|
[
"Apache-2.0"
] | null | null | null |
test/2.py
|
nus-cs4215/x-slang-t3-ss-zlx
|
389ae814703dc41f89b8d28d557dffff8b83109f
|
[
"Apache-2.0"
] | null | null | null |
test/2.py
|
nus-cs4215/x-slang-t3-ss-zlx
|
389ae814703dc41f89b8d28d557dffff8b83109f
|
[
"Apache-2.0"
] | null | null | null |
# Interpreter test fixture: build a three-element list, then evaluate an
# index expression (a[1], i.e. the value 3) as the program's final result.
a = [1,3,7]
a[1]
| 8.5
| 12
| 0.352941
| 6
| 17
| 1
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 0.235294
| 17
| 2
| 13
| 8.5
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e54347e5fc21bdf61bc7b1f97c1dcfbd1b0972e
| 26,228
|
py
|
Python
|
metal_python/api/ip_api.py
|
metal-stack/metal-python
|
cdf40fa86d2b2944f9818cef1c6723b1eecc506e
|
[
"MIT"
] | 7
|
2020-12-21T05:24:24.000Z
|
2022-02-12T20:55:32.000Z
|
metal_python/api/ip_api.py
|
metal-stack/metal-python
|
cdf40fa86d2b2944f9818cef1c6723b1eecc506e
|
[
"MIT"
] | 6
|
2020-09-16T07:23:34.000Z
|
2022-01-18T12:05:30.000Z
|
metal_python/api/ip_api.py
|
metal-stack/metal-python
|
cdf40fa86d2b2944f9818cef1c6723b1eecc506e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
metal-api
API to manage and control plane resources like machines, switches, operating system images, machine sizes, networks, IP addresses and more # noqa: E501
OpenAPI spec version: v0.15.7
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from metal_python.api_client import ApiClient
class IpApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the API wrapper, defaulting to a freshly built ApiClient."""
    # Only construct a default client when the caller did not supply one.
    self.api_client = ApiClient() if api_client is None else api_client
def allocate_ip(self, body, **kwargs):  # noqa: E501
    """allocate an ip in the given network.  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.allocate_ip(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1IPAllocateRequest body: (required)
    :return: V1IPResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps down to the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand the request thread back untouched.
        return self.allocate_ip_with_http_info(body, **kwargs)  # noqa: E501
    # Synchronous: return the response data directly.
    return self.allocate_ip_with_http_info(body, **kwargs)  # noqa: E501
def allocate_ip_with_http_info(self, body, **kwargs):  # noqa: E501
    """allocate an ip in the given network. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.allocate_ip_with_http_info(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param V1IPAllocateRequest body: (required)
    :return: V1IPResponse
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword names this endpoint accepts: its own parameter plus the
    # framework's standard request-control options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the local namespace; taken before any other locals are
    # created so it contains exactly 'self', 'body' and 'kwargs'.
    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method allocate_ip" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `allocate_ip`")  # noqa: E501

    # Assemble the pieces of the outgoing request.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['HMAC', 'jwt']  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1/ip/allocate', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1IPResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def allocate_specific_ip(self, ip, body, **kwargs):  # noqa: E501
    """allocate a specific ip in the given network.  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.allocate_specific_ip(ip, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ip: ip to try to allocate (required)
    :param V1IPAllocateRequest body: (required)
    :return: V1IPResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps down to the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand the request thread back untouched.
        return self.allocate_specific_ip_with_http_info(ip, body, **kwargs)  # noqa: E501
    # Synchronous: return the response data directly.
    return self.allocate_specific_ip_with_http_info(ip, body, **kwargs)  # noqa: E501
def allocate_specific_ip_with_http_info(self, ip, body, **kwargs):  # noqa: E501
    """allocate a specific ip in the given network. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.allocate_specific_ip_with_http_info(ip, body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str ip: ip to try to allocate (required)
    :param V1IPAllocateRequest body: (required)
    :return: V1IPResponse
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword names this endpoint accepts: its own parameters plus the
    # framework's standard request-control options.
    all_params = ['ip', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the local namespace, taken before other locals exist.
    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method allocate_specific_ip" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'ip' is set
    if ('ip' not in params or
            params['ip'] is None):
        raise ValueError("Missing the required parameter `ip` when calling `allocate_specific_ip`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `allocate_specific_ip`")  # noqa: E501

    # Assemble the pieces of the outgoing request; 'ip' is substituted
    # into the URL path template below.
    collection_formats = {}
    path_params = {}
    if 'ip' in params:
        path_params['ip'] = params['ip']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['HMAC', 'jwt']  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1/ip/allocate/{ip}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1IPResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def find_i_ps(self, body, **kwargs):  # noqa: E501
    """get all ips that match given properties  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.find_i_ps(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param V1IPFindRequest body: (required)
    :return: list[V1IPResponse]
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps down to the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand the request thread back untouched.
        return self.find_i_ps_with_http_info(body, **kwargs)  # noqa: E501
    # Synchronous: return the response data directly.
    return self.find_i_ps_with_http_info(body, **kwargs)  # noqa: E501
def find_i_ps_with_http_info(self, body, **kwargs):  # noqa: E501
    """get all ips that match given properties # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_i_ps_with_http_info(body, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param V1IPFindRequest body: (required)
    :return: list[V1IPResponse]
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword names this endpoint accepts: its own parameter plus the
    # framework's standard request-control options.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the local namespace, taken before other locals exist.
    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_i_ps" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `find_i_ps`")  # noqa: E501

    # Assemble the pieces of the outgoing request.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['HMAC', 'jwt']  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1/ip/find', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[V1IPResponse]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def find_ip(self, id, **kwargs):  # noqa: E501
    """get ip by id  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.find_ip(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the ip (required)
    :return: V1IPResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps down to the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand the request thread back untouched.
        return self.find_ip_with_http_info(id, **kwargs)  # noqa: E501
    # Synchronous: return the response data directly.
    return self.find_ip_with_http_info(id, **kwargs)  # noqa: E501
def find_ip_with_http_info(self, id, **kwargs):  # noqa: E501
    """get ip by id # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.find_ip_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: identifier of the ip (required)
    :return: V1IPResponse
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword names this endpoint accepts: its own parameter plus the
    # framework's standard request-control options.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the local namespace, taken before other locals exist.
    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_ip" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `find_ip`")  # noqa: E501

    # Assemble the pieces of the outgoing request; 'id' is substituted
    # into the URL path template below.  GET request: no body is sent.
    collection_formats = {}
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['HMAC', 'jwt']  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1/ip/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1IPResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def free_ip(self, id, **kwargs):  # noqa: E501
    """frees an ip  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.free_ip(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: identifier of the ip (required)
    :return: V1IPResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps down to the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand the request thread back untouched.
        return self.free_ip_with_http_info(id, **kwargs)  # noqa: E501
    # Synchronous: return the response data directly.
    return self.free_ip_with_http_info(id, **kwargs)  # noqa: E501
def free_ip_with_http_info(self, id, **kwargs):  # noqa: E501
    """frees an ip # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.free_ip_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str id: identifier of the ip (required)
    :return: V1IPResponse
    If the method is called asynchronously,
    returns the request thread.
    """
    # Keyword names this endpoint accepts: its own parameter plus the
    # framework's standard request-control options.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the local namespace, taken before other locals exist.
    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method free_ip" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `free_ip`")  # noqa: E501

    # Assemble the pieces of the outgoing request; 'id' is substituted
    # into the URL path template below.  No body is sent.
    collection_formats = {}
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['HMAC', 'jwt']  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1/ip/free/{id}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1IPResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_i_ps(self, **kwargs):  # noqa: E501
    """get all ips  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive the request thread instead.

    >>> thread = api.list_i_ps(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[V1IPResponse]
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps down to the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand the request thread back untouched.
        return self.list_i_ps_with_http_info(**kwargs)  # noqa: E501
    # Synchronous: return the response data directly.
    return self.list_i_ps_with_http_info(**kwargs)  # noqa: E501
def list_i_ps_with_http_info(self, **kwargs):  # noqa: E501
    """get all ips # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_i_ps_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :return: list[V1IPResponse]
    If the method is called asynchronously,
    returns the request thread.
    """
    # This endpoint takes no parameters of its own, only the framework's
    # standard request-control options.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the local namespace, taken before other locals exist.
    params = locals()
    # Reject any keyword argument the endpoint does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_i_ps" % key
            )
        params[key] = val
    del params['kwargs']

    # Assemble the pieces of the outgoing request; GET with no body.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['HMAC', 'jwt']  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/v1/ip', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[V1IPResponse]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_ip(self, body, **kwargs): # noqa: E501
"""updates an ip. if the ip was changed since this one was read, a conflict is returned # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_ip(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1IPUpdateRequest body: (required)
:return: V1IPResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_ip_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.update_ip_with_http_info(body, **kwargs) # noqa: E501
return data
    def update_ip_with_http_info(self, body, **kwargs): # noqa: E501
        """updates an ip. if the ip was changed since this one was read, a conflict is returned # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_ip_with_http_info(body, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param V1IPUpdateRequest body: (required)
        :return: V1IPResponse
        If the method is called asynchronously,
        returns the request thread.
        """
        # Accepted arguments: the required request body plus the standard
        # transport options; anything else raises TypeError below.
        all_params = ['body'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() snapshots the frame here so 'kwargs' can be flattened
        # into 'params'; avoid introducing new locals above this line.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_ip" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_ip`") # noqa: E501
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['HMAC', 'jwt'] # noqa: E501
        # NOTE(review): the update is issued as POST /v1/ip (not PUT/PATCH);
        # this matches the generated spec, not REST convention.
        return self.api_client.call_api(
            '/v1/ip', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='V1IPResponse', # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 36.733894
| 156
| 0.591696
| 3,037
| 26,228
| 4.870925
| 0.062562
| 0.051376
| 0.026499
| 0.03407
| 0.956466
| 0.954506
| 0.94876
| 0.940039
| 0.930034
| 0.923815
| 0
| 0.018379
| 0.313329
| 26,228
| 713
| 157
| 36.785414
| 0.802998
| 0.313291
| 0
| 0.816537
| 0
| 0
| 0.166657
| 0.030389
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03876
| false
| 0
| 0.010336
| 0
| 0.105943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e8bbab47658ab8d5b9d564a50175ee76b3938e2
| 24,704
|
py
|
Python
|
ribo_plotting.py
|
borisz264/ribo_seq
|
ddcea3c913eab7e669baec1d39379bde656684b8
|
[
"MIT"
] | null | null | null |
ribo_plotting.py
|
borisz264/ribo_seq
|
ddcea3c913eab7e669baec1d39379bde656684b8
|
[
"MIT"
] | 1
|
2020-08-18T06:48:50.000Z
|
2020-08-18T10:30:52.000Z
|
ribo_plotting.py
|
borisz264/ribo_seq
|
ddcea3c913eab7e669baec1d39379bde656684b8
|
[
"MIT"
] | null | null | null |
import ribo_utils
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams['pdf.fonttype'] = 42 #leaves most text as actual text in PDFs, not outlines
import os
import uniform_colormaps
import math
from matplotlib.ticker import AutoMinorLocator
def plot_fragment_length_distributions(experiment):
    """Plot per-library distributions of CDS-mapping fragment lengths.
    Writes two TSV tables (long form and a %-reads pivot) plus one PDF under
    <rdir>/plots/.
    NOTE(review): Python 2 code -- print statement below, and the line-style
    index relies on Python 2 integer division.
    :param experiment: experiment object providing .libs and .settings
    """
    dfs = []
    for lib in experiment.libs:
        frag_dict = lib.get_all_CDS_fragment_length_counts()
        frag_lengths = sorted(frag_dict.keys())
        frag_length_counts = [frag_dict[length] for length in frag_lengths]
        d = {'fragment length': frag_lengths, '# reads': frag_length_counts,
             '% reads': 100. * np.array(frag_length_counts) / sum(frag_length_counts),
             'sample': [lib.lib_settings.sample_name] * len(frag_length_counts)}
        temp_df = pd.DataFrame(data=d)
        dfs.append(temp_df)
        print lib.lib_settings.sample_name, 'reads: ', sum(frag_length_counts)
    frag_length_df = pd.concat(dfs)
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'fragment_length_distributions.tsv')
    frag_length_df.to_csv(out_name, sep='\t')
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'fragment_length_percent_pivot.tsv')
    frag_length_df.pivot(columns='sample', values ='% reads', index='fragment length').to_csv(out_name, sep='\t')
    fig = plt.figure(figsize=(8, 5))
    plots = []
    plot = fig.add_subplot(111)
    color_index = 0
    group_df = frag_length_df.groupby(['sample'])
    for lib in experiment.libs:
        sample = lib.lib_settings.sample_name
        df = group_df.get_group(sample)
        # Color cycles through the rainbow palette; line style advances each
        # time the palette wraps (Py2 integer division).
        df.plot(x='fragment length', y='% reads', ax=plot, color=ribo_utils.rainbow[color_index%len(ribo_utils.rainbow)], linestyle=ribo_utils.line_styles[color_index/len(ribo_utils.rainbow)], lw=2, sharex=True,
                sharey=True, label=sample)
        color_index += 1
        plots.append(plot)
    for plot in plots:
        plot.set_ylabel('% CDS-mapping reads', fontsize=20)
        plot.set_xlabel('fragment length', fontsize=20)
        # Hide the right and top spines
        plot.spines['right'].set_visible(False)
        plot.spines['top'].set_visible(False)
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'fragment_length_distributions.pdf')
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_readthrough_box(experiment, log = False):
    """Box plot of stop-codon readthrough ratios, one box per library.
    Writes <rdir>/plots/readthrough_box.pdf (or log_readthrough_box.pdf).
    :param experiment: experiment object providing .libs and .settings
    :param log: if True, ratios are computed in log10 space and the output
        file name gains a 'log_' prefix
    """
    fig = plt.figure(figsize=(8, 8))
    num_libs = len(experiment.libs)
    num_plots_wide = 1
    num_plots_high = 1
    colormap = uniform_colormaps.viridis
    plot_index = 0
    plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index + 1)
    data = []
    legends = []
    boxprops = dict(linewidth=2, color=ribo_utils.black)
    for lib in experiment.libs:
        sample_name = lib.lib_settings.sample_name
        readthroughs = []
        # BUGFIX/perf: the original list comprehension called the expensive
        # compute_readthrough_ratio TWICE per transcript (once for the value,
        # once for the None filter), and evaluated it even for non-coding
        # transcripts. Compute it once per coding transcript instead.
        for tx in lib.transcripts.values():
            if not tx.is_coding:
                continue
            ratio = tx.compute_readthrough_ratio(16, read_end='5p', read_lengths='all', cds_cutoff=128,
                                                 log=log, post_cds_start_buffer=12, pre_cds_stop_buffer=15,
                                                 pre_extension_stop_buffer=15, post_cds_stop_buffer=9)
            if ratio is not None:
                readthroughs.append(ratio)
        data.append(readthroughs)
        legends.append('%s (%d)' % (sample_name, len(readthroughs)))
    plot.boxplot(data, notch=True, boxprops=boxprops, autorange=True)
    plot_index += 1
    if log:
        plot.set_ylabel("log10 readthrough fraction", fontsize=8)
    else:
        plot.set_ylabel("readthrough fraction", fontsize=8)
    plot.set_xticklabels(legends, rotation=40, ha='right')
    plt.tight_layout()
    if log:
        out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'log_readthrough_box.pdf')
    else:
        out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'readthrough_box.pdf')
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_frame_distributions(experiment, read_lengths = ['all', [28], [29], [30]], read_ends = ['5p', '3p']):
    """Bar plots of reading-frame (0/1/2) read fractions.
    One subplot per (library, read_end, read_length) combination; writes
    <rdir>/plots/frame_distributions.pdf.
    :param experiment: experiment object providing .libs and .settings
    :param read_lengths: read-length classes to tally; 'all' or lists of lengths
    :param read_ends: which read ends to tally ('5p', '3p')
    """
    num_libs = len(experiment.libs)
    num_plots_wide = len(read_lengths) * len(read_ends)
    num_plots_high = num_libs
    fig = plt.figure(figsize=(num_plots_wide, num_plots_high))
    colormap = uniform_colormaps.viridis
    plot_index = 0
    for lib in experiment.libs:
        for read_end in read_ends:
            for read_length in read_lengths:
                plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index+1)
                sample_name = lib.lib_settings.sample_name
                frame_counts = np.zeros(3)
                # Offsets shifting the counting window for each read end.
                offsets = {'5p':-15, '3p': 15}
                offset = offsets[read_end]
                for transcript in lib.transcripts.values():
                    if transcript.is_coding:
                        frame_counts = frame_counts + transcript.get_read_frame_counts(transcript.cds_start+offset, transcript.cds_end+offset, read_end=read_end, read_lengths=read_length)
                bar_corners = (np.arange(3)+0.25)*.5
                bar_width = 0.5*.5
                bar_centers = bar_corners + bar_width/2.0
                plot.bar(bar_corners, frame_counts/sum(frame_counts), width=bar_width, label=sample_name, lw=0,
                         color=ribo_utils.black)
                plot.set_ylim(0, 1)
                plot.set_xticks(bar_centers)
                plot.set_xticklabels([str(n) for n in range(3)])
                plot.set_title('%s, %s' % (read_end, str(read_length)), fontsize=8)
                # Label the row with the sample name on the first column only.
                if plot_index % num_plots_wide == 0:
                    plot.set_ylabel(sample_name, fontsize=8)
                plt.setp(plot.get_xticklabels(), fontsize=7)
                plt.setp(plot.get_yticklabels(), fontsize=7)
                plot_index += 1
    plt.subplots_adjust(wspace=0.5, hspace=0.5)
    # BUGFIX: the output file name previously contained a stray space
    # ('frame_ distributions.pdf').
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'frame_distributions.pdf')
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_start_codon_average(experiment, up = 100, down = 500, min_cds_reads = 128, read_end='5p', read_lengths='all'):
    """Metagene plot of average normalized read density around the CDS start codon.
    One subplot per library; writes <rdir>/plots/start_codon_avg_<end>_<lengths>.pdf.
    :param experiment: experiment object providing .libs and .settings
    :param up: nt upstream of the start codon to include
    :param down: nt downstream of the start codon to include
    :param min_cds_reads: minimum CDS read count for a transcript to be included
    :param read_end: '5p' or '3p' -- which read end is tallied
    :param read_lengths: 'all' or a list of read lengths to include
    """
    num_libs = len(experiment.libs)
    num_plots_wide = 1
    num_plots_high = num_libs
    fig = plt.figure(figsize=(8, 2*num_libs))
    colormap = uniform_colormaps.viridis
    plot_index = 0
    for lib in experiment.libs:
        plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index + 1)
        sample_name = lib.lib_settings.sample_name
        normed_count_sum = np.zeros(down+up+1)
        inclusion_sum = np.zeros(down + up + 1)
        for transcript in lib.transcripts.values():
            if transcript.is_coding:
                # Offsets trimming the CDS window for the read-count filter.
                # NOTE(review): a read_end other than '5p'/'3p' would leave
                # start_offset unbound (NameError below) -- confirm callers.
                if read_end == '5p':
                    start_offset = -15
                    stop_offset = -12
                elif read_end == '3p':
                    start_offset = 14
                    stop_offset = 18
                cds_reads = transcript.get_cds_read_count(start_offset, stop_offset, read_end=read_end, read_lengths=read_lengths)
                if cds_reads >= min_cds_reads:
                    tx_count, tx_inclusion = transcript.get_CDS_read_counts_array(transcript.cds_start, -1 * up, down, read_end=read_end,
                                                                                  read_lengths=read_lengths)
                    # Normalize by the transcript's mean CDS density so each
                    # transcript contributes equally to the metagene average.
                    normed_count_sum += tx_count/(float(cds_reads)/transcript.cds_length)
                    inclusion_sum += tx_inclusion
        nt_positions = np.arange(-1*up, down+1)-0.5
        # NOTE(review): colormap is indexed with (plot_index - 1); the first
        # library gets a negative (clamped) value -- possible off-by-one.
        plot.bar(nt_positions, normed_count_sum/inclusion_sum,
                 color=colormap((plot_index - 1) / float(len(experiment.libs))), lw=0, label=sample_name)
        plot.set_title(sample_name, fontsize=8)
        plot_index += 1
        if plot_index == num_libs:
            plot.set_xlabel("relative to CDS start", fontsize=8)
        plot.set_ylabel("average density\n (read %s end)" % (read_end), fontsize=8)
        plot.set_xlim(-1 * up, down)
        plot.set_ylim(0, 8)
        minorLocator = AutoMinorLocator(10)
        plot.xaxis.set_minor_locator(minorLocator)
        plot.get_xaxis().set_tick_params(which='both', direction='out')
        plot.get_yaxis().set_tick_params(which='both', direction='out')
    plt.tight_layout()
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'start_codon_avg_%s_%s.pdf' %(read_end, str(read_lengths)))
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_second_stop_positions(experiment, up = 500, down = 100, min_cds_reads = 128, read_end='5p', read_lengths='all'):
    """Histogram, per library, of the distance from the annotated CDS stop to the
    next in-frame stop codon, over transcripts passing the CDS-read threshold.
    Writes <rdir>/plots/second_stop_positions.pdf.
    NOTE(review): the 'up' and 'down' parameters are accepted for signature
    symmetry with the other metagene plots but are not used in this body.
    :param experiment: experiment object providing .libs and .settings
    :param min_cds_reads: minimum CDS read count for a transcript to be included
    :param read_end: '5p' or '3p' -- which read end is tallied
    :param read_lengths: 'all' or a list of read lengths to include
    """
    num_libs = len(experiment.libs)
    num_plots_wide = 1
    num_plots_high = num_libs
    fig = plt.figure(figsize=(8, 2*num_libs))
    colormap = uniform_colormaps.viridis
    plot_index = 0
    for lib in experiment.libs:
        sample_name = lib.lib_settings.sample_name
        plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index + 1)
        second_stop_positions = []
        for transcript in lib.transcripts.values():
            if transcript.is_coding:
                cds_reads = transcript.get_cds_read_count(-15, 12, read_end=read_end, read_lengths=read_lengths)
                if cds_reads >= min_cds_reads:
                    second_stop = transcript.second_stop_position()
                    if not second_stop == None:
                        second_stop_positions.append(second_stop+3-transcript.cds_end)
        # Python 2: range() returns a list, so a huge catch-all bin edge can be
        # appended for everything beyond +200 nt.
        bins = range(-10, 200)
        bins.append(1000000)
        hist, bin_edges = np.histogram(second_stop_positions, bins)
        plot.bar(bin_edges[:-1]-0.5, hist,
                 color=colormap((plot_index - 1) / float(len(experiment.libs))), lw=0, label=sample_name)
        plot.set_title(sample_name, fontsize=8)
        plot_index += 1
        if plot_index == num_libs:
            plot.set_xlabel("second in-frame stop relative to CDS stop", fontsize=8)
        plot.set_ylabel("# genes", fontsize=8)
        plot.set_xlim(-10, 200)
        minorLocator = AutoMinorLocator(10)
        plot.xaxis.set_minor_locator(minorLocator)
        plot.get_xaxis().set_tick_params(which='both', direction='out')
        plot.get_yaxis().set_tick_params(which='both', direction='out')
    plt.tight_layout()
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'second_stop_positions.pdf')
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_stop_codon_average(experiment, up = 500, down = 100, min_cds_reads = 128, read_end='5p', read_lengths='all'):
    '''
    Metagene plot of average normalized read density around the CDS stop codon.
    One subplot per library; writes <rdir>/plots/stop_codon_avg_<end>_<lengths>.pdf.
    :param experiment: experiment object providing .libs and .settings
    :param up: nt upstream of the stop codon to include
    :param down: nt downstream of the stop codon to include
    :param min_cds_reads: minimum CDS read count for a transcript to be included
    :param read_end: '5p' or '3p' -- which read end is tallied
    :param read_lengths: 'all' or a list of read lengths to include
    :return: None (saves the figure to disk)
    '''
    num_libs = len(experiment.libs)
    num_plots_wide = 1
    num_plots_high = num_libs
    fig = plt.figure(figsize=(8, 2*num_libs))
    colormap = uniform_colormaps.viridis
    plot_index = 0
    for lib in experiment.libs:
        plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index + 1)
        sample_name = lib.lib_settings.sample_name
        normed_count_sum = np.zeros(down+up+1)
        inclusion_sum = np.zeros(down + up + 1)
        for transcript in lib.transcripts.values():
            if transcript.is_coding:
                # NOTE(review): a read_end other than '5p'/'3p' would leave
                # start_offset unbound (NameError below) -- confirm callers.
                if read_end == '5p':
                    start_offset = -15
                    stop_offset = -12
                elif read_end == '3p':
                    start_offset = 14
                    stop_offset = 18
                cds_reads = transcript.get_cds_read_count(start_offset, stop_offset, read_end=read_end, read_lengths=read_lengths)
                if cds_reads >= min_cds_reads:
                    tx_count, tx_inclusion = transcript.get_CDS_read_counts_array(transcript.cds_end, -1 * up, down, read_end=read_end,
                                                                                  read_lengths=read_lengths)
                    # Normalize by the transcript's mean CDS density so each
                    # transcript contributes equally to the metagene average.
                    normed_count_sum += tx_count/(float(cds_reads)/transcript.cds_length)
                    inclusion_sum += tx_inclusion
        nt_positions = np.arange(-1*up, down+1)-0.5
        plot.bar(nt_positions, normed_count_sum/inclusion_sum,
                 color=colormap((plot_index - 1) / float(len(experiment.libs))), lw=0, label=sample_name)
        plot.set_title(sample_name, fontsize=8)
        plot_index += 1
        if plot_index == num_libs:
            plot.set_xlabel("nt relative to stop codon", fontsize=8)
        plot.set_ylabel("average density\n (read %s end)" % (read_end), fontsize=8)
        plot.set_xlim(-1*up, down)
        minorLocator = AutoMinorLocator(10)
        plot.xaxis.set_minor_locator(minorLocator)
        plot.get_xaxis().set_tick_params(which='both', direction='out')
        plot.get_yaxis().set_tick_params(which='both', direction='out')
    plt.tight_layout()
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'stop_codon_avg_%s_%s.pdf' %(read_end, str(read_lengths)))
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_first_exon_average(experiment, up = 500, down = 100, min_cds_reads = 128, read_end='5p', read_lengths='all'):
    """Metagene plot of average normalized read density around the first
    exon-exon junction inside the CDS (i.e. the second exon start).
    Only transcripts that actually have a junction inside the CDS contribute.
    Writes <rdir>/plots/first_ej_avg_<end>_<lengths>.pdf.
    :param experiment: experiment object providing .libs and .settings
    :param up: nt upstream of the junction to include
    :param down: nt downstream of the junction to include
    :param min_cds_reads: minimum CDS read count for a transcript to be included
    :param read_end: '5p' or '3p' -- which read end is tallied
    :param read_lengths: 'all' or a list of read lengths to include
    """
    num_libs = len(experiment.libs)
    num_plots_wide = 1
    num_plots_high = num_libs
    fig = plt.figure(figsize=(8, 2*num_libs))
    colormap = uniform_colormaps.viridis
    plot_index = 0
    for lib in experiment.libs:
        plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index + 1)
        sample_name = lib.lib_settings.sample_name
        normed_count_sum = np.zeros(down+up+1)
        inclusion_sum = np.zeros(down + up + 1)
        for transcript in lib.transcripts.values():
            if transcript.is_coding:
                # Skip transcripts with no exon junction inside the CDS.
                if not transcript.get_first_jxn_in_CDS() == None:
                    if read_end == '5p':
                        start_offset = -15
                        stop_offset = -12
                    elif read_end == '3p':
                        start_offset = 14
                        stop_offset = 18
                    cds_reads = transcript.get_cds_read_count(start_offset, stop_offset, read_end=read_end, read_lengths=read_lengths)
                    if cds_reads >= min_cds_reads:
                        tx_count, tx_inclusion = transcript.get_CDS_read_counts_array(transcript.get_first_jxn_in_CDS(), -1 * up, down, read_end=read_end,
                                                                                      read_lengths=read_lengths)
                        # Normalize by the transcript's mean CDS density.
                        normed_count_sum += tx_count/(float(cds_reads)/transcript.cds_length)
                        inclusion_sum += tx_inclusion
        nt_positions = np.arange(-1*up, down+1)-0.5
        plot.bar(nt_positions, normed_count_sum/inclusion_sum,
                 color=colormap((plot_index - 1) / float(len(experiment.libs))), lw=0, label=sample_name)
        plot.set_title(sample_name, fontsize=8)
        plot_index += 1
        if plot_index == num_libs:
            plot.set_xlabel("relative to second exon start", fontsize=8)
        plot.set_ylabel("average density\n (read %s end)" % (read_end), fontsize=8)
        plot.set_xlim(-1*up, down)
        minorLocator = AutoMinorLocator(10)
        plot.xaxis.set_minor_locator(minorLocator)
        plot.get_xaxis().set_tick_params(which='both', direction='out')
        plot.get_yaxis().set_tick_params(which='both', direction='out')
    plt.tight_layout()
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'first_ej_avg_%s_%s.pdf' %(read_end, str(read_lengths)))
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_stop_positional_read_lengths(experiment, up = 500, down = 100, min_cds_reads = 128, read_end='5p', read_lengths='all'):
    """Plot the average mapped-read length at each position around the CDS stop codon.
    One subplot per library; writes <rdir>/plots/stop_lengths_<end>_<lengths>.pdf.
    :param experiment: experiment object providing .libs and .settings
    :param up: nt upstream of the stop codon to include
    :param down: nt downstream of the stop codon to include
    :param min_cds_reads: minimum CDS read count for a transcript to be included
    :param read_end: '5p' or '3p' -- which read end is tallied
    :param read_lengths: read-length filter for the CDS-read threshold.
        NOTE(review): not forwarded to get_avg_read_lengths_array below --
        confirm whether that is intended.
    """
    num_libs = len(experiment.libs)
    num_plots_wide = 1
    num_plots_high = num_libs
    fig = plt.figure(figsize=(8, 2*num_libs))
    colormap = uniform_colormaps.viridis
    plot_index = 0
    for lib in experiment.libs:
        plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index + 1)
        sample_name = lib.lib_settings.sample_name
        length_sum = np.zeros(down+up+1)
        count_sum = np.zeros(down + up + 1)
        for transcript in lib.transcripts.values():
            if transcript.is_coding:
                # NOTE(review): a read_end other than '5p'/'3p' would leave
                # start_offset unbound (NameError below) -- confirm callers.
                if read_end == '5p':
                    start_offset = -15
                    stop_offset = -12
                elif read_end == '3p':
                    start_offset = 14
                    stop_offset = 18
                cds_reads = transcript.get_cds_read_count(start_offset, stop_offset, read_end=read_end, read_lengths=read_lengths)
                if cds_reads >= min_cds_reads:
                    length_sum_array, counts_array = transcript.get_avg_read_lengths_array(transcript.cds_end, -1*up, down,
                                                                                           read_end=read_end)
                    length_sum += length_sum_array
                    count_sum += counts_array
        nt_positions = np.arange(-1*up, down+1)
        plot.plot(nt_positions, length_sum/count_sum,
                  color=colormap((plot_index - 1) / float(len(experiment.libs))), lw=1, label=sample_name)
        plot.set_title(sample_name, fontsize=8)
        plot_index += 1
        if plot_index == num_libs:
            plot.set_xlabel("relative to CDS stop", fontsize=8)
        plot.set_ylabel("avg read length\n (read %s end)" % (read_end), fontsize=8)
        plot.set_xlim(-1*up, down)
        minorLocator = AutoMinorLocator(10)
        plot.xaxis.set_minor_locator(minorLocator)
        plot.get_xaxis().set_tick_params(which='both', direction='out')
        plot.get_yaxis().set_tick_params(which='both', direction='out')
    plt.tight_layout()
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'stop_lengths_%s_%s.pdf' %(read_end, str(read_lengths)))
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_start_positional_read_lengths(experiment, up = 500, down = 100, min_cds_reads = 128, read_end='5p', read_lengths='all'):
    """Plot the average mapped-read length at each position around the CDS start codon.
    One subplot per library; writes <rdir>/plots/start_lengths_<end>_<lengths>.pdf.
    :param experiment: experiment object providing .libs and .settings
    :param up: nt upstream of the start codon to include
    :param down: nt downstream of the start codon to include
    :param min_cds_reads: minimum CDS read count for a transcript to be included
    :param read_end: '5p' or '3p' -- which read end is tallied
    :param read_lengths: read-length filter for the CDS-read threshold.
        NOTE(review): not forwarded to get_avg_read_lengths_array below --
        confirm whether that is intended.
    """
    num_libs = len(experiment.libs)
    num_plots_wide = 1
    num_plots_high = num_libs
    fig = plt.figure(figsize=(8, 2*num_libs))
    colormap = uniform_colormaps.viridis
    plot_index = 0
    for lib in experiment.libs:
        plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index + 1)
        sample_name = lib.lib_settings.sample_name
        length_sum = np.zeros(down+up+1)
        count_sum = np.zeros(down + up + 1)
        for transcript in lib.transcripts.values():
            if transcript.is_coding:
                if read_end == '5p':
                    start_offset = -15
                    stop_offset = -12
                elif read_end == '3p':
                    start_offset = 14
                    stop_offset = 18
                cds_reads = transcript.get_cds_read_count(start_offset, stop_offset, read_end=read_end, read_lengths=read_lengths)
                if cds_reads >= min_cds_reads:
                    length_sum_array, counts_array = transcript.get_avg_read_lengths_array(transcript.cds_start, -1*up, down,
                                                                                           read_end=read_end)
                    length_sum += length_sum_array
                    count_sum += counts_array
        nt_positions = np.arange(-1*up, down+1)
        plot.plot(nt_positions, length_sum/count_sum,
                  color=colormap((plot_index - 1) / float(len(experiment.libs))), lw=1, label=sample_name)
        plot.set_title(sample_name, fontsize=8)
        plot_index += 1
        if plot_index == num_libs:
            # BUGFIX: the label previously read "relative to CDS stop" (copy-
            # paste from the stop-codon variant); this plot is centered on
            # transcript.cds_start.
            plot.set_xlabel("relative to CDS start", fontsize=8)
        plot.set_ylabel("avg read length\n (read %s end)" % (read_end), fontsize=8)
        plot.set_xlim(-1*up, down)
        minorLocator = AutoMinorLocator(10)
        plot.xaxis.set_minor_locator(minorLocator)
        plot.get_xaxis().set_tick_params(which='both', direction='out')
        plot.get_yaxis().set_tick_params(which='both', direction='out')
    plt.tight_layout()
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'start_lengths_%s_%s.pdf' %(read_end, str(read_lengths)))
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def plot_first_exon_positional_read_lengths(experiment, up = 500, down = 100, min_cds_reads = 128, read_end='5p', read_lengths='all'):
    """Plot the average mapped-read length at each position around the first
    exon-exon junction inside the CDS (i.e. the second exon start).
    One subplot per library; writes <rdir>/plots/first_ej_lengths_<end>_<lengths>.pdf.
    :param experiment: experiment object providing .libs and .settings
    :param up: nt upstream of the junction to include
    :param down: nt downstream of the junction to include
    :param min_cds_reads: minimum CDS read count for a transcript to be included
    :param read_end: '5p' or '3p' -- which read end is tallied
    :param read_lengths: read-length filter for the CDS-read threshold.
        NOTE(review): not forwarded to get_avg_read_lengths_array below --
        confirm whether that is intended.
    """
    num_libs = len(experiment.libs)
    num_plots_wide = 1
    num_plots_high = num_libs
    fig = plt.figure(figsize=(8, 2*num_libs))
    colormap = uniform_colormaps.viridis
    plot_index = 0
    for lib in experiment.libs:
        plot = fig.add_subplot(num_plots_high, num_plots_wide, plot_index + 1)
        sample_name = lib.lib_settings.sample_name
        length_sum = np.zeros(down+up+1)
        count_sum = np.zeros(down + up + 1)
        for transcript in lib.transcripts.values():
            # Only coding transcripts that actually contain a junction in the
            # CDS contribute to this metagene.
            if transcript.is_coding and transcript.get_first_jxn_in_CDS() is not None:
                if read_end == '5p':
                    start_offset = -15
                    stop_offset = -12
                elif read_end == '3p':
                    start_offset = 14
                    stop_offset = 18
                cds_reads = transcript.get_cds_read_count(start_offset, stop_offset, read_end=read_end, read_lengths=read_lengths)
                if cds_reads >= min_cds_reads:
                    length_sum_array, counts_array = transcript.get_avg_read_lengths_array(transcript.get_first_jxn_in_CDS(), -1*up, down,
                                                                                           read_end=read_end)
                    length_sum += length_sum_array
                    count_sum += counts_array
        nt_positions = np.arange(-1*up, down+1)
        plot.plot(nt_positions, length_sum/count_sum, color=colormap((plot_index - 1) / float(len(experiment.libs))), lw=1, label=sample_name)
        plot.set_title(sample_name, fontsize=8)
        plot_index += 1
        if plot_index == num_libs:
            # BUGFIX: the label previously read "relative to CDS stop" (copy-
            # paste from the stop-codon variant); this plot is centered on the
            # first exon junction in the CDS.
            plot.set_xlabel("relative to second exon start", fontsize=8)
        plot.set_ylabel("avg read length\n (read %s end)" % (read_end), fontsize=8)
        plot.set_xlim(-1*up, down)
        minorLocator = AutoMinorLocator(10)
        plot.xaxis.set_minor_locator(minorLocator)
        plot.get_xaxis().set_tick_params(which='both', direction='out')
        plot.get_yaxis().set_tick_params(which='both', direction='out')
    plt.tight_layout()
    out_name = os.path.join(experiment.settings.get_rdir(), 'plots', 'first_ej_lengths_%s_%s.pdf' %(read_end, str(read_lengths)))
    plt.savefig(out_name, transparent='True', format='pdf')
    plt.clf()
def codon_metaplots(library):
    """Placeholder for per-codon metagene plots; not yet implemented."""
    pass
| 51.790356
| 211
| 0.625405
| 3,330
| 24,704
| 4.359459
| 0.084685
| 0.031343
| 0.017221
| 0.017566
| 0.803472
| 0.794517
| 0.780258
| 0.76717
| 0.762279
| 0.756975
| 0
| 0.025131
| 0.260687
| 24,704
| 476
| 212
| 51.89916
| 0.769711
| 0.057683
| 0
| 0.698795
| 0
| 0
| 0.053445
| 0.012517
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.00241
| 0.019277
| null | null | 0.00241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf06f5be426b2b8344be483145f6546a3509eb60
| 269
|
py
|
Python
|
py_toolbelt/other_toolbelt.py
|
soamicharan/py_toolbelt
|
b2800a750bb554368b2ccf040e79c262c4b24e1e
|
[
"MIT"
] | null | null | null |
py_toolbelt/other_toolbelt.py
|
soamicharan/py_toolbelt
|
b2800a750bb554368b2ccf040e79c262c4b24e1e
|
[
"MIT"
] | null | null | null |
py_toolbelt/other_toolbelt.py
|
soamicharan/py_toolbelt
|
b2800a750bb554368b2ccf040e79c262c4b24e1e
|
[
"MIT"
] | null | null | null |
def is_not_none(object=None, **kwargs):
    """Return kwargs['on_true'] (default True) when *object* is not None,
    otherwise kwargs['on_false'] (default False).
    NOTE: the parameter name shadows the builtin `object`; it is kept for
    backward compatibility with keyword callers.
    """
    if object is None:
        return kwargs.get('on_false', False)
    return kwargs.get('on_true', True)
def is_none(object=None, **kwargs):
    """Return kwargs['on_true'] (default True) when *object* is None,
    otherwise kwargs['on_false'] (default False).
    NOTE: the parameter name shadows the builtin `object`; it is kept for
    backward compatibility with keyword callers.
    """
    if object is None:
        return kwargs.get('on_true', True)
    return kwargs.get('on_false', False)
| 44.833333
| 97
| 0.710037
| 46
| 269
| 4
| 0.282609
| 0.195652
| 0.23913
| 0.217391
| 0.913043
| 0.913043
| 0.913043
| 0.597826
| 0.597826
| 0.597826
| 0
| 0
| 0.133829
| 269
| 5
| 98
| 53.8
| 0.7897
| 0
| 0
| 0
| 0
| 0
| 0.111524
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
cf243122998055b6fd029372f55c21addf4d6784
| 11,008
|
py
|
Python
|
hypixel_chinese_skyblock_bot/Commands/VerifyId.py
|
whats2000/hypixel-chinese-skyblock-bot
|
0e20fbcb5aaf7e5ca0558f7600a1337aa6a9990c
|
[
"MIT"
] | 1
|
2021-06-23T16:32:54.000Z
|
2021-06-23T16:32:54.000Z
|
hypixel_chinese_skyblock_bot/Commands/VerifyId.py
|
whats2000/hypixel-chinese-skyblock-bot
|
0e20fbcb5aaf7e5ca0558f7600a1337aa6a9990c
|
[
"MIT"
] | null | null | null |
hypixel_chinese_skyblock_bot/Commands/VerifyId.py
|
whats2000/hypixel-chinese-skyblock-bot
|
0e20fbcb5aaf7e5ca0558f7600a1337aa6a9990c
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
from hypixel_chinese_skyblock_bot.Core.Common import CodExtension, get_hypixel_api, get_setting_json, set_user_id, \
get_verify_id_list
from hypixel_chinese_skyblock_bot.Core.UserData import UserData
class VerifyId(CodExtension):
    """Discord cog that links a Discord account to a Hypixel player id.

    Verification fetches the player's Hypixel API profile and checks that its
    linked Discord social-media entry matches the message author.  All
    feedback is sent as embeds in the configured verification channel, and
    the triggering command message is always deleted afterwards.
    """
    @commands.command()
    async def verifyid(self, ctx, args=None):
        """First-time verification: link the author's Discord account to the
        given Hypixel id and grant the configured verified role."""
        # check is in the desired channel.
        if ctx.channel.id == get_setting_json('VerifyIdChannelId'):
            # check is player input its id
            if args is not None:
                # check is player has been verified
                if get_setting_json('VerifyIdRole') not in [y.name.lower() for y in ctx.message.author.roles]:
                    player = get_verify_id_list(ctx.message.author)
                    player_data = UserData(player)
                    player_data.api = get_hypixel_api(args)
                    print('> verify player user : ' + str(ctx.message.author))
                    # check get hypixel api is successes
                    if player_data.api['success']:
                        print('> get hypixel api success')
                        player_data.set_latest_user_api()
                        # try to get player social media discord
                        try:
                            player_data.discord = player_data.api['player']['socialMedia']['links']['DISCORD']
                            # check user name is correct in api
                            if str(ctx.message.author) == player_data.discord:
                                set_user_id(ctx.message.author, args)
                                print('- Verify Id success')
                                embed = discord.Embed(
                                    title='成功驗證',
                                    description=str(ctx.message.author) + ' ---> ' + args,
                                    color=0x00ff00
                                )
                                embed.set_author(
                                    name=ctx.message.author.name,
                                    icon_url=ctx.message.author.avatar_url
                                )
                                await ctx.send(embed=embed)
                                # grant the configured verified role
                                role = discord.utils.get(ctx.message.author.guild.roles,
                                                         name=get_setting_json('VerifyIdRole'))
                                await ctx.author.add_roles(role)
                            else:
                                print('> Player not found')
                                embed = discord.Embed(
                                    title='驗證失敗,玩家id不正確',
                                    description=str(ctx.message.author) + ' -x-> ' + args,
                                    color=0xe74c3c
                                )
                                embed.set_author(
                                    name=ctx.message.author.name,
                                    icon_url=ctx.message.author.avatar_url
                                )
                                await ctx.send(embed=embed, delete_after=20.0)
                        except KeyError:
                            print('> The player do not open the social media')
                            embed = discord.Embed(
                                title='驗證失敗,請先打開discord api',
                                description=str(ctx.message.author) + ' -x-> ' + args,
                                color=0xe74c3c
                            )
                            embed.set_author(
                                name=ctx.message.author.name,
                                icon_url=ctx.message.author.avatar_url
                            )
                            await ctx.send(embed=embed, delete_after=20.0)
                    else:
                        print('> Please wait a little bit and try again')
                        embed = discord.Embed(
                            title='驗證失敗,請稍後重試',
                            description=str(ctx.message.author) + ' -x-> ' + args,
                            color=0xe74c3c
                        )
                        embed.set_author(
                            name=ctx.message.author.name,
                            icon_url=ctx.message.author.avatar_url
                        )
                        await ctx.send(embed=embed, delete_after=20.0)
                else:
                    print('> Has already verified')
                    embed = discord.Embed(
                        title='你已經驗證,更新請用sb?verifyidupdate',
                        description=str(ctx.message.author) + ' -x-> ' + args,
                        color=0xe74c3c
                    )
                    embed.set_author(
                        name=ctx.message.author.name,
                        icon_url=ctx.message.author.avatar_url
                    )
                    await ctx.send(embed=embed, delete_after=20.0)
            else:
                print('> Required id')
                embed = discord.Embed(
                    title='你需要在指令後方加上自己的 id',
                    description=str(ctx.message.author) + ' -x-> ?',
                    color=0xe74c3c
                )
                embed.set_author(
                    name=ctx.message.author.name,
                    icon_url=ctx.message.author.avatar_url
                )
                await ctx.send(embed=embed, delete_after=20.0)
        else:
            print('> Wrong channel')
            embed = discord.Embed(
                title='請在正確頻道輸入',
                color=0xe74c3c
            )
            embed.set_author(
                name=ctx.message.author.name,
                icon_url=ctx.message.author.avatar_url
            )
            await ctx.send(embed=embed, delete_after=20.0)
        # the triggering command message is removed regardless of outcome
        await ctx.message.delete()
    @commands.command()
    async def verifyidupdate(self, ctx, args=None):
        """Re-verification: update an already-verified user's stored Hypixel id."""
        # check is in the desired channel.
        if ctx.channel.id == get_setting_json('VerifyIdChannelId'):
            # check is player input its id
            if args is not None:
                player = get_verify_id_list(ctx.message.author)
                player_data = UserData(player)
                player_data.api = get_hypixel_api(args)
                print('> update player user : ' + str(ctx.message.author))
                # check is player has been verified
                # NOTE(review): there is no else branch for this check, so a
                # user without the verified role gets no feedback -- confirm.
                if get_setting_json('VerifyIdRole') in [y.name.lower() for y in ctx.message.author.roles]:
                    # check get hypixel api is successes
                    if player_data.api['success']:
                        print('> get hypixel api success')
                        player_data.set_latest_user_api()
                        # try to get player social media discord
                        try:
                            player_data.discord = player_data.api['player']['socialMedia']['links']['DISCORD']
                            # check user name is correct in api
                            if str(ctx.message.author) == player_data.discord:
                                set_user_id(ctx.message.author, args)
                                print('> update Id success')
                                embed = discord.Embed(
                                    title='成功更新',
                                    description=str(ctx.message.author) + ' ---> ' + args,
                                    color=0x00ff00
                                )
                                embed.set_author(
                                    name=ctx.message.author.name,
                                    icon_url=ctx.message.author.avatar_url
                                )
                                await ctx.send(embed=embed)
                            else:
                                print('> Player not found')
                                embed = discord.Embed(
                                    title='驗證失敗,玩家id不正確',
                                    description=str(ctx.message.author) + ' -x-> ' + args,
                                    color=0xe74c3c
                                )
                                embed.set_author(
                                    name=ctx.message.author.name,
                                    icon_url=ctx.message.author.avatar_url
                                )
                                await ctx.send(embed=embed, delete_after=20.0)
                        except KeyError:
                            print('> The player do not open the social media')
                            embed = discord.Embed(
                                title='驗證失敗,請先打開 hypixel discord api',
                                description=str(ctx.message.author) + ' -x-> ' + args,
                                color=0xe74c3c
                            )
                            embed.set_author(
                                name=ctx.message.author.name,
                                icon_url=ctx.message.author.avatar_url
                            )
                            await ctx.send(embed=embed, delete_after=20.0)
                    else:
                        print('> Please wait a little bit and try again')
                        print('> fail reason : ' + player_data.api['cause'])
                        embed = discord.Embed(
                            title='驗證失敗,請稍後重試',
                            description=str(ctx.message.author) + ' -x-> ' + args + '\n\n' + '原因 : ' + player_data.api[
                                'cause'],
                            color=0xe74c3c
                        )
                        embed.set_author(
                            name=ctx.message.author.name,
                            icon_url=ctx.message.author.avatar_url
                        )
                        await ctx.send(embed=embed, delete_after=20.0)
            else:
                print('> Required id')
                embed = discord.Embed(
                    title='你需要在指令後方加上自己的 id',
                    description=str(ctx.message.author) + ' -x-> ?',
                    color=0xe74c3c
                )
                embed.set_author(
                    name=ctx.message.author.name,
                    icon_url=ctx.message.author.avatar_url
                )
                await ctx.send(embed=embed, delete_after=20.0)
        else:
            print('> Wrong channel')
            embed = discord.Embed(
                title='請在正確頻道輸入',
                color=0xe74c3c
            )
            embed.set_author(
                name=ctx.message.author.name,
                icon_url=ctx.message.author.avatar_url
            )
            await ctx.send(embed=embed, delete_after=20.0)
        # the triggering command message is removed regardless of outcome
        await ctx.message.delete()
def setup(pybot):
    """discord.py extension entry point: attach the VerifyId cog to the bot."""
    pybot.add_cog(VerifyId(pybot))
| 38.760563
| 119
| 0.421966
| 945
| 11,008
| 4.795767
| 0.131217
| 0.110327
| 0.169462
| 0.062886
| 0.877979
| 0.877979
| 0.849735
| 0.836937
| 0.836937
| 0.836937
| 0
| 0.015735
| 0.497729
| 11,008
| 283
| 120
| 38.897527
| 0.803943
| 0.036973
| 0
| 0.728643
| 0
| 0
| 0.078485
| 0.00255
| 0
| 0
| 0.009822
| 0
| 0
| 1
| 0.005025
| false
| 0
| 0.020101
| 0
| 0.030151
| 0.090452
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf38ba43094e48c1d4f128798069b61c16b3156b
| 2,305
|
py
|
Python
|
uuidbase62/tests/test_dependencies.py
|
jaddison/fastapi-uuidbase62
|
e43bfcfb0d1b2546bace124e296bfac9065560c2
|
[
"MIT"
] | 1
|
2022-03-30T19:01:10.000Z
|
2022-03-30T19:01:10.000Z
|
uuidbase62/tests/test_dependencies.py
|
jaddison/fastapi-uuidbase62
|
e43bfcfb0d1b2546bace124e296bfac9065560c2
|
[
"MIT"
] | null | null | null |
uuidbase62/tests/test_dependencies.py
|
jaddison/fastapi-uuidbase62
|
e43bfcfb0d1b2546bace124e296bfac9065560c2
|
[
"MIT"
] | null | null | null |
import uuid
import fastapi
import pytest
from pydantic import BaseModel
from uuidbase62 import (
UUIDBase62,
UUIDBase62ModelMixin,
con_uuidbase62,
get_validated_uuidbase62,
get_validated_uuidbase62_by_model,
)
class Item(UUIDBase62ModelMixin, BaseModel):
    """Minimal fixture model: one uuidbase62 field constrained to the prefix ``my_prefix``."""
    client_id: con_uuidbase62(prefix="my_prefix")  # type: ignore
def test_get_validated_uuidbase62_by_model_function__returns_callable():
    """The model-based factory must hand back a callable validator."""
    validator = get_validated_uuidbase62_by_model(Item, "client_id", "item_id")
    assert callable(validator)
def test_get_validated_uuidbase62_by_model_function__with_valid_input__returns_valid_uuidbase62_value():
    """A correctly prefixed value round-trips through the model-based validator."""
    prefix = "my_prefix"
    value = prefix + "_7yNMTpVy8ddRxYKGJqtk7e"
    validator = get_validated_uuidbase62_by_model(Item, "client_id", "item_id")
    parsed = validator(value)
    # The validator returns a fully populated UUIDBase62 wrapper.
    assert isinstance(parsed, UUIDBase62)
    assert parsed.value == value
    assert parsed.prefix == prefix
    assert isinstance(parsed.uuid, uuid.UUID)
def test_get_validated_uuidbase62_function__returns_callable():
    """The prefix-based factory must hand back a callable validator."""
    validator = get_validated_uuidbase62("item_id")
    assert callable(validator)
def test_get_validated_uuidbase62_function__with_valid_input__returns_valid_uuidbase62_value():
    """A correctly prefixed value round-trips through the prefix-based validator."""
    prefix = "my_prefix"
    value = prefix + "_7yNMTpVy8ddRxYKGJqtk7e"
    validator = get_validated_uuidbase62("item_id", prefix)
    parsed = validator(value)
    # The validator returns a fully populated UUIDBase62 wrapper.
    assert isinstance(parsed, UUIDBase62)
    assert parsed.value == value
    assert parsed.prefix == prefix
    assert isinstance(parsed.uuid, uuid.UUID)
def test_get_validated_uuidbase62_function__with_invalid_input__returns_valid_uuidbase62_value():
    """A value whose prefix does not match the validator's must raise HTTP 404."""
    # Plain string literal: the original used an f-string with no placeholders (lint F541).
    value = "my_prefix_7yNMTpVy8ddRxYKGJqtk7e"
    func = get_validated_uuidbase62("item_id", "different_prefix")
    with pytest.raises(fastapi.exceptions.HTTPException) as e:
        func(value)
    assert e.value.status_code == 404
def test_get_validated_uuidbase62_by_model_function__with_invalid_input__returns_valid_uuidbase62_value():
    """A value whose prefix does not match the model's field must raise HTTP 404."""
    # Plain string literal: the original used an f-string with no placeholders (lint F541).
    value = "different_prefix_7yNMTpVy8ddRxYKGJqtk7e"
    func = get_validated_uuidbase62_by_model(Item, "client_id", "item_id")
    with pytest.raises(fastapi.exceptions.HTTPException) as e:
        func(value)
    assert e.value.status_code == 404
| 31.148649
| 106
| 0.790022
| 280
| 2,305
| 6.039286
| 0.164286
| 0.09935
| 0.182141
| 0.09935
| 0.845062
| 0.827912
| 0.827912
| 0.794796
| 0.729746
| 0.704317
| 0
| 0.045272
| 0.137527
| 2,305
| 73
| 107
| 31.575342
| 0.805332
| 0.005206
| 0
| 0.510204
| 0
| 0
| 0.10694
| 0.058053
| 0
| 0
| 0
| 0
| 0.244898
| 1
| 0.122449
| false
| 0
| 0.102041
| 0
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf61d27de57659933f48f34b5668468eca784274
| 51,214
|
py
|
Python
|
foo/wx/wx_items.py
|
ThomasZh/legend-club-wxpub
|
a10dbe5c25dda8e85826a0039c41406e1044af0a
|
[
"Apache-2.0"
] | 2
|
2017-05-12T04:20:07.000Z
|
2018-04-18T02:48:34.000Z
|
foo/wx/wx_items.py
|
ThomasZh/legend-club-wxpub
|
a10dbe5c25dda8e85826a0039c41406e1044af0a
|
[
"Apache-2.0"
] | null | null | null |
foo/wx/wx_items.py
|
ThomasZh/legend-club-wxpub
|
a10dbe5c25dda8e85826a0039c41406e1044af0a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# _*_ coding: utf-8_*_
#
# Copyright 2016 planc2c.com
# dev@tripc2c.com
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tornado.web
import logging
import uuid
import time
import re
import json as JSON # 启用别名,不会跟方法里的局部变量混淆
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../"))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../dao"))
from tornado.escape import json_encode, json_decode
from tornado.httpclient import HTTPClient
from tornado.httputil import url_concat
from bson import json_util
from comm import *
from dao import budge_num_dao
from dao import category_dao
from dao import activity_dao
from dao import group_qrcode_dao
from dao import cret_template_dao
from dao import bonus_template_dao
from dao import bonus_dao
from dao import apply_dao
from dao import order_dao
from dao import group_qrcode_dao
from dao import vendor_member_dao
from dao import voucher_dao
from dao import insurance_template_dao
from dao import contact_dao
from dao import vendor_hha_dao
from dao import voucher_pay_dao
from dao import vendor_wx_dao
from dao import voucher_order_dao
from dao import trip_router_dao
from dao import triprouter_share_dao
from dao import club_dao
from dao import activity_share_dao
from dao import vendor_wx_dao
from foo.wx import wx_wrap
from xml_parser import parseWxOrderReturn, parseWxPayReturn
from global_const import *
# Club home page
class WxItemsIndexHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self, club_id):
        """Render the club's item-store landing page.

        The URL segment ``club_id`` is either 32 chars (club id only) or
        64 chars (club id followed by a 32-char guest id), split below.
        NOTE(review): the final ``else`` slices exactly like the 64-char
        branch, so any other length is treated as the 64-char layout --
        confirm this is intentional.
        """
        logging.info("got club_id %r", club_id)
        access_token = self.get_access_token()
        guest_id = DEFAULT_USER_ID
        if len(club_id) == 32:
            guest_id = DEFAULT_USER_ID
        elif len(club_id) == 64:
            guest_id = club_id[32:64]
            club_id = club_id[0:32]
        else:
            guest_id = club_id[32:64]
            club_id = club_id[0:32]
        logging.info("got club_id=[%r]", club_id)
        logging.info("got guest_id=[%r]", guest_id)
        # club = self.get_club_basic_info(club_id)
        # logging.info("got club %r", club)
        # Fetch the club record from the backend API (no auth header here).
        url = API_DOMAIN+"/api/clubs/"+club_id
        http_client = HTTPClient()
        response = http_client.fetch(url, method="GET")
        logging.info("got response %r", response.body)
        data = json_decode(response.body)
        club = data['rs']
        # Default optional display fields so the template never hits a KeyError.
        if not club.has_key('img'):
            club['img'] = ''
        if not club.has_key('paragraphs'):
            club['paragraphs'] = ''
        my_account_id = self.get_secure_cookie("account_id")
        logging.info("GET my_account_id=[%r]", my_account_id)
        # Per-club WeChat app credentials, used to build the JS-SDK signature.
        wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
        wx_app_id = wx_app_info['wx_app_id']
        wx_app_secret = wx_app_info['wx_app_secret']
        wx_notify_domain = wx_app_info['wx_notify_domain']
        logging.info("got wx_app_info=[%r]", wx_app_info)
        wx_access_token = wx_wrap.getAccessTokenByClientCredential(wx_app_id, wx_app_secret)
        _jsapi_ticket = wx_wrap.getJsapiTicket(wx_access_token)
        _url = wx_notify_domain + self.request.uri
        # Share URL embeds the visitor's account id so referrals can be traced.
        share_url = wx_notify_domain + "/bf/wx/vendors/"+club_id+my_account_id+"/index"
        _sign = wx_wrap.Sign(_jsapi_ticket, _url).sign()
        logging.info("got sign=[%r]", _sign)
        self.render('items/main.html',
            api_domain = API_DOMAIN,
            access_token=access_token,
            club_id = club_id,
            club=club,
            wx_app_id=wx_app_id,
            share_url=share_url,
            sign=_sign)
# Default category list
class WxItemsCategoryListDefaultHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self):
        """Redirect to the category page of the last-visited club.

        Falls back to the site-wide default ``CLUB_ID`` (and remembers it in
        the ``last_visit_club_id`` cookie) when no previous visit is recorded.
        """
        logging.info("^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^")
        logging.info("^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^")
        logging.info("GET %r", self.request.uri)
        target_club_id = self.get_cookie("last_visit_club_id")
        logging.info("got last_visit_club_id=[%r]", target_club_id)
        if target_club_id is None:
            # First visit: remember the default club for next time.
            target_club_id = CLUB_ID
            self.set_cookie("last_visit_club_id", target_club_id)
        self.redirect("/bf/wx/vendors/"+ target_club_id +"/category/items")
# Category list
class WxItemsCategoryListHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self, club_id):
        """Render the two-level category browse page for a club.

        Query args ``category_id``/``second_category_id`` select the active
        top-level and second-level categories; both default to the first
        entry returned by the API.  ``club_id`` may carry a 32-char guest id
        suffix (64 chars total), which is split off below.
        """
        logging.info("^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^")
        logging.info("^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^")
        logging.info("GET %r", self.request.uri)
        self.set_cookie("last_visit_club_id", club_id)
        guest_id = DEFAULT_USER_ID
        if len(club_id) == 32:
            guest_id = DEFAULT_USER_ID
        elif len(club_id) == 64:
            guest_id = club_id[32:64]
            club_id = club_id[0:32]
        else:
            # NOTE(review): any other length is sliced like the 64-char case -- confirm.
            guest_id = club_id[32:64]
            club_id = club_id[0:32]
        logging.info("got club_id=[%r]", club_id)
        logging.info("got guest_id=[%r]", guest_id)
        category_id = self.get_argument("category_id", "")
        logging.info("got category_id %r", category_id)
        second_category_id = self.get_argument("second_category_id", "")
        logging.info("got second_category_id %r", second_category_id)
        # Query categories
        access_token = self.get_access_token()
        logging.info("GET access_token=[%r]", access_token)
        my_account_id = self.get_secure_cookie("account_id")
        logging.info("GET my_account_id=[%r]", my_account_id)
        club = self.get_club_basic_info(club_id)
        logging.info("get club %r",club)
        league_id = club['league_id']
        # Top-level categories are defined per league, not per club.
        url = API_DOMAIN + "/api/def/leagues/"+ league_id +"/categories"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.debug("got categorys response.body %r", response.body)
        data = json_decode(response.body)
        categorys = data['rs']
        if not category_id:
            category_id = categorys[0]['_id']
        # Second-level categories under the selected top-level category.
        url = API_DOMAIN + "/api/def/categories/" + category_id + "/level2"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.debug("got second_categorys response.body %r", response.body)
        data = json_decode(response.body)
        second_categorys = data['rs']
        # Fetch the cart item count  /api/clubs/([a-z0-9]*)/cart/nums
        url = API_DOMAIN + "/api/clubs/" + club_id + "/cart/nums"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.debug("got cart_goods_num response.body %r", response.body)
        data = json_decode(response.body)
        cart_goods_num = data['data']['quantity']
        logging.info("got cart_goods_num %r", cart_goods_num)
        second_specs = None
        second_brands = None
        if not second_category_id:
            second_category_id = second_categorys[0]['_id']
        # Specs and brands filters belong to the selected second-level category.
        url = API_DOMAIN + "/api/def/categories/"+ second_category_id +"/specs"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.debug("got second_specs response.body %r", response.body)
        data = json_decode(response.body)
        second_specs = data['rs']
        url = API_DOMAIN + "/api/def/categories/"+ second_category_id +"/brands"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.debug("got second_brands response.body %r", response.body)
        data = json_decode(response.body)
        second_brands = data['rs']
        my_account_id = self.get_secure_cookie("account_id")
        logging.info("GET my_account_id=[%r]", my_account_id)
        # Per-club WeChat credentials for the JS-SDK share signature.
        wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
        wx_app_id = wx_app_info['wx_app_id']
        wx_app_secret = wx_app_info['wx_app_secret']
        wx_notify_domain = wx_app_info['wx_notify_domain']
        logging.info("got wx_app_info=[%r]", wx_app_info)
        wx_access_token = wx_wrap.getAccessTokenByClientCredential(wx_app_id, wx_app_secret)
        _jsapi_ticket = wx_wrap.getJsapiTicket(wx_access_token)
        _url = wx_notify_domain + self.request.uri
        share_url = wx_notify_domain + "/bf/wx/vendors/"+club_id+my_account_id+"/category/items"
        _sign = wx_wrap.Sign(_jsapi_ticket, _url).sign()
        logging.info("got sign=[%r]", _sign)
        club = self.get_club_basic_info(club_id)
        self.render('items/category.html',
            API_DOMAIN=API_DOMAIN,
            access_token=access_token,
            club=club,
            club_id=club_id,
            category_id=category_id,
            second_category_id=second_category_id,
            second_categorys=second_categorys,
            second_specs=second_specs,
            second_brands=second_brands,
            categorys=categorys,
            cart_goods_num=cart_goods_num,
            wx_app_id=wx_app_id,
            share_url=share_url,
            sign=_sign)
# Spec-filtered category list
class WxItemsCategorySpecsListHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self, club_id, spec_id):
        """Render the category browse page filtered by one spec (``spec_id``).

        Mirrors WxItemsCategoryListHandler but additionally fetches the spec
        record and renders ``category-specs.html``.
        """
        logging.info("GET %r", self.request.uri)
        category_id = self.get_argument("category_id", "")
        logging.info("got category_id %r", category_id)
        second_category_id = self.get_argument("second_category_id", "")
        logging.info("got second_category_id %r", second_category_id)
        # Query categories
        access_token = self.get_access_token()
        logging.info("GET access_token %r", access_token)
        club = self.get_club_basic_info(club_id)
        logging.info("get club %r",club)
        league_id = club['league_id']
        # Top-level categories are defined per league.
        url = API_DOMAIN + "/api/def/leagues/"+ league_id +"/categories"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        categorys = data['rs']
        if not category_id:
            category_id = categorys[0]['_id']
        # Second-level categories under the selected top-level category.
        url = API_DOMAIN + "/api/def/categories/" + category_id + "/level2"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        second_categorys = data['rs']
        # Fetch the cart item count  /api/clubs/([a-z0-9]*)/cart/nums
        url = API_DOMAIN + "/api/clubs/" + club_id + "/cart/nums"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        cart_goods_num = data['data']['quantity']
        logging.info("got cart_goods_num %r", cart_goods_num)
        second_specs = None
        second_brands = None
        if not second_category_id:
            second_category_id = second_categorys[0]['_id']
        # Specs and brands filters for the selected second-level category.
        url = API_DOMAIN + "/api/def/categories/"+ second_category_id +"/specs"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        second_specs = data['rs']
        url = API_DOMAIN + "/api/def/categories/"+ second_category_id +"/brands"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        second_brands = data['rs']
        # Fetch the selected spec record itself for display.
        url = API_DOMAIN + "/api/def/specs/"+spec_id
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        _spec = data['rs']
        my_account_id = self.get_secure_cookie("account_id")
        logging.info("GET my_account_id=[%r]", my_account_id)
        # Per-club WeChat credentials for the JS-SDK share signature.
        wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
        wx_app_id = wx_app_info['wx_app_id']
        wx_app_secret = wx_app_info['wx_app_secret']
        wx_notify_domain = wx_app_info['wx_notify_domain']
        logging.info("got wx_app_info=[%r]", wx_app_info)
        wx_access_token = wx_wrap.getAccessTokenByClientCredential(wx_app_id, wx_app_secret)
        _jsapi_ticket = wx_wrap.getJsapiTicket(wx_access_token)
        _url = wx_notify_domain + self.request.uri
        share_url = wx_notify_domain + "/bf/wx/vendors/"+club_id+my_account_id+"/category/items"
        _sign = wx_wrap.Sign(_jsapi_ticket, _url).sign()
        logging.info("got sign=[%r]", _sign)
        club = self.get_club_basic_info(club_id)
        self.render('items/category-specs.html',
            API_DOMAIN=API_DOMAIN,
            access_token=access_token,
            club=club,
            club_id=club_id,
            spec_id=spec_id,
            category_id=category_id,
            second_category_id=second_category_id,
            second_categorys=second_categorys,
            second_specs=second_specs,
            second_brands=second_brands,
            categorys=categorys,
            _spec=_spec,
            cart_goods_num=cart_goods_num,
            wx_app_id=wx_app_id,
            share_url=share_url,
            sign=_sign)
# Brand-filtered category list
class WxItemsCategoryBrandsListHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self, club_id, brand_id):
        """Render the category browse page filtered by one brand (``brand_id``).

        Mirrors WxItemsCategoryListHandler but additionally fetches the brand
        record and renders ``category-brands.html``.
        """
        logging.info("GET %r", self.request.uri)
        category_id = self.get_argument("category_id", "")
        logging.info("got category_id %r", category_id)
        second_category_id = self.get_argument("second_category_id", "")
        logging.info("got second_category_id %r", second_category_id)
        # Query categories
        access_token = self.get_access_token()
        logging.info("GET access_token %r", access_token)
        club = self.get_club_basic_info(club_id)
        logging.info("get club %r",club)
        league_id = club['league_id']
        # Top-level categories are defined per league.
        url = API_DOMAIN + "/api/def/leagues/"+ league_id +"/categories"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        categorys = data['rs']
        if not category_id:
            category_id = categorys[0]['_id']
        # Second-level categories under the selected top-level category.
        url = API_DOMAIN + "/api/def/categories/" + category_id + "/level2"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        second_categorys = data['rs']
        # Fetch the cart item count  /api/clubs/([a-z0-9]*)/cart/nums
        url = API_DOMAIN + "/api/clubs/" + club_id + "/cart/nums"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        cart_goods_num = data['data']['quantity']
        logging.info("got cart_goods_num %r", cart_goods_num)
        second_specs = None
        second_brands = None
        if not second_category_id:
            second_category_id = second_categorys[0]['_id']
        # Specs and brands filters for the selected second-level category.
        url = API_DOMAIN + "/api/def/categories/"+ second_category_id +"/specs"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        second_specs = data['rs']
        url = API_DOMAIN + "/api/def/categories/"+ second_category_id +"/brands"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        second_brands = data['rs']
        # Fetch the selected brand record itself for display.
        url = API_DOMAIN + "/api/def/brands/"+brand_id
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        _brand = data['rs']
        my_account_id = self.get_secure_cookie("account_id")
        logging.info("GET my_account_id=[%r]", my_account_id)
        # Per-club WeChat credentials for the JS-SDK share signature.
        wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
        wx_app_id = wx_app_info['wx_app_id']
        wx_app_secret = wx_app_info['wx_app_secret']
        wx_notify_domain = wx_app_info['wx_notify_domain']
        logging.info("got wx_app_info=[%r]", wx_app_info)
        wx_access_token = wx_wrap.getAccessTokenByClientCredential(wx_app_id, wx_app_secret)
        _jsapi_ticket = wx_wrap.getJsapiTicket(wx_access_token)
        _url = wx_notify_domain + self.request.uri
        share_url = wx_notify_domain + "/bf/wx/vendors/"+club_id+my_account_id+"/category/items"
        _sign = wx_wrap.Sign(_jsapi_ticket, _url).sign()
        logging.info("got sign=[%r]", _sign)
        club = self.get_club_basic_info(club_id)
        self.render('items/category-brands.html',
            API_DOMAIN=API_DOMAIN,
            access_token=access_token,
            club=club,
            club_id=club_id,
            brand_id=brand_id,
            category_id=category_id,
            second_category_id=second_category_id,
            second_categorys=second_categorys,
            second_specs=second_specs,
            second_brands=second_brands,
            categorys=categorys,
            _brand=_brand,
            cart_goods_num=cart_goods_num,
            wx_app_id=wx_app_id,
            share_url=share_url,
            sign=_sign)
# Old (legacy) category list
class WxItemsListHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self, club_id):
        """Render the legacy item-list page for one club.

        Reads categories and the cart through local DAO/helper calls instead
        of the HTTP API used by the newer category handlers.
        """
        logging.info("GET %r", self.request.uri)
        # Query categories
        _array = category_dao.category_dao().query_by_vendor(club_id)
        logging.info("got categories=[%r]", _array)
        category_num = len(_array)
        # FIX: the original call passed category_num with no format
        # placeholder, which triggers a logging formatting error at runtime.
        logging.info("got category_num %r", category_num)
        cart_goods = self.get_cart(club_id)
        logging.info("got cart_goods %r", cart_goods)
        # Total number of goods currently in the cart.
        cart_goods_num = 0
        for cart_good in cart_goods:
            cart_goods_num += cart_good['quantity']
        logging.info("got cart_goods_num %r", cart_goods_num)
        club = self.get_club_basic_info(club_id)
        private = 0
        items = self.get_items(club_id, ACTIVITY_STATUS_RECRUIT, private)
        logging.info("GET items %r", items)
        for item in items:
            # Convert price from cents to the display unit.
            item['amount'] = float(item['amount']) / 100
        self.render('items/main.html',
            club=club,
            club_id=club_id,
            items=items,
            cart_goods_num=cart_goods_num,
            category_num=category_num)
# Product detail
class WxItemsDetailHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self,club_id,item_id):
        """Render the detail page for one item: specs, cart count, article, share sign."""
        logging.info("GET %r", self.request.uri)
        access_token = self.get_secure_cookie("access_token")
        club = self.get_club_basic_info(club_id)
        item = self.get_item(item_id)
        logging.info("got item %r", item)
        # url = API_DOMAIN + "/api/def/categories/"+ activity['level2_category_id'] +"/specs"
        # http_client = HTTPClient()
        # headers = {"Authorization":"Bearer " + access_token}
        # response = http_client.fetch(url, method="GET", headers=headers)
        # logging.info("got response.body %r", response.body)
        # data = json_decode(response.body)
        # specs = data['rs']
        # Fetch this item's purchasable specs (price variants).
        url = API_DOMAIN + "/api/items/"+ item_id +"/specs"
        http_client = HTTPClient()
        headers = {"Authorization":"Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        item_specs = data['rs']
        for item_spec in item_specs:
            # Convert each spec's price from cents to the display unit.
            item_spec['amount'] = float(item_spec['amount']) / 100
        # Fetch the cart contents
        cart_goods = self.get_cart(club_id)
        logging.info("got cart_goods %r", cart_goods)
        # Total number of goods in the cart
        cart_goods_num = 0
        for cart_good in cart_goods:
            cart_goods_num += cart_good['quantity']
        logging.info("got cart_goods_num %r", cart_goods_num)
        # Fetch the product description article; create a stub on first view.
        article = self.get_article(item_id)
        if not article:
            article = {'_id':item_id, 'title':item['title'], 'subtitle':[], 'img':item['img'],'paragraphs':''}
            self.create_article(article)
        logging.info("got article %r", article)
        my_account_id = self.get_secure_cookie("account_id")
        logging.info("GET my_account_id=[%r]", my_account_id)
        # Per-club WeChat credentials for the JS-SDK share signature.
        wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
        wx_app_id = wx_app_info['wx_app_id']
        wx_app_secret = wx_app_info['wx_app_secret']
        wx_notify_domain = wx_app_info['wx_notify_domain']
        logging.info("got wx_app_info=[%r]", wx_app_info)
        wx_access_token = wx_wrap.getAccessTokenByClientCredential(wx_app_id, wx_app_secret)
        _jsapi_ticket = wx_wrap.getJsapiTicket(wx_access_token)
        _url = wx_notify_domain + self.request.uri
        share_url = wx_notify_domain + "/bf/wx/vendors/"+club_id+my_account_id+"/category/items"
        _sign = wx_wrap.Sign(_jsapi_ticket, _url).sign()
        logging.info("got sign=[%r]", _sign)
        self.render('items/prodetail.html',
            api_domain= API_DOMAIN,
            access_token=access_token,
            cart_goods_num=cart_goods_num,
            club=club,
            club_id=club_id,
            item_id=item_id,
            item=item,
            item_specs=item_specs,
            article=article,
            wx_app_id=wx_app_id,
            share_url=share_url,
            sign=_sign)
    # Add an item to the cart (disabled POST handler kept for reference)
    # @tornado.web.authenticated # if no session, redirect to login page
    # def post(self, club_id, item_id):
    #     logging.info("got club_id %r in uri", club_id)
    #     access_token = self.get_secure_cookie("access_token")
    #
    #     fee_template_id = self.get_argument('fee_template_id',"")
    #     logging.info("got fee_template_id %r in uri", fee_template_id)
    #     product_num = self.get_argument('product_num',"")
    #     logging.info("got product_num %r in uri", product_num)
    #
    #     item_type = [{"item_id":item_id, "fee_template_id":fee_template_id, "quantity":product_num}]
    #     headers = {"Authorization":"Bearer "+access_token}
    #
    #     url = API_DOMAIN + "/api/clubs/"+ club_id +"/cart/items"
    #     _json = json_encode(item_type)
    #     http_client = HTTPClient()
    #     response = http_client.fetch(url, method="POST", headers=headers, body=_json)
    #     logging.info("update item response.body=[%r]", response.body)
    #
    #     self.redirect('/bf/wx/vendors/'+ club_id +'/items/'+item_id)
# Default cart
class WxItemsCartDefaultHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self):
        """Redirect to the cart page of the last-visited club.

        Falls back to the site-wide default ``CLUB_ID`` (and remembers it in
        the ``last_visit_club_id`` cookie) when no previous visit is recorded.
        """
        logging.info("^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^")
        logging.info("^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^ ^^^^^")
        logging.info("GET %r", self.request.uri)
        target_club_id = self.get_cookie("last_visit_club_id")
        logging.info("got last_visit_club_id=[%r]", target_club_id)
        if target_club_id is None:
            # First visit: remember the default club for next time.
            target_club_id = CLUB_ID
            self.set_cookie("last_visit_club_id", target_club_id)
        self.redirect("/bf/wx/vendors/"+ target_club_id +"/items/cart")
# Shopping cart
class WxItemsCartHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self, club_id):
        """Render the cart page for *club_id* and remember it as the last visited club."""
        logging.info("GET %r", self.request.uri)
        token = self.get_secure_cookie("access_token")
        self.set_cookie("last_visit_club_id", club_id)
        self.render('items/cart.html',
                    api_domain=API_DOMAIN,
                    club_id=club_id,
                    access_token=token)
# Submit-order page
class WxItemsSubmitOrderHandler(AuthorizationHandler):
    @tornado.web.authenticated # if no session, redirect to login page
    def get(self, club_id):
        """Render the order-submission page with the club's league context."""
        logging.info("GET %r", self.request.uri)
        token = self.get_secure_cookie("access_token")
        my_account = self.get_secure_cookie("account_id")
        club = self.get_club_basic_info(club_id)
        logging.info("get club %r",club)
        self.render('items/submit-order.html',
                    api_domain=API_DOMAIN,
                    league_id=club['league_id'],
                    club_id=club_id,
                    access_token=token,
                    account_id=my_account)
# 调用wechat pay
class WxItemsOrderCheckoutHandler(AuthorizationHandler):
@tornado.web.authenticated # if no session, redirect to login page
def post(self):
club_id = self.get_argument("club_id", "")
logging.info("got club_id %r", club_id)
_account_id = self.get_secure_cookie("account_id")
guest_club_id = self.get_argument("guest_club_id")
logging.info("got guest_club_id %r", guest_club_id)
access_token = self.get_access_token()
item_id = "00000000000000000000000000000000"
# 取得自己的最后一笔订单
params = {"filter":"account", "account_id":_account_id, "page":1, "limit":1,}
url = url_concat(API_DOMAIN + "/api/orders", params)
http_client = HTTPClient()
headers = {"Authorization":"Bearer " + access_token}
response = http_client.fetch(url, method="GET", headers=headers)
logging.info("got response.body %r", response.body)
data = json_decode(response.body)
rs = data['rs']
orders = rs['data']
_timestamp = int(time.time())
# 一分钟内不能创建第二个订单,
# 防止用户点击回退按钮,产生第二个订单
if len(orders) > 0:
for order in orders:
if (_timestamp - order['create_time']) < 60:
self.redirect('/bf/wx/orders/wait')
return
#购物车商品json
items = self.get_body_argument("items", [])
logging.info("got items %r", items)
items = JSON.loads(items)
logging.info("got items %r", items)
#收获地址
addr = self.get_argument("addr", {})
logging.info("got addr %r", addr)
addr = JSON.loads(addr)
logging.info("got addr %r", addr)
#是否需要发票
billing = self.get_argument("billing",'0')
logging.info("got billing %r", billing)
billing_addr = {'tfn':'','company_title':''}
# 发票信息
if billing == '1':
_addr = self.get_argument("billing_addr",{})
logging.info("got _addr %r", _addr)
billing_addr = JSON.loads(_addr)
logging.info("got billing_addr %r", billing_addr)
coupon = self.get_argument('coupon',0)
logging.info("got coupon %r",coupon)
coupon = JSON.loads(coupon)
# 积分
used_points = self.get_argument('used_points',0)
logging.info("got used_points %r",used_points)
order_id = str(uuid.uuid1()).replace('-', '')
# 创建订单索引
order_index = {
"_id": order_id,
"order_type": "buy_item",
"club_id": club_id,
"item_type": "items",
"item_id": item_id,
"item_name": "", # 由服务器端填写第一个商品名称
"distributor_type": "item",
"items":items,
"shipping_addr":addr,
"shipping_cost":0, # 由服务器端计算运费
"billing_required":billing,
"billing_addr":billing_addr,
"coupon":coupon,
"distributor_id": "00000000000000000000000000000000",
"create_time": _timestamp,
"pay_type": "wxpay",
"pay_status": ORDER_STATUS_BF_INIT,
"quantity": 0, # 由服务器端计算商品数量
"amount": 0, # 由服务器端计算商品合计
"actual_payment": 0, # 由服务器端计算实际支付金额
"base_fees": [], #基本服务
"ext_fees": [], # 附加服务项编号数组
"insurances": [], # 保险选项,数组
"vouchers": [], #代金券选项,数组
"points_used": used_points, # 使用积分数量
"bonus_points": 0, # 购买商品获得奖励积分
"booking_time": _timestamp,
}
pay_id = self.create_order(order_index)
order = self.get_symbol_object(order_id)
logging.info("GET order %r", order)
order['create_time'] = timestamp_datetime(float(order['create_time']))
# order['shipping_cost'] = float(order['shipping_cost'])/100
# order['actual_payment'] = float(order['actual_payment'])/100
items = order['items']
_product_description = items[0]['title']
logging.info("GET items %r", items)
shipping_addr = order['shipping_addr']
logging.info("GET shipping_addr %r", shipping_addr)
billing_addr = order['billing_addr']
logging.info("GET billing_addr %r", billing_addr)
# 清空购物车
headers = {"Authorization":"Bearer "+access_token}
url = API_DOMAIN + "/api/clubs/"+ club_id +"/cart/items"
http_client = HTTPClient()
response = http_client.fetch(url, method="DELETE", headers=headers)
logging.info("update item response.body=[%r]", response.body)
# budge_num increase
self.counter_increase(club_id, "item_order")
# self.counter_increase(order_id, "order")
# TODO notify this message to vendor's administrator by SMS
wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
wx_app_id = wx_app_info['wx_app_id']
logging.info("got wx_app_id %r in uri", wx_app_id)
wx_app_secret = wx_app_info['wx_app_secret']
wx_mch_key = wx_app_info['wx_mch_key']
wx_mch_id = wx_app_info['wx_mch_id']
wx_notify_domain = wx_app_info['wx_notify_domain']
# 如使用积分抵扣,则将积分减去
if order_index['points_used'] > 0:
# 修改个人积分信息
bonus_points = {
'org_id':club_id,
'org_type':'club',
'account_id':_account_id,
'account_type':'user',
'action': 'buy_item',
'item_type': 'items',
'item_id': order['item_id'],
'item_name': order['item_name'],
'bonus_type':'bonus',
'points': used_points,
'order_id': order_index['_id']
}
self.create_points(bonus_points)
if order['actual_payment'] != 0:
# wechat 统一下单
myinfo = self.get_myinfo_login()
_openid = myinfo['login']
_store_id = 'Aplan'
logging.info("got _store_id %r", _store_id)
#_ip = self.request.remote_ip
_remote_ip = self.request.headers['X-Real-Ip']
_order_return = wx_wrap.getUnifiedOrder(_remote_ip, wx_app_id, _store_id, _product_description, wx_notify_domain, wx_mch_id, wx_mch_key, _openid, pay_id, order['actual_payment'], _timestamp)
# wx统一下单记录保存
_order_return['_id'] = _order_return['prepay_id']
self.create_symbol_object(_order_return)
# 微信统一下单返回成功
order_unified = None
if(_order_return['return_msg'] == 'OK'):
order_unified = {'_id':order_id,'prepay_id': _order_return['prepay_id'], 'pay_status': ORDER_STATUS_WECHAT_UNIFIED_SUCCESS}
else:
order_unified = {'_id':order_id,'prepay_id': _order_return['prepay_id'], 'pay_status': ORDER_STATUS_WECHAT_UNIFIED_FAILED}
# 微信统一下单返回成功
# TODO: 更新订单索引中,订单状态pay_status,prepay_id
self.update_order_unified(order_unified)
for item in items:
item['amount'] = float(item['amount'])/100
self.render('items/order-confirm.html',
access_token = access_token,
api_domain = API_DOMAIN,
shipping_addr=shipping_addr,
billing_addr=billing_addr,
club_id=club_id,
return_msg=response.body, order_return=_order_return,
order=order, items=items, )
# self.redirect('/bf/wx/vendors/'+ club_id +'/items/checkout/orders/'+order_id)
else: #actual_payment == 0:
# send message to wx 公众号客户 by template
wx_access_token = wx_wrap.getAccessTokenByClientCredential(WX_APP_ID, WX_APP_SECRET)
logging.info("got wx_access_token %r", wx_access_token)
# 通过wxpub,给俱乐部操作员发送通知
ops = self.get_club_ops_wx(club_id)
for op in ops:
wx_openid = op['binding_id']
logging.info("got wx_openid %r", wx_openid)
if order_index['order_type'] == "buy_activity":
wx_wrap.sendActivityOrderPayedToOpsMessage(wx_access_token, WX_NOTIFY_DOMAIN, wx_openid, order_index)
elif order_index['order_type'] == "buy_item":
logging.info("sendItemOrderPayedToOpsMessage=[%r]", WX_MESSAGE_TEMPLATE)
if WX_MESSAGE_TEMPLATE == "kkfcps":
wx_wrap.sendItemOrderPayedToOpsMessage_kkfcps(wx_access_token, WX_NOTIFY_DOMAIN, wx_openid, order_index)
else:
wx_wrap.sendItemOrderPayedToOpsMessage(wx_access_token, WX_NOTIFY_DOMAIN, wx_openid, order_index)
self.render('items/order-result.html',
api_domain=API_DOMAIN,
club_id=club_id,
items=items,
shipping_addr=shipping_addr,
billing_addr=billing_addr,
access_token=access_token,
order_id=order['_id'],
order=order)
# 下单成功后的订单详情
class WxItemsOrderResultHandler(AuthorizationHandler):
    """Order detail page shown after an order has been placed."""

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self, club_id, order_id):
        logging.info("GET %r", self.request.uri)
        token = self.get_secure_cookie("access_token")
        order = self.get_symbol_object(order_id)
        logging.info("GET order %r", order)
        pay_status = order['pay_status']
        # Convert the stored epoch timestamp into a display string.
        order['create_time'] = timestamp_datetime(float(order['create_time']))
        line_items = order['items']
        logging.info("GET items %r", line_items)
        self.render('items/order-result.html',
                    api_domain=API_DOMAIN,
                    club_id=club_id,
                    items=line_items,
                    shipping_addr=order['shipping_addr'],
                    billing_addr=order['billing_addr'],
                    access_token=token,
                    order_id=order_id,
                    order=order)
# 重新支付订单操作
class WxOrdersCheckoutHandler(AuthorizationHandler):
    """Re-pay an existing order.

    Refreshes the order's pay_id via the API, creates a new WeChat unified
    order, and renders the re-confirmation page.

    Guard: refuses to proceed when the user's most recent order is less than
    60 seconds old (protects against the back button creating a duplicate
    payment).
    """

    @tornado.web.authenticated  # if no session, redirect to login page
    def post(self):
        club_id = self.get_argument("club_id", "")
        logging.info("got club_id %r", club_id)
        _account_id = self.get_secure_cookie("account_id")
        order_id = self.get_argument("order_id", "")
        logging.info("got order_id %r", order_id)
        access_token = self.get_access_token()
        item_id = "00000000000000000000000000000000"
        guest_club_id = "00000000000000000000000000000000"
        # Fetch the user's most recent order (page=1, limit=1).
        params = {"filter": "account", "account_id": _account_id, "page": 1, "limit": 1}
        url = url_concat(API_DOMAIN + "/api/orders", params)
        http_client = HTTPClient()
        headers = {"Authorization": "Bearer " + access_token}
        response = http_client.fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        orders = data['rs']['data']
        _timestamp = time.time()
        # No second order within one minute: guards against the user's
        # back button producing a duplicate order.
        for order in orders:
            # BUG FIX: create_time arrives from the JSON API and may be a
            # string; coerce to float before the subtraction (the render
            # path below already coerces the same field).
            if (_timestamp - float(order['create_time'])) < 60:
                self.redirect('/bf/wx/orders/wait')
                return
        # Ask the API to issue a fresh pay_id for this order.
        url = API_DOMAIN + "/api/orders/" + order_id + "/payid"
        headers = {"Authorization": "Bearer " + access_token}
        # NOTE(review): the request body is just the auth headers re-encoded;
        # preserved as-is — confirm what the API actually expects here.
        _json = json_encode(headers)
        response = http_client.fetch(url, method="POST", headers=headers, body=_json)
        logging.info("got response.body %r", response.body)
        data = json_decode(response.body)
        pay_id = data['rs']['pay_id']
        order = self.get_symbol_object(order_id)
        _product_description = order['items'][0]['title']
        actual_payment = order['actual_payment']
        items = order['items']
        order['create_time'] = timestamp_datetime(float(order['create_time']))
        shipping_addr = order['shipping_addr']
        logging.info("GET shipping_addr %r", shipping_addr)
        billing_addr = order['billing_addr']
        logging.info("GET billing_addr %r", billing_addr)
        # Per-club WeChat payment credentials.
        wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
        wx_app_id = wx_app_info['wx_app_id']
        logging.info("got wx_app_id %r in uri", wx_app_id)
        wx_app_secret = wx_app_info['wx_app_secret']
        wx_mch_key = wx_app_info['wx_mch_key']
        wx_mch_id = wx_app_info['wx_mch_id']
        wx_notify_domain = wx_app_info['wx_notify_domain']
        # Single integer timestamp for the unified order (the original
        # computed this twice).
        _timestamp = int(time.time())
        if actual_payment != 0:
            # WeChat unified order.
            myinfo = self.get_myinfo_login()
            _openid = myinfo['login']
            _store_id = 'Aplan'
            logging.info("got _store_id %r", _store_id)
            # Behind the reverse proxy the client IP arrives in X-Real-Ip.
            _remote_ip = self.request.headers['X-Real-Ip']
            _order_return = wx_wrap.getUnifiedOrder(
                _remote_ip, wx_app_id, _store_id, _product_description,
                wx_notify_domain, wx_mch_id, wx_mch_key, _openid, pay_id,
                actual_payment, _timestamp)
            # Persist the unified-order response keyed by prepay_id.
            _order_return['_id'] = _order_return['prepay_id']
            self.create_symbol_object(_order_return)
            # Record whether WeChat accepted the unified order on the index.
            if _order_return['return_msg'] == 'OK':
                pay_status = ORDER_STATUS_WECHAT_UNIFIED_SUCCESS
            else:
                pay_status = ORDER_STATUS_WECHAT_UNIFIED_FAILED
            order_unified = {'_id': order_id,
                            'prepay_id': _order_return['prepay_id'],
                            'pay_status': pay_status}
            self.update_order_unified(order_unified)
            self.render('items/re-order-confirm.html',
                        access_token=access_token,
                        api_domain=API_DOMAIN,
                        shipping_addr=shipping_addr,
                        billing_addr=billing_addr,
                        club_id=club_id,
                        return_msg=response.body, order_return=_order_return,
                        order=order, items=items)
# 默认个人订单列表
class WxItemsMyordersDefaultHandler(AuthorizationHandler):
    """Redirect to the my-orders page of the club the user last visited.

    Falls back to the configured default CLUB_ID (and records it in the
    cookie) when no last-visited club is known.
    """

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self):
        logging.info("GET %r", self.request.uri)
        last_visit_club_id = self.get_cookie("last_visit_club_id")
        logging.info("got last_visit_club_id=[%r]", last_visit_club_id)
        if last_visit_club_id is None:
            last_visit_club_id = CLUB_ID
            self.set_cookie("last_visit_club_id", last_visit_club_id)
        # Both branches of the original redirected to the same URL;
        # do it once.
        self.redirect("/bf/wx/vendors/" + last_visit_club_id + "/items/myorders")
# 订单中心-所有订单
class WxItemsMyordersHandler(AuthorizationHandler):
    """Order center: render the list of all orders for a club."""

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self, club_id):
        logging.info("GET %r", self.request.uri)
        token = self.get_access_token()
        logging.info("GET access_token %r", token)
        # Remember this club so the default handler can come back here.
        self.set_cookie("last_visit_club_id", club_id)
        self.render('items/myorders.html',
                    club_id=club_id,
                    API_DOMAIN=API_DOMAIN,
                    access_token=token)
# 订单中心-已支付订单
class WxItemsPayMyordersHandler(AuthorizationHandler):
    """Order center: render the list of paid orders for a club."""

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self, club_id):
        logging.info("GET %r", self.request.uri)
        token = self.get_access_token()
        logging.info("GET access_token %r", token)
        self.render('items/pay-myorders.html',
                    club_id=club_id,
                    API_DOMAIN=API_DOMAIN,
                    access_token=token)
# 订单中心-未支付订单
class WxItemsNopayMyordersHandler(AuthorizationHandler):
    """Order center: render the list of not-yet-paid orders for a club."""

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self, club_id):
        logging.info("GET %r", self.request.uri)
        token = self.get_access_token()
        logging.info("GET access_token %r", token)
        self.render('items/nopay-myorders.html',
                    club_id=club_id,
                    API_DOMAIN=API_DOMAIN,
                    access_token=token)
# 默认预估分类列表
class WxItemsRecommendListDefaultHandler(AuthorizationHandler):
    """Redirect to the recommend page of the last-visited club.

    Falls back to the configured default CLUB_ID (and records it in the
    cookie) when no last-visited club is known.
    """

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self):
        logging.info("GET %r", self.request.uri)
        last_visit_club_id = self.get_cookie("last_visit_club_id")
        logging.info("got last_visit_club_id=[%r]", last_visit_club_id)
        if last_visit_club_id is None:
            last_visit_club_id = CLUB_ID
            self.set_cookie("last_visit_club_id", last_visit_club_id)
        # Both branches of the original redirected to the same URL;
        # do it once.
        self.redirect("/bf/wx/vendors/" + last_visit_club_id + "/recommend")
# 预估分类列表
class WxItemsRecommendListHandler(AuthorizationHandler):
    """Render the recommended-item category page for a club, including the
    WeChat JS-SDK signature required for the share feature."""

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self, club_id):
        logging.info("GET %r", self.request.uri)
        self.set_cookie("last_visit_club_id", club_id)
        access_token = self.get_access_token()
        logging.info("GET access_token %r", access_token)
        club = self.get_club_basic_info(club_id)
        league_id = club['league_id']
        my_account_id = self.get_secure_cookie("account_id")
        logging.info("GET my_account_id=[%r]", my_account_id)
        # Per-club WeChat app credentials.
        wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
        wx_app_id = wx_app_info['wx_app_id']
        wx_app_secret = wx_app_info['wx_app_secret']
        wx_notify_domain = wx_app_info['wx_notify_domain']
        logging.info("got wx_app_info=[%r]", wx_app_info)
        # Sign the current page URL for the WeChat JS-SDK.
        wx_access_token = wx_wrap.getAccessTokenByClientCredential(wx_app_id, wx_app_secret)
        jsapi_ticket = wx_wrap.getJsapiTicket(wx_access_token)
        page_url = wx_notify_domain + self.request.uri
        # NOTE(review): club_id and my_account_id are concatenated with no
        # separator — confirm the share URL is meant to look this way.
        share_url = wx_notify_domain + "/bf/wx/vendors/"+club_id+my_account_id+"/category/items"
        signature = wx_wrap.Sign(jsapi_ticket, page_url).sign()
        logging.info("got sign=[%r]", signature)
        self.render('items/recommend-category.html',
                    API_DOMAIN=API_DOMAIN,
                    access_token=access_token,
                    LEAGUE_ID=league_id,
                    club=club,
                    club_id=club_id,
                    wx_app_id=wx_app_id,
                    share_url=share_url,
                    sign=signature)
# 预估商品列表
class WxItemsRecommendProductsHandler(AuthorizationHandler):
    """Render the recommended products inside one recommend category,
    including the WeChat JS-SDK share signature."""

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self, club_id, recommend_category_id):
        logging.info("GET %r", self.request.uri)
        logging.info("GET club_id %r", club_id)
        access_token = self.get_access_token()
        logging.info("GET access_token %r", access_token)
        logging.info("got recommend_category_id %r", recommend_category_id)
        club = self.get_club_basic_info(club_id)
        my_account_id = self.get_secure_cookie("account_id")
        logging.info("GET my_account_id=[%r]", my_account_id)
        # Per-club WeChat app credentials.
        wx_app_info = vendor_wx_dao.vendor_wx_dao().query(club_id)
        wx_app_id = wx_app_info['wx_app_id']
        wx_app_secret = wx_app_info['wx_app_secret']
        wx_notify_domain = wx_app_info['wx_notify_domain']
        logging.info("got wx_app_info=[%r]", wx_app_info)
        # Sign the current page URL for the WeChat JS-SDK.
        wx_access_token = wx_wrap.getAccessTokenByClientCredential(wx_app_id, wx_app_secret)
        jsapi_ticket = wx_wrap.getJsapiTicket(wx_access_token)
        page_url = wx_notify_domain + self.request.uri
        # NOTE(review): club_id and my_account_id are concatenated with no
        # separator — confirm the share URL is meant to look this way.
        share_url = wx_notify_domain + "/bf/wx/vendors/"+club_id+my_account_id+"/category/items"
        signature = wx_wrap.Sign(jsapi_ticket, page_url).sign()
        logging.info("got sign=[%r]", signature)
        self.render('items/recommend-products.html',
                    api_domain=API_DOMAIN,
                    club_id=club_id,
                    club=club,
                    access_token=access_token,
                    recommend_category_id=recommend_category_id,
                    wx_app_id=wx_app_id,
                    share_url=share_url,
                    sign=signature)
# 我的历史积分列表页
class WxItemsUserPointsHandler(AuthorizationHandler):
    """My bonus-points history page; fetches the user's current balance."""

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self, vendor_id):
        logging.info("got vendor_id %r in uri", vendor_id)
        account_id = self.get_secure_cookie("account_id")
        access_token = self.get_access_token()
        # Look up the customer profile to obtain the remaining points.
        url = API_DOMAIN + "/api/clubs/"+vendor_id+"/users/" + account_id
        headers = {"Authorization":"Bearer " + access_token}
        response = HTTPClient().fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        profile = json_decode(response.body)['rs']
        balance = profile['remaining_points']
        self.render('items/user-points.html',
                    api_domain = API_DOMAIN,
                    access_token = access_token,
                    account_id = account_id,
                    vendor_id=vendor_id,
                    bonus_num=balance)
# 我的上下线
class WxItemsUserlinesHandler(AuthorizationHandler):
    """My referral-lines page: shows the user's upline ("higher") contact."""

    @tornado.web.authenticated  # if no session, redirect to login page
    def get(self, vendor_id):
        logging.info("got vendor_id %r in uri", vendor_id)
        account_id = self.get_secure_cookie("account_id")
        access_token = self.get_access_token()
        # Fetch the direct upline for this account.
        url = API_DOMAIN + "/api/clubs/"+vendor_id+"/acquaintance/"+account_id+"/higher"
        headers = {"Authorization":"Bearer " + access_token}
        response = HTTPClient().fetch(url, method="GET", headers=headers)
        logging.info("got response.body %r", response.body)
        higher = json_decode(response.body)['rs']
        if higher:
            # Humanize the creation timestamp for display.
            higher['ctime'] = timestamp_datetime(float(higher['ctime']))
        self.render('items/user-lines.html',
                    vendor_id=vendor_id,
                    account_id=account_id,
                    higher=higher,
                    api_domain=API_DOMAIN,
                    access_token = access_token)
| 41.875715
| 202
| 0.617077
| 6,263
| 51,214
| 4.72106
| 0.066741
| 0.037946
| 0.04072
| 0.022321
| 0.817708
| 0.798363
| 0.788724
| 0.775365
| 0.758726
| 0.740835
| 0
| 0.006394
| 0.257898
| 51,214
| 1,222
| 203
| 41.909984
| 0.771589
| 0.082126
| 0
| 0.738095
| 0
| 0
| 0.163607
| 0.012721
| 0
| 0
| 0
| 0.000818
| 0
| 1
| 0.02381
| false
| 0
| 0.042208
| 0
| 0.091991
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cf88a501e95a2b8e4166f19911a9f931488f412e
| 202
|
py
|
Python
|
imagedt/tensorflow/__init__.py
|
Ken2yLiu/ImageDT
|
2fb08ed67c94c690ab5845a949d58d8fb3ff4ee5
|
[
"Apache-2.0"
] | null | null | null |
imagedt/tensorflow/__init__.py
|
Ken2yLiu/ImageDT
|
2fb08ed67c94c690ab5845a949d58d8fb3ff4ee5
|
[
"Apache-2.0"
] | null | null | null |
imagedt/tensorflow/__init__.py
|
Ken2yLiu/ImageDT
|
2fb08ed67c94c690ab5845a949d58d8fb3ff4ee5
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
from __future__ import print_function
# from . import network
# from . import optim
# from . import trainer
from . import tools
from . import lite
| 22.444444
| 38
| 0.777228
| 28
| 202
| 5.25
| 0.5
| 0.340136
| 0.217687
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005952
| 0.168317
| 202
| 9
| 39
| 22.444444
| 0.869048
| 0.381188
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cf8a2652a9a354d866562077cf19c09ca5dddb78
| 52,904
|
py
|
Python
|
source/deepsecurity/api/rulesets_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-10-30T16:40:09.000Z
|
2021-10-30T16:40:09.000Z
|
source/deepsecurity/api/rulesets_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-07-28T20:19:03.000Z
|
2021-07-28T20:19:03.000Z
|
source/deepsecurity/api/rulesets_api.py
|
felipecosta09/cloudone-workload-controltower-lifecycle
|
7927c84d164058b034fc872701b5ee117641f4d1
|
[
"Apache-2.0"
] | 1
|
2021-10-30T16:40:02.000Z
|
2021-10-30T16:40:02.000Z
|
# coding: utf-8
"""
Trend Micro Deep Security API
Copyright 2018 - 2020 Trend Micro Incorporated.<br/>Get protected, stay secured, and keep informed with Trend Micro Deep Security's new RESTful API. Access system data and manage security configurations to automate your security workflows and integrate Deep Security into your CI/CD pipeline. # noqa: E501
OpenAPI spec version: 12.5.841
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from deepsecurity.api_client import ApiClient
class RulesetsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_ruleset(self, ruleset, softwareinventoryid, api_version, **kwargs): # noqa: E501
"""Create a Shared Ruleset # noqa: E501
Create a new shared ruleset. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_ruleset(ruleset, softwareinventoryid, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Ruleset ruleset: The settings of the new ruleset. (required)
:param int softwareinventoryid: ID of the software inventory to base the ruleset on. (required)
:param str api_version: The version of the api being called. (required)
:return: Ruleset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_ruleset_with_http_info(ruleset, softwareinventoryid, api_version, **kwargs) # noqa: E501
else:
(data) = self.create_ruleset_with_http_info(ruleset, softwareinventoryid, api_version, **kwargs) # noqa: E501
return data
    def create_ruleset_with_http_info(self, ruleset, softwareinventoryid, api_version, **kwargs):  # noqa: E501
        """Create a Shared Ruleset  # noqa: E501

        Create a new shared ruleset.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_ruleset_with_http_info(ruleset, softwareinventoryid, api_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Ruleset ruleset: The settings of the new ruleset. (required)
        :param int softwareinventoryid: ID of the software inventory to base the ruleset on. (required)
        :param str api_version: The version of the api being called. (required)
        :return: Ruleset
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted beyond the positional parameters.
        all_params = ['ruleset', 'softwareinventoryid', 'api_version']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_ruleset" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'ruleset' is set
        if ('ruleset' not in params or
                params['ruleset'] is None):
            raise ValueError("Missing the required parameter `ruleset` when calling `create_ruleset`")  # noqa: E501
        # verify the required parameter 'softwareinventoryid' is set
        if ('softwareinventoryid' not in params or
                params['softwareinventoryid'] is None):
            raise ValueError("Missing the required parameter `softwareinventoryid` when calling `create_ruleset`")  # noqa: E501
        # verify the required parameter 'api_version' is set
        if ('api_version' not in params or
                params['api_version'] is None):
            raise ValueError("Missing the required parameter `api_version` when calling `create_ruleset`")  # noqa: E501

        collection_formats = {}

        path_params = {}

        # The inventory ID travels as a query parameter.
        query_params = []
        if 'softwareinventoryid' in params:
            query_params.append(('softwareinventoryid', params['softwareinventoryid']))  # noqa: E501

        # The API version is negotiated via the `api-version` header.
        header_params = {}
        if 'api_version' in params:
            header_params['api-version'] = params['api_version']  # noqa: E501

        form_params = []
        local_var_files = {}

        # The ruleset settings form the JSON request body.
        body_params = None
        if 'ruleset' in params:
            body_params = params['ruleset']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['DefaultAuthentication']  # noqa: E501

        return self.api_client.call_api(
            '/rulesets', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Ruleset',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_ruleset(self, ruleset_id, api_version, **kwargs): # noqa: E501
"""Delete a Ruleset # noqa: E501
Delete a ruleset by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_ruleset(ruleset_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int ruleset_id: The ID number of the ruleset to delete. (required)
:param str api_version: The version of the api being called. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_ruleset_with_http_info(ruleset_id, api_version, **kwargs) # noqa: E501
else:
(data) = self.delete_ruleset_with_http_info(ruleset_id, api_version, **kwargs) # noqa: E501
return data
    def delete_ruleset_with_http_info(self, ruleset_id, api_version, **kwargs):  # noqa: E501
        """Delete a Ruleset  # noqa: E501

        Delete a ruleset by ID.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_ruleset_with_http_info(ruleset_id, api_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int ruleset_id: The ID number of the ruleset to delete. (required)
        :param str api_version: The version of the api being called. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted beyond the positional parameters.
        all_params = ['ruleset_id', 'api_version']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_ruleset" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'ruleset_id' is set
        if ('ruleset_id' not in params or
                params['ruleset_id'] is None):
            raise ValueError("Missing the required parameter `ruleset_id` when calling `delete_ruleset`")  # noqa: E501
        # verify the required parameter 'api_version' is set
        if ('api_version' not in params or
                params['api_version'] is None):
            raise ValueError("Missing the required parameter `api_version` when calling `delete_ruleset`")  # noqa: E501

        # The ruleset ID must contain at least one digit.
        if 'ruleset_id' in params and not re.search('\\d+', str(params['ruleset_id'])):  # noqa: E501
            raise ValueError("Invalid value for parameter `ruleset_id` when calling `delete_ruleset`, must conform to the pattern `/\\d+/`")  # noqa: E501
        collection_formats = {}

        # The ruleset ID is substituted into the URL path.
        path_params = {}
        if 'ruleset_id' in params:
            path_params['rulesetID'] = params['ruleset_id']  # noqa: E501

        query_params = []

        # The API version is negotiated via the `api-version` header.
        header_params = {}
        if 'api_version' in params:
            header_params['api-version'] = params['api_version']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['DefaultAuthentication']  # noqa: E501

        return self.api_client.call_api(
            '/rulesets/{rulesetID}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_ruleset_rule(self, ruleset_id, rule_id, api_version, **kwargs): # noqa: E501
"""Delete a Ruleset Rule # noqa: E501
Delete a ruleset rule by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_ruleset_rule(ruleset_id, rule_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int ruleset_id: The ID number of the ruleset. (required)
:param int rule_id: The ID number of the rule to delete. (required)
:param str api_version: The version of the api being called. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_ruleset_rule_with_http_info(ruleset_id, rule_id, api_version, **kwargs) # noqa: E501
else:
(data) = self.delete_ruleset_rule_with_http_info(ruleset_id, rule_id, api_version, **kwargs) # noqa: E501
return data
    def delete_ruleset_rule_with_http_info(self, ruleset_id, rule_id, api_version, **kwargs):  # noqa: E501
        """Delete a Ruleset Rule  # noqa: E501

        Delete a ruleset rule by ID.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_ruleset_rule_with_http_info(ruleset_id, rule_id, api_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int ruleset_id: The ID number of the ruleset. (required)
        :param int rule_id: The ID number of the rule to delete. (required)
        :param str api_version: The version of the api being called. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted beyond the positional parameters.
        all_params = ['ruleset_id', 'rule_id', 'api_version']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument this endpoint does not understand.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_ruleset_rule" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'ruleset_id' is set
        if ('ruleset_id' not in params or
                params['ruleset_id'] is None):
            raise ValueError("Missing the required parameter `ruleset_id` when calling `delete_ruleset_rule`")  # noqa: E501
        # verify the required parameter 'rule_id' is set
        if ('rule_id' not in params or
                params['rule_id'] is None):
            raise ValueError("Missing the required parameter `rule_id` when calling `delete_ruleset_rule`")  # noqa: E501
        # verify the required parameter 'api_version' is set
        if ('api_version' not in params or
                params['api_version'] is None):
            raise ValueError("Missing the required parameter `api_version` when calling `delete_ruleset_rule`")  # noqa: E501

        # Both path IDs must contain at least one digit.
        if 'ruleset_id' in params and not re.search('\\d+', str(params['ruleset_id'])):  # noqa: E501
            raise ValueError("Invalid value for parameter `ruleset_id` when calling `delete_ruleset_rule`, must conform to the pattern `/\\d+/`")  # noqa: E501
        if 'rule_id' in params and not re.search('\\d+', str(params['rule_id'])):  # noqa: E501
            raise ValueError("Invalid value for parameter `rule_id` when calling `delete_ruleset_rule`, must conform to the pattern `/\\d+/`")  # noqa: E501
        collection_formats = {}

        # Both IDs are substituted into the URL path.
        path_params = {}
        if 'ruleset_id' in params:
            path_params['rulesetID'] = params['ruleset_id']  # noqa: E501
        if 'rule_id' in params:
            path_params['ruleID'] = params['rule_id']  # noqa: E501

        query_params = []

        # The API version is negotiated via the `api-version` header.
        header_params = {}
        if 'api_version' in params:
            header_params['api-version'] = params['api_version']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['DefaultAuthentication']  # noqa: E501

        return self.api_client.call_api(
            '/rulesets/{rulesetID}/rules/{ruleID}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def describe_ruleset(self, ruleset_id, api_version, **kwargs): # noqa: E501
"""Describe a Ruleset # noqa: E501
Describe a ruleset by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.describe_ruleset(ruleset_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int ruleset_id: The ID number of the ruleset to describe. (required)
:param str api_version: The version of the api being called. (required)
:return: Ruleset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.describe_ruleset_with_http_info(ruleset_id, api_version, **kwargs) # noqa: E501
else:
(data) = self.describe_ruleset_with_http_info(ruleset_id, api_version, **kwargs) # noqa: E501
return data
def describe_ruleset_with_http_info(self, ruleset_id, api_version, **kwargs):  # noqa: E501
    """Describe a Ruleset.  # noqa: E501

    Describe a ruleset by ID.

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset to describe. (required)
    :param str api_version: The version of the api being called. (required)
    :return: Ruleset
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    allowed = {'ruleset_id', 'api_version', 'async_req',
               '_return_http_data_only', '_preload_content',
               '_request_timeout'}
    params = {'ruleset_id': ruleset_id, 'api_version': api_version}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method describe_ruleset" % key
            )
        params[key] = val

    # Required parameters must be present and non-None.
    if params.get('ruleset_id') is None:
        raise ValueError("Missing the required parameter `ruleset_id` when calling `describe_ruleset`")  # noqa: E501
    if params.get('api_version') is None:
        raise ValueError("Missing the required parameter `api_version` when calling `describe_ruleset`")  # noqa: E501
    # NOTE(review): re.search is unanchored, so any value containing a digit
    # passes; kept as-is to preserve the generated behaviour.
    if not re.search('\\d+', str(params['ruleset_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `ruleset_id` when calling `describe_ruleset`, must conform to the pattern `/\\d+/`")  # noqa: E501

    path_params = {'rulesetID': params['ruleset_id']}  # noqa: E501
    header_params = {'api-version': params['api_version']}  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/rulesets/{rulesetID}', 'GET',
        path_params,
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Ruleset',  # noqa: E501
        auth_settings=['DefaultAuthentication'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def describe_ruleset_rule(self, ruleset_id, rule_id, api_version, **kwargs):  # noqa: E501
    """Describe a Ruleset Rule.  # noqa: E501

    Describe a ruleset rule by ID.

    This method is synchronous by default; pass async_req=True to get a
    request thread instead:
    >>> thread = api.describe_ruleset_rule(ruleset_id, rule_id, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset. (required)
    :param int rule_id: The ID number of the rule to describe. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApplicationControlRule
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the payload only; when async_req is set the helper
    # returns the request thread and the caller unwraps it via thread.get().
    kwargs['_return_http_data_only'] = True
    return self.describe_ruleset_rule_with_http_info(ruleset_id, rule_id, api_version, **kwargs)  # noqa: E501
def describe_ruleset_rule_with_http_info(self, ruleset_id, rule_id, api_version, **kwargs):  # noqa: E501
    """Describe a Ruleset Rule.  # noqa: E501

    Describe a ruleset rule by ID.

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset. (required)
    :param int rule_id: The ID number of the rule to describe. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApplicationControlRule
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    allowed = {'ruleset_id', 'rule_id', 'api_version', 'async_req',
               '_return_http_data_only', '_preload_content',
               '_request_timeout'}
    params = {'ruleset_id': ruleset_id, 'rule_id': rule_id,
              'api_version': api_version}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method describe_ruleset_rule" % key
            )
        params[key] = val

    # Required parameters must be present and non-None.
    if params.get('ruleset_id') is None:
        raise ValueError("Missing the required parameter `ruleset_id` when calling `describe_ruleset_rule`")  # noqa: E501
    if params.get('rule_id') is None:
        raise ValueError("Missing the required parameter `rule_id` when calling `describe_ruleset_rule`")  # noqa: E501
    if params.get('api_version') is None:
        raise ValueError("Missing the required parameter `api_version` when calling `describe_ruleset_rule`")  # noqa: E501
    # NOTE(review): re.search is unanchored; kept to preserve generated behaviour.
    if not re.search('\\d+', str(params['ruleset_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `ruleset_id` when calling `describe_ruleset_rule`, must conform to the pattern `/\\d+/`")  # noqa: E501
    if not re.search('\\d+', str(params['rule_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `rule_id` when calling `describe_ruleset_rule`, must conform to the pattern `/\\d+/`")  # noqa: E501

    path_params = {'rulesetID': params['ruleset_id'],  # noqa: E501
                   'ruleID': params['rule_id']}  # noqa: E501
    header_params = {'api-version': params['api_version']}  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/rulesets/{rulesetID}/rules/{ruleID}', 'GET',
        path_params,
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ApplicationControlRule',  # noqa: E501
        auth_settings=['DefaultAuthentication'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_ruleset_rules(self, ruleset_id, api_version, **kwargs):  # noqa: E501
    """List Ruleset Rules.  # noqa: E501

    List all rules of a ruleset by ID.

    This method is synchronous by default; pass async_req=True to get a
    request thread instead:
    >>> thread = api.list_ruleset_rules(ruleset_id, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApplicationControlRules
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the payload only; when async_req is set the helper
    # returns the request thread and the caller unwraps it via thread.get().
    kwargs['_return_http_data_only'] = True
    return self.list_ruleset_rules_with_http_info(ruleset_id, api_version, **kwargs)  # noqa: E501
def list_ruleset_rules_with_http_info(self, ruleset_id, api_version, **kwargs):  # noqa: E501
    """List Ruleset Rules.  # noqa: E501

    List all rules of a ruleset by ID.

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApplicationControlRules
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    allowed = {'ruleset_id', 'api_version', 'async_req',
               '_return_http_data_only', '_preload_content',
               '_request_timeout'}
    params = {'ruleset_id': ruleset_id, 'api_version': api_version}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_ruleset_rules" % key
            )
        params[key] = val

    # Required parameters must be present and non-None.
    if params.get('ruleset_id') is None:
        raise ValueError("Missing the required parameter `ruleset_id` when calling `list_ruleset_rules`")  # noqa: E501
    if params.get('api_version') is None:
        raise ValueError("Missing the required parameter `api_version` when calling `list_ruleset_rules`")  # noqa: E501
    # NOTE(review): re.search is unanchored; kept to preserve generated behaviour.
    if not re.search('\\d+', str(params['ruleset_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `ruleset_id` when calling `list_ruleset_rules`, must conform to the pattern `/\\d+/`")  # noqa: E501

    path_params = {'rulesetID': params['ruleset_id']}  # noqa: E501
    header_params = {'api-version': params['api_version']}  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/rulesets/{rulesetID}/rules', 'GET',
        path_params,
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ApplicationControlRules',  # noqa: E501
        auth_settings=['DefaultAuthentication'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def list_rulesets(self, api_version, **kwargs):  # noqa: E501
    """List Rulesets.  # noqa: E501

    Lists all rulesets.

    This method is synchronous by default; pass async_req=True to get a
    request thread instead:
    >>> thread = api.list_rulesets(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :return: Rulesets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the payload only; when async_req is set the helper
    # returns the request thread and the caller unwraps it via thread.get().
    kwargs['_return_http_data_only'] = True
    return self.list_rulesets_with_http_info(api_version, **kwargs)  # noqa: E501
def list_rulesets_with_http_info(self, api_version, **kwargs):  # noqa: E501
    """List Rulesets.  # noqa: E501

    Lists all rulesets.

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :return: Rulesets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    allowed = {'api_version', 'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout'}
    params = {'api_version': api_version}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_rulesets" % key
            )
        params[key] = val

    # The api-version header is mandatory.
    if params.get('api_version') is None:
        raise ValueError("Missing the required parameter `api_version` when calling `list_rulesets`")  # noqa: E501

    header_params = {'api-version': params['api_version']}  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/rulesets', 'GET',
        {},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Rulesets',  # noqa: E501
        auth_settings=['DefaultAuthentication'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def modify_ruleset_rule(self, ruleset_id, rule_id, rule, api_version, **kwargs):  # noqa: E501
    """Modify a Ruleset Rule.  # noqa: E501

    Modify a ruleset rule by ID.

    This method is synchronous by default; pass async_req=True to get a
    request thread instead:
    >>> thread = api.modify_ruleset_rule(ruleset_id, rule_id, rule, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset. (required)
    :param int rule_id: The ID number of the rule to modify. (required)
    :param ApplicationControlRule rule: The settings of the rule to modify. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApplicationControlRule
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the payload only; when async_req is set the helper
    # returns the request thread and the caller unwraps it via thread.get().
    kwargs['_return_http_data_only'] = True
    return self.modify_ruleset_rule_with_http_info(ruleset_id, rule_id, rule, api_version, **kwargs)  # noqa: E501
def modify_ruleset_rule_with_http_info(self, ruleset_id, rule_id, rule, api_version, **kwargs):  # noqa: E501
    """Modify a Ruleset Rule.  # noqa: E501

    Modify a ruleset rule by ID.

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset. (required)
    :param int rule_id: The ID number of the rule to modify. (required)
    :param ApplicationControlRule rule: The settings of the rule to modify. (required)
    :param str api_version: The version of the api being called. (required)
    :return: ApplicationControlRule
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    allowed = {'ruleset_id', 'rule_id', 'rule', 'api_version', 'async_req',
               '_return_http_data_only', '_preload_content',
               '_request_timeout'}
    params = {'ruleset_id': ruleset_id, 'rule_id': rule_id, 'rule': rule,
              'api_version': api_version}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method modify_ruleset_rule" % key
            )
        params[key] = val

    # Required parameters must be present and non-None.
    if params.get('ruleset_id') is None:
        raise ValueError("Missing the required parameter `ruleset_id` when calling `modify_ruleset_rule`")  # noqa: E501
    if params.get('rule_id') is None:
        raise ValueError("Missing the required parameter `rule_id` when calling `modify_ruleset_rule`")  # noqa: E501
    if params.get('rule') is None:
        raise ValueError("Missing the required parameter `rule` when calling `modify_ruleset_rule`")  # noqa: E501
    if params.get('api_version') is None:
        raise ValueError("Missing the required parameter `api_version` when calling `modify_ruleset_rule`")  # noqa: E501
    # NOTE(review): re.search is unanchored; kept to preserve generated behaviour.
    if not re.search('\\d+', str(params['ruleset_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `ruleset_id` when calling `modify_ruleset_rule`, must conform to the pattern `/\\d+/`")  # noqa: E501
    if not re.search('\\d+', str(params['rule_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `rule_id` when calling `modify_ruleset_rule`, must conform to the pattern `/\\d+/`")  # noqa: E501

    path_params = {'rulesetID': params['ruleset_id'],  # noqa: E501
                   'ruleID': params['rule_id']}  # noqa: E501
    header_params = {'api-version': params['api_version']}  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/rulesets/{rulesetID}/rules/{ruleID}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params['rule'],  # the modified rule settings are the request body
        post_params=[],
        files={},
        response_type='ApplicationControlRule',  # noqa: E501
        auth_settings=['DefaultAuthentication'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_ruleset_rules(self, ruleset_id, api_version, **kwargs):  # noqa: E501
    """Search Ruleset Rules.  # noqa: E501

    Search for ruleset rules using optional filters.

    This method is synchronous by default; pass async_req=True to get a
    request thread instead:
    >>> thread = api.search_ruleset_rules(ruleset_id, api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset. (required)
    :param str api_version: The version of the api being called. (required)
    :param SearchFilter search_filter: A collection of options used to filter the search results.
    :return: ApplicationControlRules
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the payload only; when async_req is set the helper
    # returns the request thread and the caller unwraps it via thread.get().
    kwargs['_return_http_data_only'] = True
    return self.search_ruleset_rules_with_http_info(ruleset_id, api_version, **kwargs)  # noqa: E501
def search_ruleset_rules_with_http_info(self, ruleset_id, api_version, **kwargs):  # noqa: E501
    """Search Ruleset Rules.  # noqa: E501

    Search for ruleset rules using optional filters.

    :param async_req bool
    :param int ruleset_id: The ID number of the ruleset. (required)
    :param str api_version: The version of the api being called. (required)
    :param SearchFilter search_filter: A collection of options used to filter the search results.
    :return: ApplicationControlRules
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    allowed = {'ruleset_id', 'api_version', 'search_filter', 'async_req',
               '_return_http_data_only', '_preload_content',
               '_request_timeout'}
    params = {'ruleset_id': ruleset_id, 'api_version': api_version}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_ruleset_rules" % key
            )
        params[key] = val

    # Required parameters must be present and non-None.
    if params.get('ruleset_id') is None:
        raise ValueError("Missing the required parameter `ruleset_id` when calling `search_ruleset_rules`")  # noqa: E501
    if params.get('api_version') is None:
        raise ValueError("Missing the required parameter `api_version` when calling `search_ruleset_rules`")  # noqa: E501
    # NOTE(review): re.search is unanchored; kept to preserve generated behaviour.
    if not re.search('\\d+', str(params['ruleset_id'])):  # noqa: E501
        raise ValueError("Invalid value for parameter `ruleset_id` when calling `search_ruleset_rules`, must conform to the pattern `/\\d+/`")  # noqa: E501

    path_params = {'rulesetID': params['ruleset_id']}  # noqa: E501
    header_params = {'api-version': params['api_version']}  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/rulesets/{rulesetID}/rules/search', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params.get('search_filter'),  # optional filter as request body
        post_params=[],
        files={},
        response_type='ApplicationControlRules',  # noqa: E501
        auth_settings=['DefaultAuthentication'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def search_rulesets(self, api_version, **kwargs):  # noqa: E501
    """Search Rulesets.  # noqa: E501

    Search for rulesets using optional filters.

    This method is synchronous by default; pass async_req=True to get a
    request thread instead:
    >>> thread = api.search_rulesets(api_version, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :param SearchFilter search_filter: A collection of options used to filter the search results.
    :return: Rulesets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the payload only; when async_req is set the helper
    # returns the request thread and the caller unwraps it via thread.get().
    kwargs['_return_http_data_only'] = True
    return self.search_rulesets_with_http_info(api_version, **kwargs)  # noqa: E501
def search_rulesets_with_http_info(self, api_version, **kwargs):  # noqa: E501
    """Search Rulesets.  # noqa: E501

    Search for rulesets using optional filters.

    :param async_req bool
    :param str api_version: The version of the api being called. (required)
    :param SearchFilter search_filter: A collection of options used to filter the search results.
    :return: Rulesets
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts.
    allowed = {'api_version', 'search_filter', 'async_req',
               '_return_http_data_only', '_preload_content',
               '_request_timeout'}
    params = {'api_version': api_version}
    for key, val in six.iteritems(kwargs):
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_rulesets" % key
            )
        params[key] = val

    # The api-version header is mandatory.
    if params.get('api_version') is None:
        raise ValueError("Missing the required parameter `api_version` when calling `search_rulesets`")  # noqa: E501

    header_params = {'api-version': params['api_version']}  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/rulesets/search', 'POST',
        {},  # path params
        [],  # query params
        header_params,
        body=params.get('search_filter'),  # optional filter as request body
        post_params=[],
        files={},
        response_type='Rulesets',  # noqa: E501
        auth_settings=['DefaultAuthentication'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 45.725151
| 311
| 0.611447
| 6,173
| 52,904
| 5.008586
| 0.033857
| 0.053044
| 0.029756
| 0.025875
| 0.97005
| 0.967689
| 0.964875
| 0.96122
| 0.956174
| 0.951679
| 0
| 0.017129
| 0.299259
| 52,904
| 1,156
| 312
| 45.764706
| 0.81687
| 0.319768
| 0
| 0.797806
| 0
| 0
| 0.247057
| 0.050946
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032915
| false
| 0
| 0.00627
| 0
| 0.087774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9932f0bdbc8b24b8e65bdfec0f0290b6c959e317
| 63
|
py
|
Python
|
samples/src/main/resources/datasets/python/89.py
|
sritchie/kotlingrad
|
8165ed1cd77220a5347c58cded4c6f2bcf22ee30
|
[
"Apache-2.0"
] | 11
|
2020-12-19T01:19:44.000Z
|
2021-12-25T20:43:33.000Z
|
src/main/resources/datasets/python/89.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | null | null | null |
src/main/resources/datasets/python/89.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | 2
|
2021-01-25T07:59:20.000Z
|
2021-08-07T07:13:49.000Z
|
def test1(a, b, c):
    """Exercise the syntactic forms of the ``del`` statement."""
    # Unbind a single local name.
    del a
    # Unbind several names in one statement.
    # NOTE(review): `a` was already unbound above, so this line would raise
    # an error at runtime; the snippet appears to be a parser-test sample
    # that is never executed — confirm before "fixing".
    del a, b, c
    # Delete attributes rather than names.
    del a.x, b.y
| 12.6
| 19
| 0.444444
| 16
| 63
| 1.75
| 0.5
| 0.428571
| 0.214286
| 0.428571
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.396825
| 63
| 4
| 20
| 15.75
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
996fcbcbbe11a986a965b67441161f9137d85690
| 17,877
|
py
|
Python
|
tests/pretty_printers.py
|
hakatashi/oj
|
cb156ff6d0a122a3865280c90dddf8871e6ff7cc
|
[
"MIT"
] | null | null | null |
tests/pretty_printers.py
|
hakatashi/oj
|
cb156ff6d0a122a3865280c90dddf8871e6ff7cc
|
[
"MIT"
] | null | null | null |
tests/pretty_printers.py
|
hakatashi/oj
|
cb156ff6d0a122a3865280c90dddf8871e6ff7cc
|
[
"MIT"
] | null | null | null |
"""This module has unit tests for onlinejudge_command.pretty_printers module.
"""
import textwrap
import unittest
from typing import *
from onlinejudge_command.output_comparators import CompareMode
from onlinejudge_command.pretty_printers import _LineDiffOp, _make_diff_between_file_and_file, _PrettyToken, _PrettyTokenType, _render_tokens, _tokenize_file_content_without_snipping, _tokenize_large_file_content, _tokenize_line, _tokenize_pretty_diff
class TokenizeLineTest(unittest.TestCase):
    """Unit tests for the single-line tokenizer ``_tokenize_line``."""

    def test_simple(self) -> None:
        # A plain word followed by LF splits into BODY + NEWLINE.
        self.assertEqual(_tokenize_line(line='hello\n'), [
            _PrettyToken(_PrettyTokenType.BODY, 'hello'),
            _PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
        ])

    def test_crlf(self) -> None:
        # CRLF is kept together as a single NEWLINE token.
        self.assertEqual(_tokenize_line(line='hello\r\n'), [
            _PrettyToken(_PrettyTokenType.BODY, 'hello'),
            _PrettyToken(_PrettyTokenType.NEWLINE, '\r\n'),
        ])

    def test_with_whitespace(self) -> None:
        # Interior blanks/tabs become one WHITESPACE token between BODY tokens.
        self.assertEqual(_tokenize_line(line='hello \t\tworld\n'), [
            _PrettyToken(_PrettyTokenType.BODY, 'hello'),
            _PrettyToken(_PrettyTokenType.WHITESPACE, ' \t\t'),
            _PrettyToken(_PrettyTokenType.BODY, 'world'),
            _PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
        ])

    def test_without_newline(self) -> None:
        # No trailing newline -> BODY token only.
        self.assertEqual(_tokenize_line(line='hello'), [
            _PrettyToken(_PrettyTokenType.BODY, 'hello'),
        ])

    def test_trailing_whitespace(self) -> None:
        # Trailing blanks get an explanatory HINT token before the newline.
        self.assertEqual(_tokenize_line(line='hello \n'), [
            _PrettyToken(_PrettyTokenType.BODY, 'hello'),
            _PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
            _PrettyToken(_PrettyTokenType.HINT, '(trailing whitespace)'),
            _PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
        ])

    def test_only_newline(self) -> None:
        self.assertEqual(_tokenize_line(line='\n'), [
            _PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
        ])

    def test_empty_string(self) -> None:
        # The empty string yields no tokens at all.
        no_tokens: List[_PrettyToken] = []
        self.assertEqual(_tokenize_line(line=''), no_tokens)
class TokenizeLargeFileContentTest(unittest.TestCase):
def test_small(self) -> None:
content = b'hello\nworld\n'
limit = 40
head = 20
tail = 10
char_in_line = 40
expected = [
_PrettyToken(_PrettyTokenType.BODY, 'hello'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
_PrettyToken(_PrettyTokenType.BODY, 'world'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]
actual = _tokenize_large_file_content(content=content, limit=limit, head=head, tail=tail, char_in_line=char_in_line)
self.assertEqual(actual, expected)
def test_too_many_chars(self) -> None:
content_chars = 100000
content = b'hello' * (content_chars // len(b'hello'))
limit = 40
head = 20
tail = 10
char_in_line = 40
expected = [
_PrettyToken(_PrettyTokenType.BODY, 'hello' * (head * char_in_line // len('hello'))),
_PrettyToken(_PrettyTokenType.HINT, '... ({} chars) ...'.format(content_chars - head * char_in_line - tail * char_in_line)),
_PrettyToken(_PrettyTokenType.BODY, 'hello' * (tail * char_in_line // len('hello'))),
_PrettyToken(_PrettyTokenType.HINT, '(no trailing newline)'),
]
actual = _tokenize_large_file_content(content=content, limit=limit, head=head, tail=tail, char_in_line=char_in_line)
self.assertEqual(actual, expected)
def test_too_many_lines(self) -> None:
content_lines = 100
content = b'hello\n' * content_lines
limit = 40
head = 20
tail = 10
char_in_line = 40
expected = []
for _ in range(head):
expected += [
_PrettyToken(_PrettyTokenType.BODY, 'hello'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]
expected += [
_PrettyToken(_PrettyTokenType.HINT, '... ({} lines) ...\n'.format(content_lines - head - tail)),
]
for _ in range(tail):
expected += [
_PrettyToken(_PrettyTokenType.BODY, 'hello'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]
actual = _tokenize_large_file_content(content=content, limit=limit, head=head, tail=tail, char_in_line=char_in_line)
self.assertEqual(actual, expected)
def test_empty(self) -> None:
content = b''
limit = 40
head = 20
tail = 10
char_in_line = 40
expected = [
_PrettyToken(_PrettyTokenType.HINT, '(empty)'),
]
actual = _tokenize_large_file_content(content=content, limit=limit, head=head, tail=tail, char_in_line=char_in_line)
self.assertEqual(actual, expected)
def test_only_newlines(self) -> None:
content = b'\r\n\n'
limit = 40
head = 20
tail = 10
char_in_line = 40
expected = [
_PrettyToken(_PrettyTokenType.NEWLINE, '\r\n'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
_PrettyToken(_PrettyTokenType.HINT, '(only newline)'),
]
actual = _tokenize_large_file_content(content=content, limit=limit, head=head, tail=tail, char_in_line=char_in_line)
self.assertEqual(actual, expected)
class TokenizeFileContentWithoutSnippingTest(unittest.TestCase):
def test_small(self) -> None:
content = b'hello\nworld\n'
expected = [
_PrettyToken(_PrettyTokenType.BODY, 'hello'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
_PrettyToken(_PrettyTokenType.BODY, 'world'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]
actual = _tokenize_file_content_without_snipping(content=content)
self.assertEqual(actual, expected)
def test_empty(self) -> None:
content = b''
expected = [
_PrettyToken(_PrettyTokenType.HINT, '(empty)'),
]
actual = _tokenize_file_content_without_snipping(content=content)
self.assertEqual(actual, expected)
def test_only_newlines(self) -> None:
content = b'\r\n\n'
expected = [
_PrettyToken(_PrettyTokenType.NEWLINE, '\r\n'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
_PrettyToken(_PrettyTokenType.HINT, '(only newline)'),
]
actual = _tokenize_file_content_without_snipping(content=content)
self.assertEqual(actual, expected)
class RenderTokensTest(unittest.TestCase):
def test_simple(self) -> None:
tokens = [
_PrettyToken(_PrettyTokenType.BODY, 'hello'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
_PrettyToken(_PrettyTokenType.BODY, 'world'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]
expected = ''.join([
'<bold>hello</bold>',
'<dim>\n</dim>',
'<bold>world</bold>',
'<dim>\n</dim>',
])
font_dim = lambda s: '<dim>' + s + '</dim>'
font_bold = lambda s: '<bold>' + s + '</bold>'
actual = _render_tokens(tokens=tokens, font_bold=font_bold, font_dim=font_dim)
self.assertEqual(actual, expected)
def test_complicated(self) -> None:
tokens = [
_PrettyToken(_PrettyTokenType.BODY, 'hello world'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' \t'),
_PrettyToken(_PrettyTokenType.HINT, 'this is a hint message'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\r\n'),
]
expected = ''.join([
'<bold>hello world</bold>',
'<dim>_\\t</dim>',
'<dim>this is a hint message</dim>',
'<dim>\\r\n</dim>',
])
font_dim = lambda s: '<dim>' + s + '</dim>'
font_bold = lambda s: '<bold>' + s + '</bold>'
actual = _render_tokens(tokens=tokens, font_bold=font_bold, font_dim=font_dim)
self.assertEqual(actual, expected)
class MakeDiffBetweenFileAndFileTest(unittest.TestCase):
def test_word_by_word(self) -> None:
a = ''.join([
'1 2 3\n',
'4 -1\n',
'6\n',
])
b = ''.join([
'1 2 3\n',
'4 5\n',
'6\n',
])
compare_mode = CompareMode.CRLF_INSENSITIVE_EXACT_MATCH
expected = [
_LineDiffOp(lineno=1, left=[
_PrettyToken(_PrettyTokenType.BODY, '4'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
_PrettyToken(_PrettyTokenType.BODY_HIGHLIGHT_LEFT, '-1'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
], right=[
_PrettyToken(_PrettyTokenType.BODY, '4'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
_PrettyToken(_PrettyTokenType.BODY_HIGHLIGHT_RIGHT, '5'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]),
]
actual = _make_diff_between_file_and_file(a, b, compare_mode=compare_mode)
self.assertEqual(actual, expected)
def test_line_difflib(self) -> None:
a = ''.join([
'1 3\n',
'wow\n',
'he llo word\n',
])
b = ''.join([
'1 2 3\n',
'wow\n',
'hello world\n',
])
compare_mode = CompareMode.CRLF_INSENSITIVE_EXACT_MATCH
expected = [
_LineDiffOp(lineno=0, left=[
_PrettyToken(_PrettyTokenType.BODY, '1'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
_PrettyToken(_PrettyTokenType.BODY, '3'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
], right=[
_PrettyToken(_PrettyTokenType.BODY, '1'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
_PrettyToken(_PrettyTokenType.BODY_HIGHLIGHT_RIGHT, '2'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
_PrettyToken(_PrettyTokenType.BODY, '3'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]),
_LineDiffOp(lineno=2, left=[
_PrettyToken(_PrettyTokenType.BODY, 'he'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
_PrettyToken(_PrettyTokenType.BODY, 'llo'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
_PrettyToken(_PrettyTokenType.BODY, 'word'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
], right=[
_PrettyToken(_PrettyTokenType.BODY, 'hello'),
_PrettyToken(_PrettyTokenType.WHITESPACE, ' '),
_PrettyToken(_PrettyTokenType.BODY, 'wor'),
_PrettyToken(_PrettyTokenType.BODY_HIGHLIGHT_RIGHT, 'l'),
_PrettyToken(_PrettyTokenType.BODY, 'd'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]),
]
actual = _make_diff_between_file_and_file(a, b, compare_mode=compare_mode)
self.assertEqual(actual, expected)
def test_file_difflib(self) -> None:
a = ''.join([
'foo\n',
'baz\n',
'hello\n',
'world\n',
'hey\n',
'wow\n',
])
b = ''.join([
'foo\n',
'bar\n',
'baz\n',
'hello\n',
'world\n',
'wow\n',
'wow\n',
])
compare_mode = CompareMode.CRLF_INSENSITIVE_EXACT_MATCH
expected = [
_LineDiffOp(lineno=1, left=None, right=[
_PrettyToken(_PrettyTokenType.BODY_HIGHLIGHT_RIGHT, 'bar'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]),
_LineDiffOp(lineno=4, left=[
_PrettyToken(_PrettyTokenType.BODY_HIGHLIGHT_LEFT, 'hey'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
], right=None),
_LineDiffOp(lineno=6, left=None, right=[
_PrettyToken(_PrettyTokenType.BODY_HIGHLIGHT_RIGHT, 'wow'),
_PrettyToken(_PrettyTokenType.NEWLINE, '\n'),
]),
]
actual = _make_diff_between_file_and_file(a, b, compare_mode=compare_mode)
self.assertEqual(actual, expected)
class MakePrettyDiffTest(unittest.TestCase):
def test_word_by_word(self) -> None:
a = ''.join([
'1 2 3\n',
'4 -1\n',
'6\n',
])
b = ''.join([
'1 2 3\n',
'4 5\n',
'6\n',
])
compare_mode = CompareMode.CRLF_INSENSITIVE_EXACT_MATCH
char_in_line = 40
limit = 40
expected = textwrap.dedent("""\
output: expected:
1| 1_2_3 1| 1_2_3
2| 4_-1 2| 4_5
3| 6 3| 6
""")
font_dim = lambda s: s
font_bold = lambda s: s
font_red = lambda s: s
font_blue = lambda s: s
tokens = _tokenize_pretty_diff(a, expected=b, compare_mode=compare_mode, char_in_line=char_in_line, limit=limit)
actual = _render_tokens(tokens=tokens, font_dim=font_dim, font_bold=font_bold, font_red=font_red, font_blue=font_blue)
self.assertEqual(actual, expected)
def test_line_difflib(self) -> None:
a = ''.join([
'1 3\n',
'wow\n',
'he llo word\n',
])
b = ''.join([
'1 2 3\n',
'wow\n',
'hello world\n',
])
compare_mode = CompareMode.CRLF_INSENSITIVE_EXACT_MATCH
char_in_line = 40
limit = 40
expected = textwrap.dedent("""\
output: expected:
1| 1_3 1| 1_2_3
2| wow 2| wow
3| he_llo_word 3| hello_world
""")
font_dim = lambda s: s
font_bold = lambda s: s
font_red = lambda s: s
font_blue = lambda s: s
tokens = _tokenize_pretty_diff(a, expected=b, compare_mode=compare_mode, char_in_line=char_in_line, limit=limit)
actual = _render_tokens(tokens=tokens, font_dim=font_dim, font_bold=font_bold, font_red=font_red, font_blue=font_blue)
self.assertEqual(actual, expected)
def test_file_difflib(self) -> None:
a = ''.join([
'foo\n',
'baz\n',
'hello\n',
'world\n',
'hey\n',
'wow\n',
])
b = ''.join([
'foo\n',
'bar\n',
'baz\n',
'hello\n',
'world\n',
'wow\n',
'wow\n',
])
compare_mode = CompareMode.CRLF_INSENSITIVE_EXACT_MATCH
char_in_line = 40
limit = 40
expected = textwrap.dedent("""\
output: expected:
1| foo 1| foo
2| bar
2| baz 3| baz
3| hello 4| hello
4| world 5| world
5| hey
6| wow 6| wow
7| wow
""")
font_dim = lambda s: s
font_bold = lambda s: s
font_red = lambda s: s
font_blue = lambda s: s
tokens = _tokenize_pretty_diff(a, expected=b, compare_mode=compare_mode, char_in_line=char_in_line, limit=limit)
actual = _render_tokens(tokens=tokens, font_dim=font_dim, font_bold=font_bold, font_red=font_red, font_blue=font_blue)
self.assertEqual(actual, expected)
class MakePrettyDiffLimitTest(unittest.TestCase):
def test_with_limit(self) -> None:
a = ''.join([
'a\n',
] * 100)
b = ''.join([
'b\n',
] * 100)
compare_mode = CompareMode.CRLF_INSENSITIVE_EXACT_MATCH
char_in_line = 40
limit = 40
expected = 1 + limit + 1
font_dim = lambda s: s
font_bold = lambda s: s
font_red = lambda s: s
font_blue = lambda s: s
tokens = _tokenize_pretty_diff(a, expected=b, compare_mode=compare_mode, char_in_line=char_in_line, limit=limit)
actual = _render_tokens(tokens=tokens, font_dim=font_dim, font_bold=font_bold, font_red=font_red, font_blue=font_blue)
self.assertEqual(len(actual.splitlines()), expected)
def test_without_limit(self) -> None:
a = ''.join([
'a\n',
] * 100)
b = ''.join([
'b\n',
] * 100)
compare_mode = CompareMode.CRLF_INSENSITIVE_EXACT_MATCH
char_in_line = 40
limit = -1
expected = 1 + 100
font_dim = lambda s: s
font_bold = lambda s: s
font_red = lambda s: s
font_blue = lambda s: s
tokens = _tokenize_pretty_diff(a, expected=b, compare_mode=compare_mode, char_in_line=char_in_line, limit=limit)
actual = _render_tokens(tokens=tokens, font_dim=font_dim, font_bold=font_bold, font_red=font_red, font_blue=font_blue)
self.assertEqual(len(actual.splitlines()), expected)
| 35.754
| 251
| 0.561895
| 1,814
| 17,877
| 5.223815
| 0.07387
| 0.230477
| 0.113972
| 0.082524
| 0.855846
| 0.814162
| 0.787041
| 0.768784
| 0.735331
| 0.690798
| 0
| 0.014597
| 0.321698
| 17,877
| 499
| 252
| 35.825651
| 0.766865
| 0.004139
| 0
| 0.751142
| 0
| 0
| 0.096145
| 0
| 0
| 0
| 0
| 0
| 0.057078
| 1
| 0.057078
| false
| 0
| 0.011416
| 0
| 0.084475
| 0.002283
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
51325c99fe1f1d09c761b7895273b4820b21a207
| 43,668
|
py
|
Python
|
chc/proof/CPOPredicate.py
|
Databean/CodeHawk-C
|
98720753beb51e0bf5105f8f6838618292fbf55c
|
[
"MIT"
] | 10
|
2020-08-17T15:35:55.000Z
|
2022-03-23T14:39:57.000Z
|
chc/proof/CPOPredicate.py
|
kestreltechnology/CodeHawk-C
|
db0fa92fa630cd919f29021d464533f0e7170fed
|
[
"MIT"
] | 31
|
2020-07-17T05:45:43.000Z
|
2021-05-29T04:49:49.000Z
|
chc/proof/CPOPredicate.py
|
kestreltechnology/CodeHawk-C
|
db0fa92fa630cd919f29021d464533f0e7170fed
|
[
"MIT"
] | 3
|
2020-06-13T05:32:34.000Z
|
2021-09-16T02:31:39.000Z
|
# ------------------------------------------------------------------------------
# CodeHawk C Analyzer
# Author: Henny Sipma
# ------------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2017-2020 Kestrel Technology LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ------------------------------------------------------------------------------
import chc.app.CDictionaryRecord as CD
import chc.app.CExp as CX
# Maps the short tag stored in a predicate record (tags[0]) to the
# human-readable proof-obligation predicate name.  Entry order preserved.
po_predicate_names = {
    "ab": "allocation-base",
    "b": "buffer",
    "c": "cast",
    "cb": "common-base",
    "cbt": "common-base-type",
    "cls": "can-leave-scope",
    "cr": "controlled-resource",
    "cssl": "signed-to-signed-cast-lb",
    "cssu": "signed-to-signed-cast-ub",
    "csul": "signed-to-unsigned-cast-lb",
    "csuu": "signed-to-unsigned-cast-ub",
    "cus": "unsigned-to-signed-cast",
    "cuu": "unsigned-to-unsigned-cast",
    "dr": "distinct-region",
    "fc": "format-cast",
    "ft": "format-string",
    "ga": "global-address",
    "ha": "heap-address",
    "i": "initialized",
    "ilb": "index-lower-bound",
    "io": "int-overflow",
    "ir": "initialized-range",
    "is": "in-scope",
    "iu": "int-underflow",
    "iub": "index-upper-bound",
    "lb": "lower-bound",
    "nm": "new-memory",
    "nn": "not-null",
    "nneg": "non-negative",
    "no": "no-overlap",
    "nt": "null-terminated",
    "null": "null",
    "pc": "pointer-cast",
    "plb": "ptr-lower-bound",
    "pre": "precondition",
    "prm": "preserved-all-memory",
    "pub": "ptr-upper-bound",
    "pubd": "ptr-upper-bound-deref",
    "pv": "preserves-value",
    "sae": "stack-address-escape",
    "tao": "type-at-offset",
    "ub": "upper-bound",
    "uio": "uint-overflow",
    "uiu": "uint-underflow",
    "va": "var-args",
    "vc": "value-constraint",
    "vm": "valid-mem",
    "w": "width-overflow",
    "z": "not-zero",
}
def get_predicate_tag(name):
    """Return the short tag for a long predicate name, or None if unknown.

    Inverse lookup of `po_predicate_names` (e.g. "not-null" -> "nn").
    The original fell through to an implicit None on a miss; this makes
    that behavior explicit via dict.get.
    """
    revnames = {v: k for (k, v) in po_predicate_names.items()}
    return revnames.get(name)
class CPOPredicate(CD.CDictionaryRecord):
    """Base class for proof-obligation predicates.

    Every is_* query defaults to False; each concrete subclass overrides
    exactly the one(s) that apply to it.  Fix: the base class was missing
    is_distinct_region, although CPODistinctRegion overrides it -- querying
    it on any other predicate raised AttributeError.  Added here with the
    same False default as its siblings.
    """

    def __init__(self, cd, index, tags, args):
        CD.CDictionaryRecord.__init__(self, cd, index, tags, args)

    def get_tag(self):
        # Long name of this predicate, looked up from the short tag.
        return po_predicate_names[self.tags[0]]

    def is_allocation_base(self):
        return False

    def is_buffer(self):
        return False

    def is_cast(self):
        return False

    def is_common_base(self):
        return False

    def is_format_cast(self):
        return False

    def is_controlled_resource(self):
        return False

    def is_distinct_region(self):
        # Added for consistency: CPODistinctRegion overrides this to True.
        return False

    def is_format_string(self):
        return False

    def is_in_scope(self):
        return False

    def is_can_leave_scope(self):
        return False

    def is_global_address(self):
        return False

    def is_heap_address(self):
        return False

    def is_index_lower_bound(self):
        return False

    def is_index_upper_bound(self):
        return False

    def is_initialized(self):
        return False

    def is_initialized_range(self):
        return False

    def is_int_overflow(self):
        return False

    def is_int_underflow(self):
        return False

    def is_lower_bound(self):
        return False

    def is_new_memory(self):
        return False

    def is_non_negative(self):
        return False

    def is_no_overlap(self):
        return False

    def is_not_null(self):
        return False

    def is_not_zero(self):
        return False

    def is_null(self):
        return False

    def is_null_terminated(self):
        return False

    def is_pointer_cast(self):
        return False

    def is_preserved_all_memory(self):
        return False

    def is_ptr_lower_bound(self):
        return False

    def is_ptr_upper_bound(self):
        return False

    def is_ptr_upper_bound_deref(self):
        return False

    def is_rev_buffer(self):
        return False

    def is_signed_to_signed_cast_lb(self):
        return False

    def is_signed_to_signed_cast_ub(self):
        return False

    def is_signed_to_unsigned_cast_lb(self):
        return False

    def is_signed_to_unsigned_cast_ub(self):
        return False

    def is_unsigned_to_signed_cast(self):
        return False

    def is_unsigned_to_unsigned_cast(self):
        return False

    def is_stack_address_escape(self):
        return False

    def is_type_at_offset(self):
        return False

    def is_upper_bound(self):
        return False

    def is_valid_mem(self):
        return False

    def is_value_constraint(self):
        return False

    def is_width_overflow(self):
        return False

    def has_variable(self, vid):
        # Default: predicate does not mention variable `vid`.
        return False

    def has_variable_op(self, vid, op):
        return False

    def has_argument(self, vid):
        return False

    def has_variable_deref(self, vid):
        return False

    def has_ref_type(self):
        return False

    def __str__(self):
        return "po-predicate " + self.tags[0]
class CPONotNull(CPOPredicate):
    """Obligation: the expression is not null.

    tags -- 0: 'nn'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_not_null(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def has_argument(self, vid):
        # True iff the expression is a plain variable lvalue with id `vid`.
        exp = self.get_exp()
        if not exp.is_lval():
            return False
        lhost = exp.get_lval().get_lhost()
        return lhost.is_var() and lhost.get_vid() == vid

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOGlobalAddress(CPOPredicate):
    """Obligation: the expression is a global address.

    tags -- 0: 'ga'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_global_address(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def has_argument(self, vid):
        # True iff the expression is a plain variable lvalue with id `vid`.
        exp = self.get_exp()
        if not exp.is_lval():
            return False
        lhost = exp.get_lval().get_lhost()
        return lhost.is_var() and lhost.get_vid() == vid

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOHeapAddress(CPOPredicate):
    """Obligation: the expression is a heap address.

    tags -- 0: 'ha'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # Original coerces args[0] with int(); preserved.
        return self.cd.dictionary.get_exp(int(self.args[0]))

    def is_heap_address(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def has_argument(self, vid):
        # True iff the expression is a plain variable lvalue with id `vid`.
        exp = self.get_exp()
        if not exp.is_lval():
            return False
        lhost = exp.get_lval().get_lhost()
        return lhost.is_var() and lhost.get_vid() == vid

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPODistinctRegion(CPOPredicate):
    """Obligation: the expression lies in a region distinct from a memref.

    tags -- 0: 'dr'
    args -- 0: exp, 1: memref index
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(int(self.args[0]))

    def get_memref(self):
        return int(self.args[1])

    def is_distinct_region(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def has_argument(self, vid):
        # True iff the expression is a plain variable lvalue with id `vid`.
        exp = self.get_exp()
        if not exp.is_lval():
            return False
        lhost = exp.get_lval().get_lhost()
        return lhost.is_var() and lhost.get_vid() == vid

    def __str__(self):
        return "{}({},{})".format(self.get_tag(), self.get_exp(), self.get_memref())
class CPONull(CPOPredicate):
    """Obligation: the expression is null.

    tags -- 0: 'null'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_null(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOValidMem(CPOPredicate):
    """Obligation: the expression points to valid (not freed) memory.

    tags -- 0: 'vm'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_valid_mem(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def has_argument(self, vid):
        # True iff the expression is a plain variable lvalue with id `vid`.
        exp = self.get_exp()
        if not exp.is_lval():
            return False
        lhost = exp.get_lval().get_lhost()
        return lhost.is_var() and lhost.get_vid() == vid

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOControlledResource(CPOPredicate):
    """Obligation concerning a controlled resource (e.g. memory).

    tags -- 0: 'cr', 1: name of resource
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_resource(self):
        return self.tags[1]

    def is_controlled_resource(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def has_argument(self, vid):
        # True iff the expression is a plain variable lvalue with id `vid`.
        exp = self.get_exp()
        if not exp.is_lval():
            return False
        lhost = exp.get_lval().get_lhost()
        return lhost.is_var() and lhost.get_vid() == vid

    def __str__(self):
        return "{}:{}({})".format(self.get_tag(), self.get_resource(), self.get_exp())
class CPOCanLeaveScope(CPOPredicate):
    """Obligation: the expression's value may leave its scope.

    tags -- 0: 'cls'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_can_leave_scope(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOStackAddressEscape(CPOPredicate):
    """Obligation: no stack address escapes via the (optional) lval.

    tags -- 0: 'sae'
    args -- 0: lval option (negative index means absent), 1: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_lval(self):
        return self.cd.dictionary.get_lval(int(self.args[0]))

    def get_exp(self):
        return self.cd.dictionary.get_exp(int(self.args[1]))

    def has_lval(self):
        # A non-negative index encodes a present lval option.
        return (int(self.args[0])) >= 0

    def is_stack_address_escape(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        prefix = str(self.get_lval()) + "," if self.has_lval() else ""
        return "{}({}{})".format(self.get_tag(), prefix, self.get_exp())
class CPOInScope(CPOPredicate):
    """Obligation: the expression is in scope.

    tags -- 0: 'is'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_in_scope(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOAllocationBase(CPOPredicate):
    """Obligation: the expression is the base of an allocation.

    tags -- 0: 'ab'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_allocation_base(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPONewMemory(CPOPredicate):
    """Obligation: the expression refers to new memory.

    tags -- 0: 'nm'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_new_memory(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOBuffer(CPOPredicate):
    """Obligation: the pointer addresses a buffer of at least `length` bytes.

    tags -- 0: 'b'
    args -- 0: exp (pointer to buffer), 1: exp (length in bytes)
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_length(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_buffer(self):
        return True

    def has_variable(self, vid):
        # Checks both the pointer and the length expression.
        return self.get_exp().has_variable(vid) or self.get_length().has_variable(vid)

    def __str__(self):
        return "{}({},size:{})".format(self.get_tag(), self.get_exp(), self.get_length())
class CPORevBuffer(CPOPredicate):
    """Obligation: at least `length` bytes precede the pointer in its buffer.

    tags -- 0: 'b' per the original doc -- NOTE(review): looks copy-pasted
            from CPOBuffer; confirm the actual rev-buffer tag.
    args -- 0: exp (pointer to buffer), 1: exp (bytes before pointer)
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_length(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_rev_buffer(self):
        return True

    def has_variable(self, vid):
        # Checks both the pointer and the length expression.
        return self.get_exp().has_variable(vid) or self.get_length().has_variable(vid)

    def __str__(self):
        return "{}({},size:{})".format(self.get_tag(), self.get_exp(), self.get_length())
class CPOTypeAtOffset(CPOPredicate):
    """Obligation: the given type is valid at the expression's offset.

    tags -- 0: 'tao'
    args -- 0: typ, 1: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def is_type_at_offset(self):
        return True

    def __str__(self):
        return "{}({},{})".format(self.get_tag(), self.get_type(), self.get_exp())
class CPOLowerBound(CPOPredicate):
    """Obligation: the expression satisfies a lower bound for the given type.

    tags -- 0: 'lb'
    args -- 0: typ, 1: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_lower_bound(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},{})".format(self.get_tag(), self.get_type(), self.get_exp())
class CPOUpperBound(CPOPredicate):
    """Obligation: the expression satisfies an upper bound for the given type.

    tags -- 0: 'ub'
    args -- 0: typ, 1: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def is_upper_bound(self):
        return True

    def __str__(self):
        return "{}({},{})".format(self.get_tag(), self.get_type(), self.get_exp())
class CPOIndexLowerBound(CPOPredicate):
    """Obligation: the index expression is within its lower bound.

    tags -- 0: 'ilb'
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def is_index_lower_bound(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOIndexUpperBound(CPOPredicate):
    """Obligation: the index expression is below the given upper bound.

    tags -- 0: 'iub'
    args -- 0: index exp, 1: upper-bound exp

    NOTE(review): has_variable checks only the index expression, not the
    bound expression (unlike CPOBuffer, which checks both) -- confirm intended.
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_bound(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_index_upper_bound(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},bound:{})".format(self.get_tag(), self.get_exp(), self.get_bound())
class CPOInitialized(CPOPredicate):
    """Obligation: the lval is initialized.

    tags -- 0: 'i'
    args -- 0: lval
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_lval(self):
        return self.cd.dictionary.get_lval(self.args[0])

    def is_initialized(self):
        return True

    def has_variable(self, vid):
        return self.get_lval().has_variable(vid)

    def has_variable_deref(self, vid):
        return self.get_lval().has_variable_deref(vid)

    def has_ref_type(self):
        return self.get_lval().has_ref_type()

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_lval())
class CPOInitializedRange(CPOPredicate):
    """Obligation: a byte range starting at the pointer is initialized.

    tags -- 0: 'ir'
    args -- 0: exp (start of range), 1: len-exp (byte count)
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_length(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_initialized_range(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},len:{})".format(self.get_tag(), self.get_exp(), self.get_length())
class CPOCast(CPOPredicate):
    """Obligation: the cast of the expression between the two types is safe.

    tags -- 0: 'c'
    args -- 0: typ (from, current), 1: typ (to, target), 2: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[2])

    def get_from_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_tgt_type(self):
        return self.cd.dictionary.get_typ(self.args[1])

    def is_cast(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(),
            self.get_from_type(), self.get_tgt_type())
class CPOFormatCast(CPOPredicate):
    """Obligation: a format-related cast between the two types is safe.

    tags -- 0: 'c' per the original doc -- NOTE(review): po_predicate_names
            maps "fc" to "format-cast"; this docstring looks copy-pasted
            from CPOCast. Confirm the actual tag.
    args -- 0: typ (from, current), 1: typ (to, target), 2: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[2])

    def get_from_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_tgt_type(self):
        return self.cd.dictionary.get_typ(self.args[1])

    def is_format_cast(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(),
            self.get_from_type(), self.get_tgt_type())
class CPOPointerCast(CPOPredicate):
    """Obligation: the pointer cast between the two types is safe.

    tags -- 0: 'pc'
    args -- 0: typ (from, current), 1: typ (to, target), 2: exp (pointed-to)
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[2])

    def get_from_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_tgt_type(self):
        return self.cd.dictionary.get_typ(self.args[1])

    def is_pointer_cast(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def has_argument(self, vid):
        # True iff the expression is a plain variable lvalue with id `vid`.
        exp = self.get_exp()
        if not exp.is_lval():
            return False
        lhost = exp.get_lval().get_lhost()
        return lhost.is_var() and lhost.get_vid() == vid

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(),
            self.get_from_type(), self.get_tgt_type())
class CPOSignedToUnsignedCastLB(CPOPredicate):
    """Obligation: signed-to-unsigned cast respects the lower bound.

    tags -- 0: 'csul', 1: from ikind, 2: tgt ikind
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_from_kind(self):
        return self.tags[1]

    def get_tgt_kind(self):
        return self.tags[2]

    def is_signed_to_unsigned_cast_lb(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(),
            self.get_from_kind(), self.get_tgt_kind())
class CPOSignedToUnsignedCastUB(CPOPredicate):
    """Obligation: signed-to-unsigned cast respects the upper bound.

    tags -- 0: 'csuu', 1: from ikind, 2: tgt ikind
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_from_kind(self):
        return self.tags[1]

    def get_tgt_kind(self):
        return self.tags[2]

    def is_signed_to_unsigned_cast_ub(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(),
            self.get_from_kind(), self.get_tgt_kind())
class CPOUnsignedToSignedCast(CPOPredicate):
    """Obligation: unsigned-to-signed cast preserves the value.

    tags -- 0: 'cus', 1: from ikind, 2: tgt ikind
    args -- 0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_from_kind(self):
        return self.tags[1]

    def get_tgt_kind(self):
        return self.tags[2]

    def is_unsigned_to_signed_cast(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(),
            self.get_from_kind(), self.get_tgt_kind())
class CPOUnsignedToUnsignedCast(CPOPredicate):
    """Proof obligation: unsigned-to-unsigned cast preserves the value.

    tags:
        0: 'cuu'
        1: from ikind
        2: tgt ikind
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # args[0] indexes the file dictionary's expression table.
        return self.cd.dictionary.get_exp(self.args[0])

    def get_from_kind(self):
        return self.tags[1]

    def get_tgt_kind(self):
        return self.tags[2]

    def is_unsigned_to_unsigned_cast(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(), self.get_from_kind(), self.get_tgt_kind()
        )
class CPOSignedToSignedCastLB(CPOPredicate):
    """Proof obligation: signed-to-signed cast respects the lower bound.

    tags:
        0: 'cssl'
        1: from ikind
        2: tgt ikind
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # args[0] indexes the file dictionary's expression table.
        return self.cd.dictionary.get_exp(self.args[0])

    def get_from_kind(self):
        return self.tags[1]

    def get_tgt_kind(self):
        return self.tags[2]

    def is_signed_to_signed_cast_lb(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(), self.get_from_kind(), self.get_tgt_kind()
        )
class CPOSignedToSignedCastUB(CPOPredicate):
    """Proof obligation: signed-to-signed cast respects the upper bound.

    tags:
        0: 'cssu'
        1: from ikind
        2: tgt ikind
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # args[0] indexes the file dictionary's expression table.
        return self.cd.dictionary.get_exp(self.args[0])

    def get_from_kind(self):
        return self.tags[1]

    def get_tgt_kind(self):
        return self.tags[2]

    def is_signed_to_signed_cast_ub(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},from:{},to:{})".format(
            self.get_tag(), self.get_exp(), self.get_from_kind(), self.get_tgt_kind()
        )
class CPONotZero(CPOPredicate):
    """Proof obligation: expression evaluates to a non-zero value.

    tags:
        0: 'z'
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # args[0] indexes the file dictionary's expression table.
        return self.cd.dictionary.get_exp(self.args[0])

    def is_not_zero(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPONonNegative(CPOPredicate):
    """Proof obligation: expression evaluates to a non-negative value.

    tags:
        0: 'nneg'
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # args[0] indexes the file dictionary's expression table.
        return self.cd.dictionary.get_exp(self.args[0])

    def is_non_negative(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPONullTerminated(CPOPredicate):
    """Proof obligation: expression points at a null-terminated string.

    tags:
        0: 'nt'
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # args[0] indexes the file dictionary's expression table.
        return self.cd.dictionary.get_exp(self.args[0])

    def is_null_terminated(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOIntUnderflow(CPOPredicate):
    """Proof obligation: signed binary operation does not underflow.

    tags:
        0: 'iu'
        1: binop
        2: ikind
    args:
        0: exp1
        1: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_binop(self):
        return self.tags[1]

    def get_ikind(self):
        return self.tags[2]

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_int_underflow(self):
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}({},{},op:{},ikind:{})".format(
            self.get_tag(), self.get_exp1(), self.get_exp2(),
            self.get_binop(), self.get_ikind()
        )
class CPOIntOverflow(CPOPredicate):
    """Proof obligation: signed binary operation does not overflow.

    tags:
        0: 'io'
        1: binop
        2: ikind
    args:
        0: exp1
        1: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_binop(self):
        return self.tags[1]

    def get_ikind(self):
        return self.tags[2]

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_int_overflow(self):
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}({},{},op:{},ikind:{})".format(
            self.get_tag(), self.get_exp1(), self.get_exp2(),
            self.get_binop(), self.get_ikind()
        )
class CPOUIntUnderflow(CPOPredicate):
    """Proof obligation: unsigned binary operation does not underflow.

    tags:
        0: 'uiu'
        1: binop
        2: ikind
    args:
        0: exp1
        1: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_binop(self):
        return self.tags[1]

    def get_ikind(self):
        return self.tags[2]

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_int_underflow(self):
        # NOTE(review): named is_int_underflow (not is_uint_underflow), the
        # same query the signed variant answers — presumably callers treat
        # both as "underflow"; confirm before renaming.
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}({},{},op:{},ikind:{})".format(
            self.get_tag(), self.get_exp1(), self.get_exp2(),
            self.get_binop(), self.get_ikind()
        )
class CPOUIntOverflow(CPOPredicate):
    """Proof obligation: unsigned binary operation does not overflow.

    tags:
        0: 'uio'
        1: binop
        2: ikind
    args:
        0: exp1
        1: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_binop(self):
        return self.tags[1]

    def get_ikind(self):
        return self.tags[2]

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_int_overflow(self):
        # NOTE(review): named is_int_overflow (not is_uint_overflow), same
        # query the signed variant answers — confirm before renaming.
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}({},{},op:{},ikind:{})".format(
            self.get_tag(), self.get_exp1(), self.get_exp2(),
            self.get_binop(), self.get_ikind()
        )
class CPOWidthOverflow(CPOPredicate):
    """Proof obligation: value fits in the width of the given integer kind.

    tags:
        0: 'w'
        1: ikind
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # args[0] indexes the file dictionary's expression table.
        return self.cd.dictionary.get_exp(self.args[0])

    def get_ikind(self):
        return self.tags[1]

    def is_width_overflow(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({},kind:{})".format(
            self.get_tag(), self.get_exp(), self.get_ikind()
        )
class CPOPtrLowerBound(CPOPredicate):
    """Proof obligation: pointer arithmetic stays above the lower bound.

    tags:
        0: 'plb'
        1: binop
    args:
        0: typ
        1: exp1
        2: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[2])

    def get_binop(self):
        return self.tags[1]

    def is_ptr_lower_bound(self):
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}({},{},op:{},typ:{})".format(
            self.get_tag(), self.get_exp1(), self.get_exp2(),
            self.get_binop(), self.get_type()
        )
class CPOPtrUpperBound(CPOPredicate):
    """Proof obligation: pointer arithmetic stays below the upper bound.

    tags:
        0: 'pl=ub'
        1: binop
    args:
        0: typ
        1: exp1
        2: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[2])

    def get_binop(self):
        return self.tags[1]

    def is_ptr_upper_bound(self):
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def has_variable_op(self, vid, op):
        # Same short-circuit structure as has_variable, but also matches
        # the operator applied to the variable.
        if self.get_exp1().has_variable_op(vid, op):
            return True
        return self.get_exp2().has_variable_op(vid, op)

    def __str__(self):
        return "{}(typ:{},op:{},{},{})".format(
            self.get_tag(), self.get_type(), self.get_binop(),
            self.get_exp1(), self.get_exp2()
        )
class CPOPtrUpperBoundDeref(CPOPredicate):
    """Proof obligation: dereferenced pointer stays below the upper bound.

    tags:
        0: 'pubd'
        1: binop
    args:
        0: typ
        1: exp1
        2: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_type(self):
        return self.cd.dictionary.get_typ(self.args[0])

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[2])

    def get_binop(self):
        return self.tags[1]

    def is_ptr_upper_bound_deref(self):
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}(typ:{},op:{},{},{})".format(
            self.get_tag(), self.get_type(), self.get_binop(),
            self.get_exp1(), self.get_exp2()
        )
class CPOCommonBase(CPOPredicate):
    """Proof obligation: two pointer expressions share a common base.

    tags:
        0: 'cb'
    args:
        0: exp1
        1: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_common_base(self):
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}({},{})".format(self.get_tag(), self.get_exp1(), self.get_exp2())
class CPOCommonBaseType(CPOPredicate):
    """Proof obligation: two expressions share a common base type.

    tags:
        0: 'cbt'
    args:
        0: exp1
        1: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}({},{})".format(self.get_tag(), self.get_exp1(), self.get_exp2())
class CPOFormatString(CPOPredicate):
    """Proof obligation: expression is a valid format string.

    tags:
        0: 'ft'
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        # Explicit int() retained: args[0] may arrive as a string here.
        return self.cd.dictionary.get_exp(int(self.args[0]))

    def is_format_string(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        return "{}({})".format(self.get_tag(), self.get_exp())
class CPOVarArgs(CPOPredicate):
    """Proof obligation on a varargs call: arguments match the format string.

    tags:
        0: 'va'
    args:
        0: exp (format string)
        1: int (expected number of arguments)
        r: exps (actual arguments)

    NOTE(review): the arg layout above and the accessors below disagree —
    get_formatstring reads args[1] and get_argcount reads args[0]; either
    the docstring or the code has the first two indices swapped. Verify
    against the dictionary encoding before relying on either.
    """
    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)
    def get_formatstring(self):
        # See NOTE(review) in the class docstring about the index used here.
        return self.cd.dictionary.get_exp(int(self.args[1]))
    def get_argcount(self):
        # See NOTE(review) in the class docstring about the index used here.
        return int(self.args[0])
    def get_arguments(self):
        # Remaining args are expression indices for the actual arguments.
        return [self.cd.dictionary.get_exp(int(x)) for x in self.args[2:]]
    def __str__(self):
        # Renders as: <tag>(<format string>,<expected count>,<actual count>)
        return (
            self.get_tag()
            + "("
            + str(self.get_formatstring())
            + ","
            + str(self.get_argcount())
            + ","
            + str(len(self.get_arguments()))
            + ")"
        )
class CPONoOverlap(CPOPredicate):
    """Proof obligation: two memory regions do not overlap.

    tags:
        0: 'no'
    args:
        0: exp1
        1: exp2
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp1(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_exp2(self):
        return self.cd.dictionary.get_exp(self.args[1])

    def is_no_overlap(self):
        return True

    def has_variable(self, vid):
        # Short-circuit: only look up exp2 if exp1 did not match.
        if self.get_exp1().has_variable(vid):
            return True
        return self.get_exp2().has_variable(vid)

    def __str__(self):
        return "{}({},{})".format(self.get_tag(), self.get_exp1(), self.get_exp2())
class CPOValueConstraint(CPOPredicate):
    """Proof obligation: an arbitrary value constraint given as an expression.

    tags:
        0: 'vc'
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def get_tag(self):
        # Delegates to the base class; appending the expression to the tag
        # was deliberately left disabled in the original.
        return CPOPredicate.get_tag(self)

    def is_value_constraint(self):
        return True

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        # Unlike the other predicates, this renders as the bare expression.
        return str(self.get_exp())
class CPOPreservedAllMemory(CPOPredicate):
    """Proof obligation: all memory is preserved (no args).

    tags:
        0: 'prm'
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def is_preserved_all_memory(self):
        return True

    def __str__(self):
        # No operands: the rendering is just the tag itself.
        return self.get_tag()
class CPOPreservedValue(CPOPredicate):
    """Proof obligation: the value of an expression is preserved.

    tags:
        0: 'pv'
    args:
        0: exp
    """

    def __init__(self, cd, index, tags, args):
        CPOPredicate.__init__(self, cd, index, tags, args)

    def get_exp(self):
        return self.cd.dictionary.get_exp(self.args[0])

    def has_variable(self, vid):
        return self.get_exp().has_variable(vid)

    def __str__(self):
        # Note: uses a fixed prefix rather than self.get_tag().
        return "preserves-value({})".format(self.get_exp())
| 21.954751
| 88
| 0.548777
| 5,405
| 43,668
| 4.165772
| 0.06124
| 0.109256
| 0.092023
| 0.066619
| 0.816264
| 0.799876
| 0.75191
| 0.727127
| 0.714736
| 0.703855
| 0
| 0.011642
| 0.315448
| 43,668
| 1,988
| 89
| 21.965795
| 0.741578
| 0.098356
| 0
| 0.838302
| 0
| 0
| 0.030494
| 0.00447
| 0
| 0
| 0
| 0
| 0
| 1
| 0.323397
| false
| 0
| 0.001807
| 0.270099
| 0.654923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
5ac4f48991ca83a2eb902daef58390182cff0376
| 74,769
|
py
|
Python
|
tests/python/gpu/test_norm_conv_gpu.py
|
sneaxiy/NVIDIA-MxNet
|
ce30b18212fbf23f68c006a02cc034e417bb5518
|
[
"Apache-2.0"
] | null | null | null |
tests/python/gpu/test_norm_conv_gpu.py
|
sneaxiy/NVIDIA-MxNet
|
ce30b18212fbf23f68c006a02cc034e417bb5518
|
[
"Apache-2.0"
] | null | null | null |
tests/python/gpu/test_norm_conv_gpu.py
|
sneaxiy/NVIDIA-MxNet
|
ce30b18212fbf23f68c006a02cc034e417bb5518
|
[
"Apache-2.0"
] | 3
|
2021-07-20T07:40:15.000Z
|
2021-08-03T08:39:17.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import sys
import os
import mxnet as mx
import numpy as np
from mxnet.test_utils import default_context, set_default_context, assert_almost_equal
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
sys.path.insert(0, os.path.join(curr_path, '../unittest'))
from common import setup_module, with_seed, teardown
set_default_context(mx.gpu(0))
# Helper function that returns True/False with equal probability.
def _random_boolean():
return np.random.randint(0,2) == 0
# Helper function to the normalized convolution tests
# Return the indices (along the feature dimension) that have relu inputs near 0.
def _has_near_zero_outputs(x, b, g, eps, threshold):
    """Run a reference BatchNorm (beta=b, gamma=g) over x and return the
    indices along the feature (last) axis whose minimum |output| is below
    `threshold` — i.e. the features whose relu inputs would sit near 0.

    x is NHWC data (axis=-1 below normalizes the last axis); b and g are
    1-D per-feature arrays. Returns a numpy index array (empty when no
    feature is problematic).
    """
    ctx = default_context()
    X = mx.sym.Variable('X')
    B = mx.sym.Variable('B')  # beta, i.e. bias
    G = mx.sym.Variable('G')  # gamma, i.e. scale
    MovMean = mx.sym.Variable('MovMean')
    MovVar = mx.sym.Variable('MovVar')
    feature_shape = b.shape
    # Fresh zero-mean/unit-var running stats; they are not updated here
    # (grad_req below is all 'null') and is_train=True uses batch stats.
    mov_mean = mx.nd.zeros(feature_shape, dtype=np.float32, ctx=ctx)
    mov_var = mx.nd.ones(feature_shape, dtype=np.float32, ctx=ctx)
    bn_sym = mx.sym.BatchNorm(data=X, gamma=G, beta=B, act_type=None,
                              moving_mean=MovMean, moving_var=MovVar,
                              eps=eps, momentum=0.9, fix_gamma=False,
                              use_global_stats=False, output_mean_var=False,
                              cudnn_off=False, name=None, axis=-1)
    args_dict = {'X':x, 'B':b, 'G':g,}
    aux_states_dict = {'MovMean':mov_mean, 'MovVar':mov_var}
    # NOTE(review): 'W' is listed here but no 'W' variable exists in this
    # graph — presumably a harmless leftover from the conv variant; confirm
    # bind() tolerates extra grad_req keys.
    grad_req = {'MovMean':'null', 'MovVar':'null',
                'X':'null', 'W':'null', 'G':'null', 'B':'null'}
    bn_exe = bn_sym.bind(ctx=ctx, args=args_dict,
                         aux_states=aux_states_dict, grad_req=grad_req)
    # Execute forward() graph calculation
    # need is_train=True to keep Batchnorm using the mini-batch mean and variance
    bn_outputs = bn_exe.forward(is_train=True)
    out_data = bn_outputs[0].asnumpy()
    out_data_abs = np.abs(out_data)
    # Reduce over N, H, W, leaving one minimum per feature channel.
    not_feature_axes = (0, 1, 2)
    origin_dist_mins = out_data_abs.min(axis=not_feature_axes)
    bad_indices = np.nonzero(origin_dist_mins < threshold)[0]
    return bad_indices
@with_seed()
def test_norm_convolution():
    """Compare the fused NormConvolution op against a reference graph built
    from standard BatchNorm + Convolution, over RN50 layer shapes.

    Skips on GPU architectures the fused op does not support. Exercises
    three regimes: normalization-only (1x1 unity-weight conv), convolution
    without normalization, and convolution with normalization — with
    randomized relu activation and stats-output settings — checking both
    forward outputs and backward gradients within empirical tolerances.
    """
    ctx = default_context()
    # Fused NormConvolution is only implemented for these SM architectures.
    cuda_arch_list = [70, 75, 80, 86]
    cuda_arch = mx.context.gpu_sm_arch(ctx.device_id)
    if cuda_arch not in cuda_arch_list:
        print('Bypassing normalized convolution test on cuda arch {} supported versions are {}).'.format(
            cuda_arch, cuda_arch_list))
        return
    # RN50 layer shapes
    nchw_shapes = [
        ( 64, 256, 56, 56),
        ( 64, 128, 28, 28),
        ( 64, 512, 28, 28),
        ( 64, 256, 14, 14),
        ( 64, 1024, 14, 14),
        ( 64, 512, 7, 7),
        ( 64, 2048, 7, 7),
        (128, 64, 56, 56),
        (128, 256, 56, 56),
        (128, 128, 28, 28),
        (128, 512, 28, 28),
        (128, 256, 14, 14),
        (128, 1024, 14, 14),
        (128, 512, 7, 7),
        (128, 2048, 7, 7),
    ]
    # Make dataset stats (to input to BNStatsFinalize)
    def create_input_stats_np(data_np):
        # Per-feature sum and sum-of-squares in fp32, reducing over N,H,W.
        data_fp32_np = data_np.astype(np.float32)
        not_feature_axes = (0, 1, 2)
        feature_sum_np = data_fp32_np.sum(axis=not_feature_axes)
        feature_sum_squares_np = np.square(data_fp32_np).sum(axis=not_feature_axes)
        return (feature_sum_np, feature_sum_squares_np)
    # Optionally append per-feature sum / sum-of-squares outputs to a symbol.
    def create_output_stats(data, output_stats):
        if output_stats:
            data_fp32 = mx.sym.cast(data, np.float32)
            not_feature_axes = (0, 1, 2)
            feature_sum = data_fp32.sum(axis=not_feature_axes)
            feature_sum_squares = data_fp32.square().sum(axis=not_feature_axes)
            return mx.sym.Group([data, feature_sum, feature_sum_squares])
        else:
            return data
    # Standard convolution output-shape arithmetic for NHWC inputs.
    def out_shape(nhwc_inshape, num_filters, kernel_shape, stride, pad):
        (n, h, w, _) = nhwc_inshape
        (kernel_h, kernel_w) = kernel_shape
        (stride_h, stride_w) = stride
        (pad_h, pad_w) = pad
        out_shape_h = 1 + ((h + 2 * pad_h - kernel_h) // stride_h)
        out_shape_w = 1 + ((w + 2 * pad_w - kernel_w) // stride_w)
        return (n, out_shape_h, out_shape_w, num_filters)
    # flip a dataset about the 1st dimension
    def flip(data):
        return mx.sym.flip(data, axis=0)
    # return a new symbol that isolates the input symbol's outputs
    # (double-flip is an identity that inserts a copy into the graph)
    def buffer(sym):
        num_outputs = len(sym.list_outputs())
        if num_outputs == 1:
            return flip(flip(sym))
        else:
            flipped_outputs = [ flip(flip(sym[i])) for i in range(num_outputs)]
            return mx.sym.Group(flipped_outputs)
    # Test fused op without input normalization. Options for activation and output of stats.
    def finalize_norm_conv_test(nchw_inshape, kernel_shape, num_filter, act_type, stride,
                                pad, output_stats, no_norm, no_conv,
                                eps, momentum):
        # If we are disabling the convolution (no_conv = True), then set kernel=1x1 and weights 1
        if no_conv:
            if kernel_shape != (1,1):
                print('Ignoring kernel_shape {}, forcing 1x1 in no_conv mode.'.format(kernel_shape))
                kernel_shape = (1,1)
        (n, c, h, w) = nchw_inshape
        X = mx.sym.Variable('X')
        W = mx.sym.Variable('W')
        SUM = mx.sym.Variable('SUM')
        SUMSQ = mx.sym.Variable('SUMSQ')
        B = mx.sym.Variable('B')  # beta, i.e. bias
        G = mx.sym.Variable('G')  # gamma, i.e. scale
        # randomly insert buffering here to exercise in-place vs. copy of gamma/beta by Finalize
        if _random_boolean():
            B = buffer(B)
            G = buffer(G)
        MovMean = mx.sym.Variable('MovMean')
        MovVar = mx.sym.Variable('MovVar')
        # make 'ground truth' symbol using standard Batchnorm and Convolution
        if no_norm and (act_type is None):
            normalized = X
        elif no_norm:
            normalized = mx.sym.Activation(data=X, act_type=act_type)
        else:
            normalized = mx.sym.BatchNorm(data=X, gamma=G, beta=B, act_type=act_type,
                                          moving_mean=MovMean, moving_var=MovVar,
                                          eps=eps, momentum=momentum, fix_gamma=False,
                                          use_global_stats=False, output_mean_var=False,
                                          cudnn_off=False, name=None, axis=-1)
        (r, s) = kernel_shape
        layout = 'NHWC'
        conv_args = {'weight':W, 'num_filter':num_filter, 'kernel':kernel_shape,
                     'stride':stride, 'pad':pad, 'layout':layout, 'name':'conv'}
        # For test failure reproducibility, fix the algos of the golden copy
        conv_sym = mx.sym.Convolution(data=normalized, no_bias=True,
                                      cudnn_algo_fwd=1,
                                      cudnn_algo_bwd_data=1,
                                      cudnn_algo_bwd_filter=1,
                                      **conv_args)
        conv_sym = create_output_stats(conv_sym, output_stats)
        # make symbol-under-test using NormConvolution
        if not no_norm:
            # NormConvolution makes use of conv_args but for stats-apply mode has more inputs:
            conv_args.update({'in_sum':SUM, 'in_sum_squares':SUMSQ, 'gamma':G, 'beta':B,
                              'moving_mean':MovMean, 'moving_var':MovVar, 'eps':eps,
                              'momentum':momentum, 'fix_gamma':False,
                              'output_mean_var':True
                              })
        norm_conv_sym = mx.sym.NormConvolution(X, act_type=act_type,
                                               no_norm=no_norm,
                                               **conv_args)
        if not output_stats:
            # discard sum and sum_squares outputs before binding
            norm_conv_sym = norm_conv_sym[0]
        # make data inputs
        weight_shape = (num_filter, r, s, c)
        data_shape = (n, h, w, c)
        # x_np = np.fromfunction(lambda n, h, w, c: 3*((n+h+w)%2), data_shape)
        # x = mx.nd.array(x_np, dtype=np.float16, ctx=ctx)
        x = mx.ndarray.random.uniform(-0.5, 0.5, data_shape, dtype=np.float16, ctx=ctx)
        (feature_sum_np, feature_sum_squares_np) = create_input_stats_np(x.asnumpy())
        sum = mx.nd.array(feature_sum_np, dtype=np.float32)
        sum_squares = mx.nd.array(feature_sum_squares_np, dtype=np.float32)
        equiv_scale_bias_shape = (c,)
        scale_max = 1.25
        bias_max = 1
        # Comparing gradients of two symbols is tricky when a non-smooth function like 'relu'
        # is part of the function. We ensure that no relu inputs are near 0 (within a threshold)
        # by trying different beta/gamma values as needed.
        b_np = np.zeros(equiv_scale_bias_shape, dtype=np.float32)
        g_np = np.zeros(equiv_scale_bias_shape, dtype=np.float32)
        indices_to_set = np.array(range(c))
        while len(indices_to_set) > 0:
            # Re-draw beta/gamma only for the still-problematic features.
            for index in indices_to_set:
                b_np[index] = np.random.uniform(-bias_max, bias_max)
                g_np[index] = np.random.uniform(1.0/scale_max, scale_max)
            b = mx.nd.array(b_np, dtype=np.float32, ctx=ctx)
            g = mx.nd.array(g_np, dtype=np.float32, ctx=ctx)
            smallest_norm_fp16 = pow(2, -14)
            threshold = smallest_norm_fp16 / 2
            need_data_check = not no_norm and act_type == 'relu'
            if need_data_check:
                indices_to_set = _has_near_zero_outputs(x, b, g, eps, threshold=threshold)
            else:
                indices_to_set = []
        # mov_mean_np = np.zeros(equiv_scale_bias_shape).astype(np.float32)
        # mov_var_np = np.ones(equiv_scale_bias_shape).astype(np.float32)
        mov_mean_np = np.random.uniform(-bias_max, bias_max, equiv_scale_bias_shape)
        mov_var_np = np.random.uniform(1.0/scale_max, scale_max, equiv_scale_bias_shape)
        # since the models change the moving mean and variance, each model gets their own copy
        mov_mean1 = mx.nd.array(mov_mean_np, dtype=np.float32, ctx=ctx)
        mov_mean2 = mx.nd.array(mov_mean_np, dtype=np.float32, ctx=ctx)
        mov_var1 = mx.nd.array(mov_var_np, dtype=np.float32, ctx=ctx)
        mov_var2 = mx.nd.array(mov_var_np, dtype=np.float32, ctx=ctx)
        if no_conv:
            weights = mx.ndarray.ones(weight_shape, dtype=np.float16, ctx=ctx)
        else:
            weights = mx.ndarray.random.uniform(-0.20, 0.20, weight_shape, dtype=np.float16, ctx=ctx)
        # These are the tensor's that receive the backpropped gradients (so an output of backward())
        # Copy 1 is for 'ground truth' symbol based on BatchNorm/Convolution ops
        d_x_out_gt = mx.ndarray.zeros(data_shape, dtype=np.float16, ctx=ctx)
        d_w_out_gt = mx.ndarray.zeros(weight_shape, dtype=np.float16, ctx=ctx)
        d_gamma_out_gt = mx.ndarray.zeros(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
        d_beta_out_gt = mx.ndarray.zeros(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
        # Copy 2 is for symbol based on BNStatsFinalize/NormConvolution ops (=ones, not zeros)
        d_x_out = mx.ndarray.ones(data_shape, dtype=np.float16, ctx=ctx)
        d_w_out = mx.ndarray.ones(weight_shape, dtype=np.float16, ctx=ctx)
        d_gamma_out = mx.ndarray.ones(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
        d_beta_out = mx.ndarray.ones(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
        # bind i/o's to symbols to create executors
        grad_req = {'SUM':'null', 'SUMSQ':'null', 'MovMean':'null', 'MovVar':'null',
                    'X':'write', 'W':'write', 'G':'write', 'B':'write'}
        args_grad_dict_gt = {'X':d_x_out_gt, 'W':d_w_out_gt, 'G':d_gamma_out_gt, 'B':d_beta_out_gt}
        args_grad_dict = {'X':d_x_out, 'W':d_w_out, 'G':d_gamma_out, 'B':d_beta_out}
        args_dict = {'X':x, 'W':weights}
        # conv binding does not need SUM, and SUMSQ, but extra items are OK
        if not no_norm:
            args_dict.update({'B':b, 'G':g, 'SUM':sum, 'SUMSQ':sum_squares})
        gt_aux_states_dict = \
            {'MovMean':mov_mean1, 'MovVar':mov_var1}
        finalize_aux_states_dict = \
            {'MovMean':mov_mean2, 'MovVar':mov_var2}
        conv_exe = conv_sym.bind(ctx=ctx, args=args_dict, args_grad=args_grad_dict_gt,
                                 aux_states=gt_aux_states_dict, grad_req=grad_req)
        norm_conv_exe = norm_conv_sym.bind(ctx=ctx, args=args_dict, args_grad=args_grad_dict,
                                           aux_states=finalize_aux_states_dict, grad_req=grad_req)
        # Execute forward() graph calculation
        # need is_train=True to keep Batchnorm using the mini-batch mean and variance
        conv_outputs = conv_exe.forward(is_train=True)
        # need is_train=True to keep stats from being turned off
        norm_conv_outputs = norm_conv_exe.forward(is_train=True)
        # Check forward outputs
        outputs = ['out', 'sum', 'sum_squares']
        # greater atols needs for 'sum' and 'sum_squares', also if input scale/bias is applied
        if no_norm:
            tols = [(1e-2, 2e-2), (1e-2, 2), (1e-2, 2)]
        else:
            # 'sum' seems to have a large span (e.g. -400K -> +400K) so a large absolute tolerance
            # is needed to cover those cases when the result is near 0 and rtol can't help.
            # One possible source of the large sum tolerance is the internal rounding of the
            # mean to fp16. Any rounding amount will give a bias to the conv inputs and so the sum.
            # 'sum_squares' doesn't have this issue because rtol handles the always-positive result.
            per_element_atol = 5e-3
            sum_atol = n * h * w * per_element_atol
            tols = [(1e-2, 1e-1), (1e-1, sum_atol), (1e-2, 2)]
        num_outputs = 3 if output_stats else 1
        for idx in range(num_outputs):
            out_name = outputs[idx]
            conv_data = conv_outputs[idx]
            norm_conv_data = norm_conv_outputs[idx]
            (rtol, atol) = tols[idx]
            assert_almost_equal(conv_data, norm_conv_data, rtol=rtol, atol=atol,
                                names=('conv_{}'.format(out_name),
                                       'norm_conv_{}'.format(out_name)))
        # Check backward function
        if no_norm and act_type is not None:
            # gradient calculation not supported for this configuration
            return
        # Create backward gradients
        outshape = out_shape(data_shape, num_filter, kernel_shape, stride, pad)
        d_out_in = mx.ndarray.random.uniform(-0.2, 0.2, outshape,
                                             dtype=np.float16, ctx=ctx)
        # not really needed
        sum_shape = (num_filter,)
        # gradients on these outputs will be summed into the d_out_in for the ground truth
        # symbol, so make sure these are 0.
        d_sum_in = mx.ndarray.zeros(sum_shape, dtype=np.float32, ctx=ctx)
        d_sum_squares_in = mx.ndarray.zeros(sum_shape, dtype=np.float32, ctx=ctx)
        # d_sum_in = mx.ndarray.random.uniform(0.0, 1.0, sum_shape,
        #                                      dtype=np.float32, ctx=ctx)
        # d_sum_squares_in = mx.ndarray.random.uniform(0.0, 1.0, sum_shape,
        #                                              dtype=np.float32, ctx=ctx)
        # Execute backward() graph calculation
        if output_stats:
            conv_outputs = conv_exe.backward([d_out_in, d_sum_in, d_sum_squares_in])
            norm_conv_outputs = norm_conv_exe.backward([d_out_in, d_sum_in, d_sum_squares_in])
        else:
            conv_outputs = conv_exe.backward([d_out_in,])
            norm_conv_outputs = norm_conv_exe.backward([d_out_in,])
        # Check weight gradient
        out_name = 'd_w'
        assert_almost_equal(d_w_out_gt, d_w_out, atol=0.3, rtol=0.1,
                            names=('conv_{}'.format(out_name), 'norm_conv_{}'.format(out_name)))
        # Check data gradient
        # This check is flakey when act_type = relu because if the two models differ on whether
        # the normalized value is above or below 0, then the gradient may or may-not be backpropped.
        # To fix this test, we could run a separate model with relu off, capture the normalized
        # output and then mask off the gradient comparison when the normalized value is near 0.
        out_name = 'd_x'
        if act_type is None:
            assert_almost_equal(d_x_out_gt, d_x_out, atol=0.1, rtol=0.1,
                                names=('conv_{}'.format(out_name), 'norm_conv_{}'.format(out_name)))
        # Check gamma and beta gradients
        out_name = 'd_gamma'
        assert_almost_equal(d_gamma_out_gt, d_gamma_out, atol=10, rtol=0.1,
                            names=('conv_{}'.format(out_name), 'norm_conv_{}'.format(out_name)))
        out_name = 'd_beta'
        assert_almost_equal(d_beta_out_gt, d_beta_out, atol=10, rtol=0.1,
                            names=('conv_{}'.format(out_name), 'norm_conv_{}'.format(out_name)))
    # Test input normalization function only: no_norm = False, 1x1 unity-weights conv
    # Also test with 'relu' activation on and off.
    print('\nTest of input normalization without convolution function.')
    eps = 1e-4
    momentum = 0.9
    for i in range(len(nchw_shapes)):
        inshape = nchw_shapes[i]
        (n, c, h, w) = inshape
        num_filter = 32
        outshape = (n, num_filter, h, w)
        stride = (1,1)
        print('nchw inshape = {}, outshape = {}, stride = {}'.format(inshape, outshape, stride))
        kernel_shape = (1, 1)
        pad = (0, 0)
        output_stats = False
        act_type = 'relu' if _random_boolean() else None
        print('    kernel= {}, pad = {}, output_stats={}, act_type = {}'.format(
            kernel_shape, pad, output_stats, act_type))
        finalize_norm_conv_test(nchw_inshape=inshape, kernel_shape=kernel_shape,
                                num_filter=num_filter, act_type=act_type,
                                stride=stride, pad=pad, output_stats=output_stats,
                                no_norm=False,
                                no_conv=True, eps=eps, momentum=momentum)
    # Test convolution and stats-gen functions, first without, then with, input normalization.
    # Also test with 'relu' activation on and off.
    for no_norm in [True, False]:
        if no_norm:
            print('\nTest of convolution function, without input normalization.')
        else:
            print('\nTest of convolution function with input normalization.')
        for i in range(len(nchw_shapes)):
            inshape = nchw_shapes[i]
            (n, c, h, w) = inshape
            (stride_h, stride_w) = (1,1)
            # Leverage next test case (if available) to determine outshape, strides
            if i == len(nchw_shapes)-1:
                num_filter = nchw_shapes[i][1]
            else:
                num_filter = nchw_shapes[i+1][1]
                if nchw_shapes[i+1][2] < nchw_shapes[i][2]:
                    stride_h = nchw_shapes[i][2] // nchw_shapes[i+1][2]
                if nchw_shapes[i+1][3] < nchw_shapes[i][3]:
                    stride_w = nchw_shapes[i][3] // nchw_shapes[i+1][3]
            stride = (stride_h, stride_w)
            outshape = (n, num_filter, h // stride_h, w // stride_w)
            print('nchw inshape = {}, outshape = {}, stride = {}'.format(inshape, outshape, stride))
            # Only 3x3 kernel supports strides, not 1x1
            kernel_shapes = [(3, 3),] if stride_h > 1 or stride_w > 1 else [(1, 1), (3, 3)]
            for kernel_shape in kernel_shapes:
                # padding doesn't make sense for a 1x1 kernel
                pads = [(0, 0),] if kernel_shape[0] == 1 or kernel_shape[1] == 1 else [(0, 0), (1, 1)]
                for pad in pads:
                    act_type = 'relu' if _random_boolean() else None
                    output_stats = _random_boolean()
                    print('    kernel= {}, pad = {}, output_stats={}, act_type = {}'.format(
                        kernel_shape, pad, output_stats, act_type))
                    finalize_norm_conv_test(nchw_inshape=inshape, kernel_shape=kernel_shape,
                                            num_filter=num_filter, act_type=act_type,
                                            stride=stride, pad=pad, output_stats=output_stats,
                                            no_norm=no_norm,
                                            no_conv=False, eps=eps, momentum=momentum)
@with_seed()
def test_normalized_convolution():
    """Forward-only comparison of the fused NormalizedConvolution op against a
    reference graph built from broadcast scale/bias, Activation and Convolution.

    Runs over RN50 layer shapes and exercises: input normalization on/off
    (no_equiv_scale_bias), optional 'relu' activation, optional per-feature
    sum/sum_squares stats outputs, 1x1 and 3x3 kernels, strides and padding.
    Skipped unless the GPU's SM architecture is one the fused op supports.
    """
    ctx = default_context()
    # SM architectures the fused op is implemented for.
    cuda_arch_list = [70, 75, 80, 86]
    cuda_arch = mx.context.gpu_sm_arch(ctx.device_id)
    if cuda_arch not in cuda_arch_list:
        # Bug fix: message previously read '...arch {} supported versions are {}).'
        # with an unbalanced ')'; the opening parenthesis was missing.
        print('Bypassing normalized convolution test on cuda arch {} (supported versions are {}).'.format(
            cuda_arch, cuda_arch_list))
        return
    # RN50 layer shapes
    nchw_shapes = [
        ( 64, 256, 56, 56),
        ( 64, 128, 28, 28),
        ( 64, 512, 28, 28),
        ( 64, 256, 14, 14),
        ( 64, 1024, 14, 14),
        ( 64, 512, 7, 7),
        ( 64, 2048, 7, 7),
        (128, 64, 56, 56),
        (128, 256, 56, 56),
        (128, 128, 28, 28),
        (128, 512, 28, 28),
        (128, 256, 14, 14),
        (128, 1024, 14, 14),
        (128, 512, 7, 7),
        (128, 2048, 7, 7),
    ]
    # Make dataset stats (to input to BNStatsFinalize)
    def create_mean_inv_std(data):
        """Return per-feature (mean, 1/std_dev) of an NHWC numpy array, in fp32.

        Uses the population variance (divide by N, not N-1), matching what the
        fused op consumes.
        """
        num_features = data.shape[3]
        num_elems_per_feature = np.prod(data.shape) / num_features
        data_fp32 = data.astype(np.float32)
        not_feature_axes = (0, 1, 2)
        feature_sum = data_fp32.sum(axis=not_feature_axes)
        mean = feature_sum / num_elems_per_feature
        squared_error_sum = np.square(data_fp32 - mean).sum(axis=not_feature_axes)
        variance = squared_error_sum / num_elems_per_feature
        inv_std_dev = 1.0 / np.sqrt(variance)
        return (mean, inv_std_dev)
    # Prepare the input for a standard Convolution so it will mimic NormalizedConvolution
    def normalize_input(data, equiv_scale, equiv_bias, act_type, no_equiv_scale_bias):
        """Apply (data * equiv_scale + equiv_bias) then optional activation, symbolically."""
        normalized = data if no_equiv_scale_bias else \
                     mx.sym.broadcast_add(mx.sym.broadcast_mul(data, equiv_scale),
                                          equiv_bias)
        return normalized if act_type is None else mx.sym.Activation(normalized, act_type=act_type)
    # Helper function to the normalized convolution tests
    # Return the indices (along the feature dimension) that have relu inputs near 0.
    def has_near_zero_outputs(x, b, g, threshold):
        X = mx.sym.Variable('X')
        B = mx.sym.Variable('B') # beta, i.e. bias
        G = mx.sym.Variable('G') # gamma, i.e. scale
        norm_sym = mx.sym.broadcast_add(mx.sym.broadcast_mul(X, G), B)
        args_dict = {'X':x, 'B':b, 'G':g,}
        grad_req = {'X':'null', 'G':'null', 'B':'null'}
        norm_exe = norm_sym.bind(ctx=ctx, args=args_dict, grad_req=grad_req)
        # Execute forward() graph calculation
        # need is_train=True to keep Batchnorm using the mini-batch mean and variance
        norm_outputs = norm_exe.forward(is_train=True)
        out_data = norm_outputs[0].asnumpy()
        out_data_abs = np.abs(out_data)
        not_feature_axes = (0, 1, 2)
        # Per-feature minimum distance from 0 of the normalized data; features
        # whose minimum falls under 'threshold' could flip relu on/off between
        # the two compared models.
        origin_dist_mins = out_data_abs.min(axis=not_feature_axes)
        bad_indices = np.nonzero(origin_dist_mins < threshold)[0]
        return bad_indices
    # Make dataset stats (to augment standard Convolution) to mimic NormalizedConvolution
    def create_output_stats(data, output_stats):
        if output_stats:
            data_fp32 = mx.sym.cast(data, np.float32)
            not_feature_axes = (0, 1, 2)
            feature_sum = data_fp32.sum(axis=not_feature_axes)
            feature_sum_squares = data_fp32.square().sum(axis=not_feature_axes)
            return mx.sym.Group([data, feature_sum, feature_sum_squares])
        else:
            return data
    # Test fused op without input normalization. Options for activation and output of stats.
    def convolution_stats_test(nchw_inshape, kernel_shape, num_filter, act_type, stride,
                               pad, output_stats, no_equiv_scale_bias, no_conv):
        """Compare fused NormalizedConvolution output (and optional stats) against
        the reference Convolution graph for one configuration. Forward-only.
        """
        # If we are disabling the convolution (no_conv = True), then set kernel=1x1 and weights 1
        if no_conv:
            if kernel_shape != (1,1):
                print('Ignoring kernel_shape {}, forcing 1x1 in no_conv mode.'.format(kernel_shape))
            kernel_shape = (1,1)
        (n, c, h, w) = nchw_inshape
        X = mx.sym.Variable('X')
        W = mx.sym.Variable('W')
        EB = mx.sym.Variable('EB') # equiv_bias
        ES = mx.sym.Variable('ES') # equiv_scale
        M = mx.sym.Variable('M') # mean
        V = mx.sym.Variable('V') # variance (inv_std_dev actually)
        G = mx.sym.Variable('G') # gamma (dummy, only needed for backward)
        B = mx.sym.Variable('B') # beta (dummy, only needed for backward)
        (r, s) = kernel_shape
        layout = 'NHWC'
        conv_args = {'weight':W, 'num_filter':num_filter, 'kernel':kernel_shape,
                     'stride':stride, 'pad':pad, 'layout':layout, 'name':'conv'}
        conv_input = normalize_input(data=X, equiv_scale=ES, equiv_bias=EB, act_type=act_type,
                                     no_equiv_scale_bias=no_equiv_scale_bias)
        # For test failure reproducibility, fix the algos of the golden copy
        conv_sym = mx.sym.Convolution(conv_input, no_bias=True,
                                      cudnn_algo_fwd=1,
                                      cudnn_algo_bwd_data=1,
                                      cudnn_algo_bwd_filter=1,
                                      **conv_args)
        conv_sym = create_output_stats(conv_sym, output_stats)
        if not no_equiv_scale_bias:
            conv_args.update({'equiv_bias':EB, 'equiv_scale':ES, 'mean':M, 'var':V, 'gamma':G, 'beta':B})
        norm_conv_sym = mx.sym.NormalizedConvolution(X, act_type=act_type,
                                                     no_equiv_scale_bias=no_equiv_scale_bias,
                                                     **conv_args)
        if not output_stats:
            # discard sum and sum_squares outputs before binding
            norm_conv_sym = norm_conv_sym[0]
        weight_shape = (num_filter, r, s, c)
        data_shape = (n, h, w, c)
        feature_plane_elements = n * h * w
        x = mx.ndarray.random.uniform(-0.5, 0.5, data_shape, dtype=np.float16, ctx=ctx)
        (m_np, v_np) = create_mean_inv_std(x.asnumpy())
        m = mx.nd.array(m_np, dtype=np.float32, ctx=ctx)
        v = mx.nd.array(v_np, dtype=np.float32, ctx=ctx)
        if no_conv:
            # unity weights with a 1x1 kernel make the conv an identity pass-through
            w = mx.ndarray.ones(weight_shape, dtype=np.float16, ctx=ctx)
        else:
            w = mx.ndarray.random.uniform(-0.5, 0.5, weight_shape, dtype=np.float16, ctx=ctx)
        equiv_scale_bias_shape = (c,)
        scale_max = 1.25
        bias_max = 1
        # Comparing gradients of two symbols is tricky when a non-smooth function like 'relu'
        # is part of the function. We ensure that no relu inputs are near 0 (within a threshold)
        # by trying different beta/gamma values as needed.
        eb_np = np.zeros(equiv_scale_bias_shape, dtype=np.float16)
        es_np = np.zeros(equiv_scale_bias_shape, dtype=np.float16)
        indices_to_set = np.array(range(c))
        while len(indices_to_set) > 0:
            # Re-roll only the offending features until none are near 0.
            for index in indices_to_set:
                eb_np[index] = np.random.uniform(-bias_max, bias_max)
                es_np[index] = np.random.uniform(1.0/scale_max, scale_max)
            eb = mx.nd.array(eb_np, dtype=np.float16, ctx=ctx)
            es = mx.nd.array(es_np, dtype=np.float16, ctx=ctx)
            smallest_norm_fp16 = pow(2, -14)
            threshold = smallest_norm_fp16 / 2
            need_data_check = not no_equiv_scale_bias and act_type == 'relu'
            if need_data_check:
                indices_to_set = has_near_zero_outputs(x, eb, es, threshold=threshold)
            else:
                indices_to_set = []
        dummy_g = mx.ndarray.random.uniform(0.0, 1.0, equiv_scale_bias_shape,
                                            dtype=np.float32, ctx=ctx)
        dummy_b = mx.ndarray.random.uniform(0.0, 1.0, equiv_scale_bias_shape,
                                            dtype=np.float32, ctx=ctx)
        args_dict = {'X':x, 'W':w} if no_equiv_scale_bias else {'X':x, 'W':w,
                                                                'EB':eb, 'ES':es,
                                                                'M':m, 'V':v,
                                                                'G':dummy_g, 'B':dummy_b}
        conv_exe = conv_sym.bind(ctx=ctx, args=args_dict, grad_req='null')
        norm_conv_exe = norm_conv_sym.bind(ctx=ctx, args=args_dict, grad_req='null')
        conv_outputs = conv_exe.forward(is_train=False)
        # need is_train=True to keep stats from being turned off
        norm_conv_outputs = norm_conv_exe.forward(is_train=output_stats)
        outputs = ['out', 'sum', 'sum_squares']
        # greater atols needs for 'sum' and 'sum_squares', also if input scale/bias is applied
        if no_equiv_scale_bias:
            tols = [(1e-2, 2e-2), (1e-2, 2), (1e-2, 2)]
        else:
            # 'sum' seems to have a large span (e.g. -400K -> +400K) so a large absolute tolerance
            # is needed to cover those cases when the result is near 0 and rtol can't help.
            # One possible source of the large sum tolerance is the internal rounding of the
            # mean to fp16. Any rounding amount will give a bias to the conv inputs and so the sum.
            # 'sum_squares' doesn't have this issue because rtol handles the always-positive result.
            per_element_atol = 5e-3
            sum_atol = feature_plane_elements * per_element_atol
            tols = [(1e-2, 1e-1), (5e-2, sum_atol), (1e-2, 2)]
        num_outputs = 3 if output_stats else 1
        for idx in range(num_outputs):
            out_name = outputs[idx]
            conv_data = conv_outputs[idx]
            norm_conv_data = norm_conv_outputs[idx]
            (rtol, atol) = tols[idx]
            assert_almost_equal(conv_data, norm_conv_data, rtol=rtol, atol=atol,
                                names=('conv_{}'.format(out_name),
                                       'norm_conv_{}'.format(out_name)))
    # Test input normalization function only: no_equiv_scale_bias = False, 1x1 unity-weights conv
    # Also test with 'relu' activation on and off.
    for i in range(len(nchw_shapes)):
        inshape = nchw_shapes[i]
        (n, c, h, w) = inshape
        num_filter = 32
        outshape = (n, num_filter, h, w)
        stride = (1,1)
        print('nchw inshape = {}, outshape = {}, stride = {}'.format(inshape, outshape, stride))
        kernel_shape = (1, 1)
        pad = (0, 0)
        output_stats = False
        act_type = 'relu' if _random_boolean() else None
        print(' kernel= {}, pad = {}, output_stats={}, act_type = {}'.format(
            kernel_shape, pad, output_stats, act_type))
        convolution_stats_test(nchw_inshape=inshape, kernel_shape=kernel_shape,
                               num_filter=num_filter, act_type=act_type,
                               stride=stride, pad=pad, output_stats=output_stats,
                               no_equiv_scale_bias=False, no_conv=True)
    # Test convolution and stats-gen functions, first without, then with, input normalization.
    # Also test with 'relu' activation on and off.
    for no_equiv_scale_bias in [True, False]:
        if no_equiv_scale_bias:
            print('\nTest of convolution function, without input normalization.')
        else:
            print('\nTest of convolution function with input normalization.')
        for i in range(len(nchw_shapes)):
            inshape = nchw_shapes[i]
            (n, c, h, w) = inshape
            (stride_h, stride_w) = (1,1)
            # Leverage next test case (if available) to determine outshape, strides
            if i == len(nchw_shapes)-1:
                num_filter = nchw_shapes[i][1]
            else:
                num_filter = nchw_shapes[i+1][1]
                if nchw_shapes[i+1][2] < nchw_shapes[i][2]:
                    stride_h = nchw_shapes[i][2] // nchw_shapes[i+1][2]
                if nchw_shapes[i+1][3] < nchw_shapes[i][3]:
                    stride_w = nchw_shapes[i][3] // nchw_shapes[i+1][3]
            stride = (stride_h, stride_w)
            outshape = (n, num_filter, h // stride_h, w // stride_w)
            print('nchw inshape = {}, outshape = {}, stride = {}'.format(inshape, outshape, stride))
            # Only 3x3 kernel supports strides, not 1x1
            kernel_shapes = [(3, 3),] if stride_h > 1 or stride_w > 1 else [(1, 1), (3, 3)]
            for kernel_shape in kernel_shapes:
                # padding doesn't make sense for a 1x1 kernel
                pads = [(0, 0),] if kernel_shape[0] == 1 or kernel_shape[1] == 1 else [(0, 0), (1, 1)]
                for pad in pads:
                    act_type = 'relu' if _random_boolean() else None
                    output_stats = _random_boolean()
                    print(' kernel= {}, pad = {}, output_stats={}, act_type = {}'.format(
                        kernel_shape, pad, output_stats, act_type))
                    convolution_stats_test(nchw_inshape=inshape, kernel_shape=kernel_shape,
                                           num_filter=num_filter, act_type=act_type,
                                           stride=stride, pad=pad, output_stats=output_stats,
                                           no_equiv_scale_bias=no_equiv_scale_bias, no_conv=False)
@with_seed()
def test_finalize_with_normalized_convolution():
    """Train-mode test of the BNStatsFinalize + NormalizedConvolution pair,
    forward AND backward, against a ground-truth graph of BatchNorm + Convolution.

    Covers RN50 layer shapes with optional 'relu' activation, optional stats
    outputs, and input normalization on/off (no_equiv_scale_bias). Both models
    update moving mean/var aux states, so each gets its own copy of them.
    """
    ctx = default_context()
    min_cuda_arch = 70
    max_cuda_arch = 86
    cuda_arch = mx.context.gpu_sm_arch(ctx.device_id)
    if cuda_arch < min_cuda_arch or cuda_arch > max_cuda_arch:
        print('Bypassing normalized convolution test on cuda arch {} ({} <= arch <= {}).'.format(
            cuda_arch, min_cuda_arch, max_cuda_arch))
        return
    # RN50 layer shapes
    nchw_shapes = [
        ( 64, 256, 56, 56),
        ( 64, 128, 28, 28),
        ( 64, 512, 28, 28),
        ( 64, 256, 14, 14),
        ( 64, 1024, 14, 14),
        ( 64, 512, 7, 7),
        ( 64, 2048, 7, 7),
        (128, 64, 56, 56),
        (128, 256, 56, 56),
        (128, 128, 28, 28),
        (128, 512, 28, 28),
        (128, 256, 14, 14),
        (128, 1024, 14, 14),
        (128, 512, 7, 7),
        (128, 2048, 7, 7),
    ]
    # Make dataset stats (to input to BNStatsFinalize)
    def create_input_stats_np(data_np):
        """Return per-feature (sum, sum_of_squares) of an NHWC numpy array in fp32."""
        data_fp32_np = data_np.astype(np.float32)
        not_feature_axes = (0, 1, 2)
        feature_sum_np = data_fp32_np.sum(axis=not_feature_axes)
        feature_sum_squares_np = np.square(data_fp32_np).sum(axis=not_feature_axes)
        return (feature_sum_np, feature_sum_squares_np)
    def create_output_stats(data, output_stats):
        """Optionally append symbolic per-feature sum and sum_squares outputs to 'data'."""
        if output_stats:
            data_fp32 = mx.sym.cast(data, np.float32)
            not_feature_axes = (0, 1, 2)
            feature_sum = data_fp32.sum(axis=not_feature_axes)
            feature_sum_squares = data_fp32.square().sum(axis=not_feature_axes)
            return mx.sym.Group([data, feature_sum, feature_sum_squares])
        else:
            return data
    def out_shape(nhwc_inshape, num_filters, kernel_shape, stride, pad):
        """Standard convolution output-shape arithmetic, NHWC in -> NHWC out."""
        (n, h, w, _) = nhwc_inshape
        (kernel_h, kernel_w) = kernel_shape
        (stride_h, stride_w) = stride
        (pad_h, pad_w) = pad
        out_shape_h = 1 + ((h + 2 * pad_h - kernel_h) // stride_h)
        out_shape_w = 1 + ((w + 2 * pad_w - kernel_w) // stride_w)
        return (n, out_shape_h, out_shape_w, num_filters)
    # flip a dataset about the 1st dimension
    def flip(data):
        return mx.sym.flip(data, axis=0)
    # return a new symbol that isolates the input symbol's outputs
    def buffer(sym):
        # flip(flip(x)) is an identity transform that forces a copy, so the
        # downstream op cannot write in-place over the original output.
        num_outputs = len(sym.list_outputs())
        if num_outputs == 1:
            return flip(flip(sym))
        else:
            flipped_outputs = [ flip(flip(sym[i])) for i in range(num_outputs)]
            return mx.sym.Group(flipped_outputs)
    # Test fused op without input normalization. Options for activation and output of stats.
    def finalize_norm_conv_test(nchw_inshape, kernel_shape, num_filter, act_type, stride,
                                pad, output_stats, no_equiv_scale_bias, no_conv,
                                eps, momentum):
        """Build both models for one configuration, run forward (and backward when
        supported) and compare outputs and gradients within tolerances.
        """
        # If we are disabling the convolution (no_conv = True), then set kernel=1x1 and weights 1
        if no_conv:
            if kernel_shape != (1,1):
                print('Ignoring kernel_shape {}, forcing 1x1 in no_conv mode.'.format(kernel_shape))
            kernel_shape = (1,1)
        (n, c, h, w) = nchw_inshape
        X = mx.sym.Variable('X')
        W = mx.sym.Variable('W')
        SUM = mx.sym.Variable('SUM')
        SUMSQ = mx.sym.Variable('SUMSQ')
        B = mx.sym.Variable('B') # beta, i.e. bias
        G = mx.sym.Variable('G') # gamma, i.e. scale
        # randomly insert buffering here to exercise in-place vs. copy of gamma/beta by Finalize
        if _random_boolean():
            B = buffer(B)
            G = buffer(G)
        MovMean = mx.sym.Variable('MovMean')
        MovVar = mx.sym.Variable('MovVar')
        # make 'ground truth' symbol using standard Batchnorm and Convolution
        if no_equiv_scale_bias and (act_type is None):
            normalized = X
        elif no_equiv_scale_bias:
            normalized = mx.sym.Activation(data=X, act_type=act_type)
        else:
            normalized = mx.sym.BatchNorm(data=X, gamma=G, beta=B, act_type=act_type,
                                          moving_mean=MovMean, moving_var=MovVar,
                                          eps=eps, momentum=momentum, fix_gamma=False,
                                          use_global_stats=False, output_mean_var=False,
                                          cudnn_off=False, name=None, axis=-1)
        (r, s) = kernel_shape
        layout = 'NHWC'
        conv_args = {'weight':W, 'num_filter':num_filter, 'kernel':kernel_shape,
                     'stride':stride, 'pad':pad, 'layout':layout, 'name':'conv'}
        # For test failure reproducibility, fix the algos of the golden copy
        conv_sym = mx.sym.Convolution(data=normalized, no_bias=True,
                                      cudnn_algo_fwd=1,
                                      cudnn_algo_bwd_data=1,
                                      cudnn_algo_bwd_filter=1,
                                      **conv_args)
        conv_sym = create_output_stats(conv_sym, output_stats)
        # make symbol-under-test using Finalize and NormalizedConvolution
        if not no_equiv_scale_bias:
            elem_count = np.prod(nchw_inshape) // c
            (equiv_scale, equiv_bias, saved_mean, saved_inv_std, gamma_out, beta_out) = \
                mx.sym.BNStatsFinalize(sum=SUM, sum_squares=SUMSQ, gamma=G, beta=B,
                                       moving_mean=MovMean, moving_var=MovVar, eps=eps,
                                       momentum=momentum, fix_gamma=False,
                                       output_mean_var=True, elem_count=elem_count)
            # NormalizedConvolution makes use of conv_args but for stats-apply mode has more inputs:
            conv_args.update({'equiv_scale':equiv_scale, 'equiv_bias':equiv_bias,
                              'mean':saved_mean, 'var':saved_inv_std,
                              'gamma':gamma_out, 'beta':beta_out})
        norm_conv_sym = mx.sym.NormalizedConvolution(X, act_type=act_type,
                                                     no_equiv_scale_bias=no_equiv_scale_bias,
                                                     **conv_args)
        if not output_stats:
            # discard sum and sum_squares outputs before binding
            norm_conv_sym = norm_conv_sym[0]
        # make data inputs
        weight_shape = (num_filter, r, s, c)
        data_shape = (n, h, w, c)
        # x_np = np.fromfunction(lambda n, h, w, c: 3*((n+h+w)%2), data_shape)
        # x = mx.nd.array(x_np, dtype=np.float16, ctx=ctx)
        x = mx.ndarray.random.uniform(-0.5, 0.5, data_shape, dtype=np.float16, ctx=ctx)
        (feature_sum_np, feature_sum_squares_np) = create_input_stats_np(x.asnumpy())
        # NOTE: 'sum' shadows the builtin within this function scope.
        sum = mx.nd.array(feature_sum_np, dtype=np.float32)
        sum_squares = mx.nd.array(feature_sum_squares_np, dtype=np.float32)
        equiv_scale_bias_shape = (c,)
        scale_max = 1.25
        bias_max = 1
        # Comparing gradients of two symbols is tricky when a non-smooth function like 'relu'
        # is part of the function. We ensure that no relu inputs are near 0 (within a threshold)
        # by trying different beta/gamma values as needed.
        b_np = np.zeros(equiv_scale_bias_shape, dtype=np.float32)
        g_np = np.zeros(equiv_scale_bias_shape, dtype=np.float32)
        indices_to_set = np.array(range(c))
        while len(indices_to_set) > 0:
            # Re-roll only the features still producing near-zero relu inputs.
            for index in indices_to_set:
                b_np[index] = np.random.uniform(-bias_max, bias_max)
                g_np[index] = np.random.uniform(1.0/scale_max, scale_max)
            b = mx.nd.array(b_np, dtype=np.float32, ctx=ctx)
            g = mx.nd.array(g_np, dtype=np.float32, ctx=ctx)
            smallest_norm_fp16 = pow(2, -14)
            threshold = smallest_norm_fp16 / 2
            need_data_check = not no_equiv_scale_bias and act_type == 'relu'
            if need_data_check:
                indices_to_set = _has_near_zero_outputs(x, b, g, eps, threshold=threshold)
            else:
                indices_to_set = []
        # mov_mean_np = np.zeros(equiv_scale_bias_shape).astype(np.float32)
        # mov_var_np = np.ones(equiv_scale_bias_shape).astype(np.float32)
        mov_mean_np = np.random.uniform(-bias_max, bias_max, equiv_scale_bias_shape)
        mov_var_np = np.random.uniform(1.0/scale_max, scale_max, equiv_scale_bias_shape)
        # since the models change the moving mean and variance, each model gets their own copy
        mov_mean1 = mx.nd.array(mov_mean_np, dtype=np.float32, ctx=ctx)
        mov_mean2 = mx.nd.array(mov_mean_np, dtype=np.float32, ctx=ctx)
        mov_var1 = mx.nd.array(mov_var_np, dtype=np.float32, ctx=ctx)
        mov_var2 = mx.nd.array(mov_var_np, dtype=np.float32, ctx=ctx)
        if no_conv:
            weights = mx.ndarray.ones(weight_shape, dtype=np.float16, ctx=ctx)
        else:
            weights = mx.ndarray.random.uniform(-0.20, 0.20, weight_shape, dtype=np.float16, ctx=ctx)
        # These are the tensor's that receive the backpropped gradients (so an output of backward())
        # Copy 1 is for 'ground truth' symbol based on BatchNorm/Convolution ops
        d_x_out_gt = mx.ndarray.zeros(data_shape, dtype=np.float16, ctx=ctx)
        d_w_out_gt = mx.ndarray.zeros(weight_shape, dtype=np.float16, ctx=ctx)
        d_gamma_out_gt = mx.ndarray.zeros(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
        d_beta_out_gt = mx.ndarray.zeros(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
        # Copy 2 is for symbol based on BNStatsFinalize/NormalizedConvolution ops (=ones, not zeros)
        d_x_out = mx.ndarray.ones(data_shape, dtype=np.float16, ctx=ctx)
        d_w_out = mx.ndarray.ones(weight_shape, dtype=np.float16, ctx=ctx)
        d_gamma_out = mx.ndarray.ones(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
        d_beta_out = mx.ndarray.ones(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
        # bind i/o's to symbols to create executors
        grad_req = {'SUM':'null', 'SUMSQ':'null', 'MovMean':'null', 'MovVar':'null',
                    'X':'write', 'W':'write', 'G':'write', 'B':'write'}
        args_grad_dict_gt = {'X':d_x_out_gt, 'W':d_w_out_gt, 'G':d_gamma_out_gt, 'B':d_beta_out_gt}
        args_grad_dict = {'X':d_x_out, 'W':d_w_out, 'G':d_gamma_out, 'B':d_beta_out}
        args_dict = {'X':x, 'W':weights}
        # conv binding does not need SUM, and SUMSQ, but extra items are OK
        if not no_equiv_scale_bias:
            args_dict.update({'B':b, 'G':g, 'SUM':sum, 'SUMSQ':sum_squares})
        gt_aux_states_dict = \
            {'MovMean':mov_mean1, 'MovVar':mov_var1}
        finalize_aux_states_dict = \
            {'MovMean':mov_mean2, 'MovVar':mov_var2}
        conv_exe = conv_sym.bind(ctx=ctx, args=args_dict, args_grad=args_grad_dict_gt,
                                 aux_states=gt_aux_states_dict, grad_req=grad_req)
        norm_conv_exe = norm_conv_sym.bind(ctx=ctx, args=args_dict, args_grad=args_grad_dict,
                                           aux_states=finalize_aux_states_dict, grad_req=grad_req)
        # Execute forward() graph calculation
        # need is_train=True to keep Batchnorm using the mini-batch mean and variance
        conv_outputs = conv_exe.forward(is_train=True)
        # need is_train=True to keep stats from being turned off
        norm_conv_outputs = norm_conv_exe.forward(is_train=True)
        # Check forward outputs
        outputs = ['out', 'sum', 'sum_squares']
        # greater atols needs for 'sum' and 'sum_squares', also if input scale/bias is applied
        if no_equiv_scale_bias:
            tols = [(1e-2, 2e-2), (1e-2, 2), (1e-2, 2)]
        else:
            # 'sum' seems to have a large span (e.g. -400K -> +400K) so a large absolute tolerance
            # is needed to cover those cases when the result is near 0 and rtol can't help.
            # One possible source of the large sum tolerance is the internal rounding of the
            # mean to fp16. Any rounding amount will give a bias to the conv inputs and so the sum.
            # 'sum_squares' doesn't have this issue because rtol handles the always-positive result.
            per_element_atol = 5e-3
            sum_atol = n * h * w * per_element_atol
            tols = [(1e-2, 1e-1), (1e-1, sum_atol), (1e-2, 2)]
        num_outputs = 3 if output_stats else 1
        for idx in range(num_outputs):
            out_name = outputs[idx]
            conv_data = conv_outputs[idx]
            norm_conv_data = norm_conv_outputs[idx]
            (rtol, atol) = tols[idx]
            assert_almost_equal(conv_data, norm_conv_data, rtol=rtol, atol=atol,
                                names=('conv_{}'.format(out_name),
                                       'norm_conv_{}'.format(out_name)))
        # Check backward function
        if no_equiv_scale_bias and act_type is not None:
            # gradient calculation not supported for this configuration
            return
        # Create backward gradients
        outshape = out_shape(data_shape, num_filter, kernel_shape, stride, pad)
        d_out_in = mx.ndarray.random.uniform(-0.2, 0.2, outshape,
                                             dtype=np.float16, ctx=ctx)
        # not really needed
        sum_shape = (num_filter,)
        # gradients on these outputs will be summed into the d_out_in for the ground truth
        # symbol, so make sure these are 0.
        d_sum_in = mx.ndarray.zeros(sum_shape, dtype=np.float32, ctx=ctx)
        d_sum_squares_in = mx.ndarray.zeros(sum_shape, dtype=np.float32, ctx=ctx)
        # d_sum_in = mx.ndarray.random.uniform(0.0, 1.0, sum_shape,
        #                                      dtype=np.float32, ctx=ctx)
        # d_sum_squares_in = mx.ndarray.random.uniform(0.0, 1.0, sum_shape,
        #                                              dtype=np.float32, ctx=ctx)
        # Execute backward() graph calculation
        if output_stats:
            conv_outputs = conv_exe.backward([d_out_in, d_sum_in, d_sum_squares_in])
            norm_conv_outputs = norm_conv_exe.backward([d_out_in, d_sum_in, d_sum_squares_in])
        else:
            conv_outputs = conv_exe.backward([d_out_in,])
            norm_conv_outputs = norm_conv_exe.backward([d_out_in,])
        # Check weight gradient
        out_name = 'd_w'
        assert_almost_equal(d_w_out_gt, d_w_out, atol=0.3, rtol=0.1,
                            names=('conv_{}'.format(out_name), 'norm_conv_{}'.format(out_name)))
        # Check data gradient
        # This check is flakey when act_type = relu because if the two models differ on whether
        # the normalized value is above or below 0, then the gradient may or may-not be backpropped.
        # To fix this test, we could run a separate model with relu off, capture the normalized
        # output and then mask off the gradient comparison when the normalized value is near 0.
        out_name = 'd_x'
        if act_type is None:
            assert_almost_equal(d_x_out_gt, d_x_out, atol=0.1, rtol=0.1,
                                names=('conv_{}'.format(out_name), 'norm_conv_{}'.format(out_name)))
        # Check gamma and beta gradients
        out_name = 'd_gamma'
        assert_almost_equal(d_gamma_out_gt, d_gamma_out, atol=10, rtol=0.1,
                            names=('conv_{}'.format(out_name), 'norm_conv_{}'.format(out_name)))
        out_name = 'd_beta'
        assert_almost_equal(d_beta_out_gt, d_beta_out, atol=10, rtol=0.1,
                            names=('conv_{}'.format(out_name), 'norm_conv_{}'.format(out_name)))
    # Test input normalization function only: no_equiv_scale_bias = False, 1x1 unity-weights conv
    # Also test with 'relu' activation on and off.
    print('\nTest of input normalization without convolution function.')
    eps = 1e-4
    momentum = 0.9
    for i in range(len(nchw_shapes)):
        inshape = nchw_shapes[i]
        (n, c, h, w) = inshape
        num_filter = 32
        outshape = (n, num_filter, h, w)
        stride = (1,1)
        print('nchw inshape = {}, outshape = {}, stride = {}'.format(inshape, outshape, stride))
        kernel_shape = (1, 1)
        pad = (0, 0)
        output_stats = False
        act_type = 'relu' if _random_boolean() else None
        print(' kernel= {}, pad = {}, output_stats={}, act_type = {}'.format(
            kernel_shape, pad, output_stats, act_type))
        finalize_norm_conv_test(nchw_inshape=inshape, kernel_shape=kernel_shape,
                                num_filter=num_filter, act_type=act_type,
                                stride=stride, pad=pad, output_stats=output_stats,
                                no_equiv_scale_bias=False,
                                no_conv=True, eps=eps, momentum=momentum)
    # Test convolution and stats-gen functions, first without, then with, input normalization.
    # Also test with 'relu' activation on and off.
    # for no_equiv_scale_bias in [False, True]:
    for no_equiv_scale_bias in [True, False]:
        if no_equiv_scale_bias:
            print('\nTest of convolution function, without input normalization.')
        else:
            print('\nTest of convolution function with input normalization.')
        for i in range(len(nchw_shapes)):
            inshape = nchw_shapes[i]
            (n, c, h, w) = inshape
            (stride_h, stride_w) = (1,1)
            # Leverage next test case (if available) to determine outshape, strides
            if i == len(nchw_shapes)-1:
                num_filter = nchw_shapes[i][1]
            else:
                num_filter = nchw_shapes[i+1][1]
                if nchw_shapes[i+1][2] < nchw_shapes[i][2]:
                    stride_h = nchw_shapes[i][2] // nchw_shapes[i+1][2]
                if nchw_shapes[i+1][3] < nchw_shapes[i][3]:
                    stride_w = nchw_shapes[i][3] // nchw_shapes[i+1][3]
            stride = (stride_h, stride_w)
            outshape = (n, num_filter, h // stride_h, w // stride_w)
            print('nchw inshape = {}, outshape = {}, stride = {}'.format(inshape, outshape, stride))
            # Only 3x3 kernel supports strides, not 1x1
            # kernel_shapes = [(1, 1),]
            kernel_shapes = [(3, 3),] if stride_h > 1 or stride_w > 1 else [(1, 1), (3, 3)]
            for kernel_shape in kernel_shapes:
                # padding doesn't make sense for a 1x1 kernel
                pads = [(0, 0),] if kernel_shape[0] == 1 or kernel_shape[1] == 1 else [(0, 0), (1, 1)]
                for pad in pads:
                    act_type = 'relu' if _random_boolean() else None
                    output_stats = _random_boolean()
                    print(' kernel= {}, pad = {}, output_stats={}, act_type = {}'.format(
                        kernel_shape, pad, output_stats, act_type))
                    finalize_norm_conv_test(nchw_inshape=inshape, kernel_shape=kernel_shape,
                                            num_filter=num_filter, act_type=act_type,
                                            stride=stride, pad=pad, output_stats=output_stats,
                                            no_equiv_scale_bias=no_equiv_scale_bias,
                                            no_conv=False, eps=eps, momentum=momentum)
@with_seed()
def test_bn_stats_finalize():
    """Test the BNStatsFinalize op against a ground truth built from cudnn
    BatchNorm plus hand-computed equiv_scale/equiv_bias formulas.

    Covers inference and training modes, write-in-place output handling, and
    (in training mode) backprop pass-through of gamma/beta gradients.
    """
    ctx = default_context()
    min_cuda_arch = 70
    cuda_arch = mx.context.gpu_sm_arch(ctx.device_id)
    if cuda_arch < min_cuda_arch:
        print('Bypassing bn_stats_finalize test on cuda arch {} (need >= {}).'.format(
            cuda_arch, min_cuda_arch))
        return
    nchw_shapes = [
        # n*h*w == 2 included to flush out 'sample' vs. 'population' variance issues
        ( 1, 8, 1, 2),
        # RN50 layer shapes
        ( 64, 256, 56, 56),
        ( 64, 128, 28, 28),
        ( 64, 512, 28, 28),
        ( 64, 256, 14, 14),
        ( 64, 1024, 14, 14),
        ( 64, 512, 7, 7),
        ( 64, 2048, 7, 7),
        (128, 64, 56, 56),
        (128, 256, 56, 56),
        (128, 128, 28, 28),
        (128, 512, 28, 28),
        (128, 256, 14, 14),
        (128, 1024, 14, 14),
        (128, 512, 7, 7),
        (128, 2048, 7, 7),
    ]
    # Prepare the input for a standard Convolution so it will mimic NormalizedConvolution
    def normalize_input(data, equiv_scale, equiv_bias, act_type, no_equiv_scale_bias):
        """Apply (data * equiv_scale + equiv_bias) then optional activation, symbolically."""
        normalized = data if no_equiv_scale_bias else \
                     mx.sym.broadcast_add(mx.sym.broadcast_mul(data, equiv_scale),
                                          equiv_bias)
        return normalized if act_type is None else mx.sym.Activation(normalized, act_type=act_type)
    # Make dataset stats (to input to BNStatsFinalize)
    def create_output_stats(data):
        """Return symbolic per-feature (sum, sum_of_squares) of NHWC 'data' in fp32."""
        data_fp32 = mx.sym.cast(data, np.float32)
        not_feature_axes = (0, 1, 2)
        feature_sum = data_fp32.sum(axis=not_feature_axes)
        feature_sum_squares = data_fp32.square().sum(axis=not_feature_axes)
        return (feature_sum, feature_sum_squares)
    # flip a dataset about the 1st dimension
    def flip(data):
        return mx.sym.flip(data, axis=0)
    # return a new symbol that isolates the input symbol's outputs
    def buffer(sym):
        # flip(flip(x)) is an identity transform that forces a copy, isolating
        # the symbol's outputs from in-place writes by downstream ops.
        num_outputs = len(sym.list_outputs())
        if num_outputs == 1:
            return flip(flip(sym))
        else:
            flipped_outputs = [ flip(flip(sym[i])) for i in range(num_outputs)]
            return mx.sym.Group(flipped_outputs)
    # Test of BNStatsFinalize op against a 'ground truth' of Batchnorm and home-grown functions.
    def bn_stats_finalize_test(nchw_inshape, eps, momentum, is_train, test_writeinplace):
        (n, c, h, w) = nchw_inshape
        # elements per feature (BNStatsFinalize normalizes sums by this count)
        elem_count = np.prod(nchw_inshape) // c
        X = mx.sym.Variable('X')
        G = mx.sym.Variable('G') # gamma, i.e. scale
        B = mx.sym.Variable('B') # beta, i.e. bias
        if (test_writeinplace):
            G = buffer(G)
            B = buffer(B)
        MovMean = mx.sym.Variable('MovMean')
        MovVar = mx.sym.Variable('MovVar')
        # Make ground truth (i.e. 'gt') model using conventional cudnn Batchnorm, which processes
        # the running mean using the 'sample variance' with N = elem_count - 1. To avoid use of
        # the NHWCBatchnorm, which uses 'population variance', we transpose around the Batchnorm op.
        # The input data 'X' starts in 'NHWC'.
        # For NHWC -> NCHW, axes=(0,3,1,2)
        transposed = mx.sym.transpose(data=X, axes=(0,3,1,2))
        (data, saved_mean, saved_inv_std) = mx.sym.BatchNorm(data=transposed, gamma=G, beta=B,
                                                             moving_mean=MovMean, moving_var=MovVar,
                                                             eps=eps, momentum=momentum, fix_gamma=False,
                                                             use_global_stats=False, output_mean_var=True,
                                                             cudnn_off=False, name=None, axis=1)
        # For NCHW -> NHWC axes=(0,2,3,1)
        data = mx.sym.transpose(data=data, axes=(0,2,3,1))
        # Inference-mode equivalents are derived from the moving (running) stats.
        equiv_scale_inf_fp32 = G / mx.sym.sqrt(MovVar + eps)
        equiv_scale_inf = mx.sym.cast(equiv_scale_inf_fp32, dtype=np.float16)
        equiv_bias_inf_fp32 = B - G * MovMean / mx.sym.sqrt(MovVar + eps)
        equiv_bias_inf = mx.sym.cast(equiv_bias_inf_fp32, dtype=np.float16)
        # Training-mode equivalents are derived from the mini-batch stats.
        (sum, sum_squares) = create_output_stats(X)
        batch_mean_fp32 = sum / elem_count
        batch_variance_fp32 = sum_squares / elem_count - mx.sym.square(batch_mean_fp32)
        equiv_scale_train_fp32 = G / mx.sym.sqrt(batch_variance_fp32 + eps)
        equiv_scale_train = mx.sym.cast(equiv_scale_train_fp32, dtype=np.float16)
        equiv_bias_train_fp32 = B - G * batch_mean_fp32 / mx.sym.sqrt(batch_variance_fp32 + eps)
        equiv_bias_train = mx.sym.cast(equiv_bias_train_fp32, dtype=np.float16)
        # Leave bn data as part of symbol output in case operator doesn't like req[kOut]==kNullOp
        if is_train:
            gt_sym = mx.sym.Group([equiv_scale_train, equiv_bias_train,
                                   saved_mean, saved_inv_std, data])
        else:
            gt_sym = mx.sym.Group([equiv_scale_inf, equiv_bias_inf, data])
        # Make BNStatsFinalize model, uses sum and sum_squares created above based on the data
        finalize_sym = mx.sym.BNStatsFinalize(sum=sum, sum_squares=sum_squares, gamma=G, beta=B,
                                              moving_mean=MovMean, moving_var=MovVar, eps=eps,
                                              momentum=momentum, fix_gamma=False,
                                              output_mean_var=is_train, elem_count=elem_count)
        if (test_writeinplace):
            finalize_sym = buffer(finalize_sym)
        data_shape = (n, h, w, c)
        x = mx.ndarray.random.uniform(-0.5, 0.5, data_shape, dtype=np.float16, ctx=ctx)
        equiv_scale_bias_shape = (c,)
        scale_max = 1.25
        bias_max = 1
        b = mx.ndarray.random.uniform(-bias_max, bias_max, equiv_scale_bias_shape,
                                      dtype=np.float32, ctx=ctx)
        g = mx.ndarray.random.uniform(1.0/scale_max, scale_max, equiv_scale_bias_shape,
                                      dtype=np.float32, ctx=ctx)
        mov_mean_np = np.random.uniform(-bias_max, bias_max, equiv_scale_bias_shape)
        mov_var_np = np.random.uniform(1.0/scale_max, scale_max, equiv_scale_bias_shape)
        # since the models change the moving mean and variance, each model gets their own copy
        mov_mean1 = mx.nd.array(mov_mean_np, dtype=np.float32, ctx=ctx)
        mov_mean2 = mx.nd.array(mov_mean_np, dtype=np.float32, ctx=ctx)
        mov_var1 = mx.nd.array(mov_var_np, dtype=np.float32, ctx=ctx)
        mov_var2 = mx.nd.array(mov_var_np, dtype=np.float32, ctx=ctx)
        args_dict = {'X':x, 'B':b, 'G':g}
        gt_aux_states_dict =\
            {'MovMean':mov_mean1, 'MovVar':mov_var1}
        finalize_aux_states_dict =\
            {'MovMean':mov_mean2, 'MovVar':mov_var2}
        gt_exe = gt_sym.bind(ctx=ctx, args=args_dict,
                             aux_states=gt_aux_states_dict, grad_req='null')
        finalize_exe = finalize_sym.bind(ctx=ctx, args=args_dict,
                                         aux_states=finalize_aux_states_dict, grad_req='null')
        gt_outputs = gt_exe.forward(is_train=is_train)
        finalize_outputs = finalize_exe.forward(is_train=is_train)
        outputs = ['equiv_scale', 'equiv_bias', 'saved_mean', 'saved_var']
        tols = [(1e-2, 1e-2), (1e-2, 1e-2), (1e-2, 1e-2), (1e-2, 1e-2)]
        # saved_mean/saved_var are only produced in training mode
        num_outputs = 4 if is_train else 2
        for idx in range(num_outputs):
            out_name = outputs[idx]
            gt_data = gt_outputs[idx]
            finalize_data = finalize_outputs[idx]
            (rtol, atol) = tols[idx]
            assert_almost_equal(gt_data, finalize_data, rtol=rtol, atol=atol,
                                names=('gt_{}'.format(out_name),
                                       'finalize_{}'.format(out_name)))
        if is_train:
            # NOTE(review): reuses the (rtol, atol) left over from the last loop
            # iteration above; all tolerance pairs are identical so this is benign.
            for aux_name in ['MovMean', 'MovVar']:
                gt_data = gt_exe.aux_dict[aux_name]
                finalize_data = finalize_exe.aux_dict[aux_name]
                assert_almost_equal(gt_data, finalize_data, rtol=rtol, atol=atol,
                                    names=('gt_{}'.format(aux_name),
                                           'finalize_{}'.format(aux_name)))
            # Also test finalize ability to propagate beta and gamma
            # gamma is output index 4
            gamma_out = finalize_outputs[4]
            assert_almost_equal(gamma_out, g, atol=0.0, rtol=0.0)
            # beta is output index 5 (comment fixed: previously mislabeled 'gamma')
            beta_out = finalize_outputs[5]
            assert_almost_equal(beta_out, b, atol=0.0, rtol=0.0)
        # Now test BNStatsFinalize ability to backprop gradient in a training graph
        if is_train:
            S = mx.sym.Variable('S') # sum
            SS = mx.sym.Variable('SS') # sum_squares
            finalize_sym = mx.sym.BNStatsFinalize(sum=S, sum_squares=SS, gamma=G, beta=B,
                                                  moving_mean=MovMean, moving_var=MovVar, eps=eps,
                                                  momentum=momentum, fix_gamma=False,
                                                  output_mean_var=is_train, elem_count=elem_count)
            s = mx.ndarray.zeros(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
            ss = mx.ndarray.zeros(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
            b = mx.ndarray.random.uniform(-bias_max, bias_max, equiv_scale_bias_shape,
                                          dtype=np.float32, ctx=ctx)
            g = mx.ndarray.random.uniform(1.0/scale_max, scale_max, equiv_scale_bias_shape,
                                          dtype=np.float32, ctx=ctx)
            args_dict = {'S':s, 'SS':ss, 'B':b, 'G':g}
            grad_req = {'S':'null', 'SS':'null', 'G':'write', 'B':'write'}
            d_gamma_out = mx.ndarray.zeros(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
            d_beta_out = mx.ndarray.zeros(equiv_scale_bias_shape, dtype=np.float32, ctx=ctx)
            args_grad_dict = {'G':d_gamma_out, 'B':d_beta_out}
            if (test_writeinplace):
                finalize_sym = buffer(finalize_sym)
            finalize_exe = finalize_sym.bind(ctx=ctx, args=args_dict, args_grad=args_grad_dict,
                                             aux_states=finalize_aux_states_dict, grad_req=grad_req)
            finalize_exe.forward(is_train=is_train)
            d_equiv_scale_in = mx.ndarray.random.uniform(0.0, 1.0, equiv_scale_bias_shape,
                                                         dtype=np.float16, ctx=ctx)
            d_equiv_bias_in = mx.ndarray.random.uniform(0.0, 1.0, equiv_scale_bias_shape,
                                                        dtype=np.float16, ctx=ctx)
            d_mean_in = mx.ndarray.random.uniform(0.0, 1.0, equiv_scale_bias_shape,
                                                  dtype=np.float32, ctx=ctx)
            d_inv_stddev_in = mx.ndarray.random.uniform(0.0, 1.0, equiv_scale_bias_shape,
                                                        dtype=np.float32, ctx=ctx)
            d_gamma_in = mx.ndarray.random.uniform(0.0, 1.0, equiv_scale_bias_shape,
                                                   dtype=np.float32, ctx=ctx)
            d_beta_in = mx.ndarray.random.uniform(0.0, 1.0, equiv_scale_bias_shape,
                                                  dtype=np.float32, ctx=ctx)
            finalize_exe.backward([d_equiv_scale_in, d_equiv_bias_in,
                                   d_mean_in, d_inv_stddev_in, d_gamma_in, d_beta_in])
            # gamma/beta gradients should pass straight through Finalize unchanged
            assert_almost_equal(d_gamma_in, d_gamma_out, atol=0.0, rtol=0.0)
            assert_almost_equal(d_beta_in, d_beta_out, atol=0.0, rtol=0.0)
    # Test BNStatsFinalize op in both inference and training modes
    for is_train in [False, True]:
        for test_writeinplace in [False, True]:
            # writeinplace test only relevant for training graphs
            if not is_train and test_writeinplace:
                continue
            for i in range(len(nchw_shapes)):
                inshape = nchw_shapes[i]
                eps = 1e-4
                momentum = 0.9
                bn_stats_finalize_test(inshape, eps, momentum, is_train, test_writeinplace)
@with_seed()
def test_norm_convolution_finalize():
    """Check NormConvolution's fused batch-norm 'finalize' outputs.

    Builds a ground-truth graph from the stock BatchNorm op (plus hand-computed
    fp16 equiv_scale/equiv_bias) and compares it against the fused
    mx.sym.NormConvolution outputs, in both inference and training modes.
    """
    ctx = default_context()
    # NormConvolution is only exercised on this CUDA arch range; bail out elsewhere.
    min_cuda_arch = 70
    max_cuda_arch = 86
    cuda_arch = mx.context.gpu_sm_arch(ctx.device_id)
    if cuda_arch < min_cuda_arch or cuda_arch > max_cuda_arch:
        print('Bypassing normalized convolution test on cuda arch {} ({} <= arch <= {}).'.format(
            cuda_arch, min_cuda_arch, max_cuda_arch))
        return
    nchw_shapes = [
        # n*h*w == 2 included to flush out 'sample' vs. 'population' variance issues
        ( 1, 32, 1, 2),
        # RN50 layer shapes
        ( 64, 256, 56, 56),
        ( 64, 128, 28, 28),
        ( 64, 512, 28, 28),
        ( 64, 256, 14, 14),
        ( 64, 1024, 14, 14),
        ( 64, 512, 7, 7),
        ( 64, 2048, 7, 7),
        (128, 64, 56, 56),
        (128, 256, 56, 56),
        (128, 128, 28, 28),
        (128, 512, 28, 28),
        (128, 256, 14, 14),
        (128, 1024, 14, 14),
        (128, 512, 7, 7),
        (128, 2048, 7, 7),
    ]
    # Make dataset stats (to input to BNStatsFinalize): per-channel sum and
    # sum-of-squares over the non-feature axes of an NHWC tensor.
    def create_output_stats(data):
        data_fp32 = mx.sym.cast(data, np.float32)
        not_feature_axes = (0, 1, 2)
        feature_sum = data_fp32.sum(axis=not_feature_axes)
        feature_sum_squares = data_fp32.square().sum(axis=not_feature_axes)
        return (feature_sum, feature_sum_squares)
    # Test of BNStatsFinalize op against a 'ground truth' of Batchnorm and home-grown functions.
    def bn_stats_finalize_test(nchw_inshape, eps, momentum, is_train):
        (n, c, h, w) = nchw_inshape
        # Samples per channel (the reduction count for the per-channel stats).
        elem_count = np.prod(nchw_inshape) // c
        X = mx.sym.Variable('X')
        G = mx.sym.Variable('G')  # gamma, i.e. scale
        B = mx.sym.Variable('B')  # beta, i.e. bias
        W = mx.sym.Variable('W')  # weight, dummy value to keep NormConvolution happy
        MovMean = mx.sym.Variable('MovMean')
        MovVar = mx.sym.Variable('MovVar')
        # Make ground truth (i.e. 'gt') model using conventional cudnn Batchnorm, which processes
        # the running mean using the 'sample variance' with N = elem_count - 1. To avoid use of
        # the NHWCBatchnorm, which uses 'population variance', we transpose around the Batchnorm op.
        # The input data 'X' starts in 'NHWC'.
        # For NHWC -> NCHW, axes=(0,3,1,2)
        transposed = mx.sym.transpose(data=X, axes=(0,3,1,2))
        (data, saved_mean, saved_inv_std) = mx.sym.BatchNorm(data=transposed, gamma=G, beta=B,
                                                             moving_mean=MovMean, moving_var=MovVar,
                                                             eps=eps, momentum=momentum, fix_gamma=False,
                                                             use_global_stats=False, output_mean_var=True,
                                                             cudnn_off=False, name=None, axis=1)
        # For NCHW -> NHWC axes=(0,2,3,1)
        data = mx.sym.transpose(data=data, axes=(0,2,3,1))
        # Inference-mode equivalents come from the running (moving) statistics.
        equiv_scale_inf_fp32 = G / mx.sym.sqrt(MovVar + eps)
        equiv_scale_inf = mx.sym.cast(equiv_scale_inf_fp32, dtype=np.float16)
        equiv_bias_inf_fp32 = B - G * MovMean / mx.sym.sqrt(MovVar + eps)
        equiv_bias_inf = mx.sym.cast(equiv_bias_inf_fp32, dtype=np.float16)
        # NOTE(review): 'sum' shadows the Python builtin here; preserved as-is.
        (sum, sum_squares) = create_output_stats(X)
        # Training-mode equivalents come from the current batch statistics
        # (population variance: divide by elem_count, not elem_count - 1).
        batch_mean_fp32 = sum / elem_count
        batch_variance_fp32 = sum_squares / elem_count - mx.sym.square(batch_mean_fp32)
        equiv_scale_train_fp32 = G / mx.sym.sqrt(batch_variance_fp32 + eps)
        equiv_scale_train = mx.sym.cast(equiv_scale_train_fp32, dtype=np.float16)
        equiv_bias_train_fp32 = B - G * batch_mean_fp32 / mx.sym.sqrt(batch_variance_fp32 + eps)
        equiv_bias_train = mx.sym.cast(equiv_bias_train_fp32, dtype=np.float16)
        # Leave bn data as part of symbol output in case operator doesn't like req[kOut]==kNullOp
        if is_train:
            gt_sym = mx.sym.Group([saved_mean, saved_inv_std,
                                   equiv_scale_train, equiv_bias_train, data])
        else:
            gt_sym = mx.sym.Group([equiv_scale_inf, equiv_bias_inf, data])
        # Make BNStatsFinalize model, uses sum and sum_squares created above based on the data
        num_filter = 32
        (data, _, _, saved_mean, saved_inv_std, equiv_scale, equiv_bias) = \
            mx.sym.NormConvolution(data=X, weight=W, kernel=(1,1), num_filter=num_filter,
                                   in_sum=sum, in_sum_squares=sum_squares, gamma=G, beta=B,
                                   moving_mean=MovMean, moving_var=MovVar, eps=eps,
                                   momentum=momentum, fix_gamma=False,
                                   output_mean_var=True,
                                   no_norm=False,
                                   output_equiv_scale_bias=True, layout='NHWC')
        if is_train:
            finalize_sym = mx.sym.Group([saved_mean, saved_inv_std,
                                         equiv_scale, equiv_bias, data])
        else:
            finalize_sym = mx.sym.Group([equiv_scale, equiv_bias, data])
        data_shape = (n, h, w, c)
        x = mx.ndarray.random.uniform(-0.5, 0.5, data_shape, dtype=np.float16, ctx=ctx)
        equiv_scale_bias_shape = (c,)
        # Dummy all-zero weight; only the stats/equiv outputs are compared below.
        w = mx.nd.zeros((num_filter,1,1,c), dtype=np.float16, ctx=ctx)
        scale_max = 1.25
        bias_max = 1
        b = mx.ndarray.random.uniform(-bias_max, bias_max, equiv_scale_bias_shape,
                                      dtype=np.float32, ctx=ctx)
        g = mx.ndarray.random.uniform(1.0/scale_max, scale_max, equiv_scale_bias_shape,
                                      dtype=np.float32, ctx=ctx)
        mov_mean_np = np.random.uniform(-bias_max, bias_max, equiv_scale_bias_shape)
        mov_var_np = np.random.uniform(1.0/scale_max, scale_max, equiv_scale_bias_shape)
        # since the models change the moving mean and variance, each model gets their own copy
        mov_mean1 = mx.nd.array(mov_mean_np, dtype=np.float32, ctx=ctx)
        mov_mean2 = mx.nd.array(mov_mean_np, dtype=np.float32, ctx=ctx)
        mov_var1 = mx.nd.array(mov_var_np, dtype=np.float32, ctx=ctx)
        mov_var2 = mx.nd.array(mov_var_np, dtype=np.float32, ctx=ctx)
        args_dict = {'X':x, 'B':b, 'G':g, 'W':w}
        gt_aux_states_dict =\
            {'MovMean':mov_mean1, 'MovVar':mov_var1}
        finalize_aux_states_dict =\
            {'MovMean':mov_mean2, 'MovVar':mov_var2}
        gt_exe = gt_sym.bind(ctx=ctx, args=args_dict,
                             aux_states=gt_aux_states_dict, grad_req='null')
        finalize_exe = finalize_sym.bind(ctx=ctx, args=args_dict,
                                         aux_states=finalize_aux_states_dict, grad_req='null')
        finalize_outputs = finalize_exe.forward(is_train=is_train)
        gt_outputs = gt_exe.forward(is_train=is_train)
        if is_train:
            outputs = ['saved_mean', 'saved_var', 'equiv_scale', 'equiv_bias']
        else:
            outputs = ['equiv_scale', 'equiv_bias']
        tols = [(1e-2, 1e-2), (1e-2, 1e-2), (1e-2, 1e-2), (1e-2, 1e-2)]
        for idx, out_name in enumerate(outputs):
            finalize_data = finalize_outputs[idx]
            gt_data = gt_outputs[idx]
            (rtol, atol) = tols[idx]
            assert_almost_equal(gt_data, finalize_data, rtol=rtol, atol=atol,
                                names=('gt_{}'.format(out_name),
                                       'finalize_{}'.format(out_name)))
        if is_train:
            # NOTE(review): rtol/atol below are whatever the last loop iteration
            # left bound; confirm reusing them for the aux comparison is intended.
            for aux_name in ['MovMean', 'MovVar']:
                gt_data = gt_exe.aux_dict[aux_name]
                finalize_data = finalize_exe.aux_dict[aux_name]
                assert_almost_equal(gt_data, finalize_data, rtol=rtol, atol=atol,
                                    names=('gt_{}'.format(aux_name),
                                           'finalize_{}'.format(aux_name)))
    # Test BNStatsFinalize op in both inference and training modes
    for is_train in [False, True]:
        for i in range(len(nchw_shapes)):
            inshape = nchw_shapes[i]
            eps = 1e-4
            momentum = 0.9
            bn_stats_finalize_test(inshape, eps, momentum, is_train)
if __name__ == '__main__':
    # Run this module's tests under the nose runner when executed directly.
    import nose
    nose.runmodule()
| 52.432679
| 105
| 0.587115
| 10,349
| 74,769
| 3.996328
| 0.051696
| 0.029015
| 0.029112
| 0.023019
| 0.921708
| 0.909352
| 0.897892
| 0.886914
| 0.88041
| 0.868635
| 0
| 0.034459
| 0.307574
| 74,769
| 1,425
| 106
| 52.469474
| 0.76439
| 0.177747
| 0
| 0.843636
| 0
| 0
| 0.04877
| 0
| 0
| 0
| 0
| 0
| 0.018182
| 1
| 0.028182
| false
| 0.004545
| 0.007273
| 0.003636
| 0.066364
| 0.026364
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
51c215503402aafb815e9e28aee5722fd629638d
| 1,755
|
py
|
Python
|
Simulation/Test/DiseaseBank/numerical_actions.py
|
anoppa/Proyecto-IA-Sim-Comp
|
71132bd0c6cb5aeff812fd96e0017be71178a5f3
|
[
"MIT"
] | 1
|
2022-03-11T14:24:10.000Z
|
2022-03-11T14:24:10.000Z
|
Simulation/Test/DiseaseBank/numerical_actions.py
|
anoppa/Proyecto-IA-Sim-Comp
|
71132bd0c6cb5aeff812fd96e0017be71178a5f3
|
[
"MIT"
] | null | null | null |
Simulation/Test/DiseaseBank/numerical_actions.py
|
anoppa/Proyecto-IA-Sim-Comp
|
71132bd0c6cb5aeff812fd96e0017be71178a5f3
|
[
"MIT"
] | 1
|
2022-01-19T04:29:19.000Z
|
2022-01-19T04:29:19.000Z
|
from ...Simulation.environment import Environment
from random import random
def tos_action(time: int, env: Environment) -> Environment:
    """Apply a cough ('tos') event to *env*.

    Draws one random number. When the parameters exist in the environment,
    'tos' is always bumped by 0.4, and 'plaqueta' is reduced by 0.5
    (draw < 0.5) or 0.8 (otherwise). Returns the same, mutated environment.
    """
    # The original two branches were identical except for the plaqueta delta,
    # so the duplication collapses to a single conditional value. The single
    # random() draw per call is preserved.
    plaqueta_delta = -0.5 if random() < 0.5 else -0.8
    if env.get_parameter('tos') is not None:
        env.update_parameter('tos', 0.4)
    if env.get_parameter('plaqueta') is not None:
        env.update_parameter('plaqueta', plaqueta_delta)
    return env
def fiebre_action(time: int, env: Environment) -> Environment:
    """Apply a fever ('fiebre') event to *env*.

    A single random draw selects a severity tier; when present, 'fiebre' and
    'plaqueta' are adjusted by the tier's deltas, and the most severe tier
    additionally raises 'dolor de cabeza' by 3. Returns the same, mutated
    environment.
    """
    r = random()
    # Tier selection: (fiebre delta, plaqueta delta).
    if r < 0.4:
        fiebre_delta, plaqueta_delta = 1, -1.5
    elif r < 0.75:
        fiebre_delta, plaqueta_delta = 1.4, -1.5
    else:
        fiebre_delta, plaqueta_delta = 1.8, -2.0
    if env.get_parameter('fiebre') is not None:
        env.update_parameter('fiebre', fiebre_delta)
    if env.get_parameter('plaqueta') is not None:
        env.update_parameter('plaqueta', plaqueta_delta)
    # Only the most severe tier (r >= 0.75) also triggers the headache bump.
    if r >= 0.75 and env.get_parameter('dolor de cabeza') is not None:
        env.update_parameter('dolor de cabeza', 3)
    return env
def dc_action(time: int, env: Environment) -> Environment:
    """Apply a headache ('dolor de cabeza') event to *env*.

    When the parameter exists, it is increased by 2.5 (draw < 0.75) or 4.5
    (otherwise). Returns the same, mutated environment.
    """
    # Both original branches performed the same guarded update and differed
    # only in the delta, so they collapse to one conditional expression.
    delta = 2.5 if random() < 0.75 else 4.5
    if env.get_parameter('dolor de cabeza') is not None:
        env.update_parameter('dolor de cabeza', delta)
    return env
| 35.816327
| 58
| 0.597151
| 230
| 1,755
| 4.430435
| 0.147826
| 0.063788
| 0.102061
| 0.216879
| 0.878312
| 0.878312
| 0.863592
| 0.857704
| 0.857704
| 0.857704
| 0
| 0.026194
| 0.260399
| 1,755
| 48
| 59
| 36.5625
| 0.75886
| 0
| 0
| 0.590909
| 0
| 0
| 0.124217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068182
| false
| 0
| 0.045455
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
51c8e3bd0f20cb53c76dd35a3478647c769e72d0
| 517
|
py
|
Python
|
teste1.py
|
allanedgard/pdss
|
9874dfb9faa57db79e8072bfd35e79c5c16eea67
|
[
"Apache-2.0"
] | null | null | null |
teste1.py
|
allanedgard/pdss
|
9874dfb9faa57db79e8072bfd35e79c5c16eea67
|
[
"Apache-2.0"
] | null | null | null |
teste1.py
|
allanedgard/pdss
|
9874dfb9faa57db79e8072bfd35e79c5c16eea67
|
[
"Apache-2.0"
] | null | null | null |
import Buffer
import time


def _now_ms():
    """Current wall-clock time as integer milliseconds since the epoch."""
    return int(time.time() * 1000)


# Reference timestamp: each message is stamped with the current time taken
# modulo this value (preserving the original script's stamping scheme).
y = _now_ms()
time.sleep(.001)
x = Buffer.Buffer()
x.add(_now_ms() % y, 'teste 1')
x.add(_now_ms() % y, 'teste 2')
time.sleep(.001)
x.add(_now_ms() % y, 'prova 3')
x.add(_now_ms() % y, 'prova 4')
# print(...) with a single argument is valid on both Python 2 and 3;
# the original `print x` statement is Python-2-only.
print(x.getMsgs(_now_ms() % y))
time.sleep(.001)
x.add(_now_ms() % y, 'teste 1')
x.add(_now_ms() % y, 'teste 2')
time.sleep(.001)
x.add(_now_ms() % y, 'prova 3')
print(x.getMsgs(_now_ms() % y))
| 27.210526
| 41
| 0.655706
| 103
| 517
| 3.291262
| 0.174757
| 0.20649
| 0.324484
| 0.442478
| 0.811209
| 0.811209
| 0.811209
| 0.811209
| 0.566372
| 0.566372
| 0
| 0.12369
| 0.077369
| 517
| 18
| 42
| 28.722222
| 0.587002
| 0
| 0
| 0.705882
| 0
| 0
| 0.094961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.117647
| null | null | 0.117647
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
51cf935e9e313b47a478419c2bfca9086c3cd1ed
| 84,200
|
py
|
Python
|
sample/web/app/mautic_models.py
|
hdknr/django-mautic
|
aa2e5304936541c9266215df00469576142ee906
|
[
"BSD-2-Clause-FreeBSD"
] | 1
|
2021-02-20T08:48:05.000Z
|
2021-02-20T08:48:05.000Z
|
sample/web/app/mautic_models.py
|
hdknr/django-mautic
|
aa2e5304936541c9266215df00469576142ee906
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
sample/web/app/mautic_models.py
|
hdknr/django-mautic
|
aa2e5304936541c9266215df00469576142ee906
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class AssetDownloads(models.Model):
    """Auto-generated, unmanaged model for the `asset_downloads` table."""
    asset = models.ForeignKey('Assets', models.DO_NOTHING, blank=True, null=True)
    ip = models.ForeignKey('IpAddresses', models.DO_NOTHING)
    lead = models.ForeignKey('Leads', models.DO_NOTHING, blank=True, null=True)
    email = models.ForeignKey('Emails', models.DO_NOTHING, blank=True, null=True)
    date_download = models.DateTimeField()
    code = models.IntegerField()
    referer = models.TextField(blank=True, null=True)
    tracking_id = models.CharField(max_length=255)
    source = models.CharField(max_length=255, blank=True, null=True)
    source_id = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False  # Django never creates/alters this table
        db_table = 'asset_downloads'
class Assets(models.Model):
    """Auto-generated, unmanaged model for the `assets` table."""
    category = models.ForeignKey('Categories', models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    title = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    alias = models.CharField(max_length=255)
    storage_location = models.CharField(max_length=255, blank=True, null=True)
    path = models.CharField(max_length=255, blank=True, null=True)
    remote_path = models.CharField(max_length=255, blank=True, null=True)
    original_file_name = models.CharField(max_length=255, blank=True, null=True)
    lang = models.CharField(max_length=255)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    download_count = models.IntegerField()
    unique_download_count = models.IntegerField()
    revision = models.IntegerField()
    extension = models.CharField(max_length=255, blank=True, null=True)
    mime = models.CharField(max_length=255, blank=True, null=True)
    size = models.IntegerField(blank=True, null=True)
    disallow = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'assets'
class AuditLog(models.Model):
    """Auto-generated, unmanaged model for the `audit_log` table."""
    user_id = models.IntegerField()
    user_name = models.CharField(max_length=255)
    bundle = models.CharField(max_length=50)
    object = models.CharField(max_length=50)  # shadows builtin `object`; name fixed by DB column
    object_id = models.IntegerField()
    action = models.CharField(max_length=50)
    details = models.TextField(blank=True, null=True)
    date_added = models.DateTimeField()
    ip_address = models.CharField(max_length=45)

    class Meta:
        managed = False
        db_table = 'audit_log'
class CacheItems(models.Model):
    """Auto-generated, unmanaged model for the `cache_items` table."""
    item_id = models.CharField(primary_key=True, max_length=255)
    item_data = models.TextField()
    item_lifetime = models.PositiveIntegerField(blank=True, null=True)
    item_time = models.PositiveIntegerField()

    class Meta:
        managed = False
        db_table = 'cache_items'
class CampaignEvents(models.Model):
    """Auto-generated, unmanaged model for the `campaign_events` table."""
    campaign = models.ForeignKey('Campaigns', models.DO_NOTHING)
    parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)  # self-referential event tree
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    type = models.CharField(max_length=50)  # shadows builtin `type`; name fixed by DB column
    event_type = models.CharField(max_length=50)
    event_order = models.IntegerField()
    properties = models.TextField()
    trigger_date = models.DateTimeField(blank=True, null=True)
    trigger_interval = models.IntegerField(blank=True, null=True)
    trigger_interval_unit = models.CharField(max_length=1, blank=True, null=True)
    trigger_mode = models.CharField(max_length=10, blank=True, null=True)
    decision_path = models.CharField(max_length=255, blank=True, null=True)
    temp_id = models.CharField(max_length=255, blank=True, null=True)
    channel = models.CharField(max_length=255, blank=True, null=True)
    channel_id = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'campaign_events'
class CampaignFormXref(models.Model):
    """Join table `campaign_form_xref`; composite key approximated via
    primary_key on one FK plus unique_together (inspectdb convention)."""
    campaign = models.ForeignKey('Campaigns', models.DO_NOTHING, primary_key=True)
    form = models.ForeignKey('Forms', models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'campaign_form_xref'
        unique_together = (('campaign', 'form'),)
class CampaignLeadEventFailedLog(models.Model):
    """Auto-generated, unmanaged model for the `campaign_lead_event_failed_log` table."""
    log = models.ForeignKey('CampaignLeadEventLog', models.DO_NOTHING, primary_key=True)
    date_added = models.DateTimeField()
    reason = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'campaign_lead_event_failed_log'
class CampaignLeadEventLog(models.Model):
    """Auto-generated, unmanaged model for the `campaign_lead_event_log` table."""
    event = models.ForeignKey(CampaignEvents, models.DO_NOTHING)
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    campaign = models.ForeignKey('Campaigns', models.DO_NOTHING, blank=True, null=True)
    ip = models.ForeignKey('IpAddresses', models.DO_NOTHING, blank=True, null=True)
    rotation = models.IntegerField()
    date_triggered = models.DateTimeField(blank=True, null=True)
    is_scheduled = models.IntegerField()
    trigger_date = models.DateTimeField(blank=True, null=True)
    system_triggered = models.IntegerField()
    metadata = models.TextField(blank=True, null=True)
    channel = models.CharField(max_length=255, blank=True, null=True)
    channel_id = models.IntegerField(blank=True, null=True)
    non_action_path_taken = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'campaign_lead_event_log'
        unique_together = (('event', 'lead', 'rotation'),)
class CampaignLeadlistXref(models.Model):
    """Join table `campaign_leadlist_xref` (composite key via primary_key + unique_together)."""
    campaign = models.ForeignKey('Campaigns', models.DO_NOTHING, primary_key=True)
    leadlist = models.ForeignKey('LeadLists', models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'campaign_leadlist_xref'
        unique_together = (('campaign', 'leadlist'),)
class CampaignLeads(models.Model):
    """Auto-generated, unmanaged model for the `campaign_leads` table."""
    campaign = models.ForeignKey('Campaigns', models.DO_NOTHING, primary_key=True)
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    date_added = models.DateTimeField()
    manually_removed = models.IntegerField()
    manually_added = models.IntegerField()
    date_last_exited = models.DateTimeField(blank=True, null=True)
    rotation = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'campaign_leads'
        unique_together = (('campaign', 'lead'),)
class Campaigns(models.Model):
    """Auto-generated, unmanaged model for the `campaigns` table."""
    category = models.ForeignKey('Categories', models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    canvas_settings = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'campaigns'
class Categories(models.Model):
    """Auto-generated, unmanaged model for the `categories` table."""
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    title = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    alias = models.CharField(max_length=255)
    color = models.CharField(max_length=7, blank=True, null=True)
    bundle = models.CharField(max_length=50)

    class Meta:
        managed = False
        db_table = 'categories'
class ChannelUrlTrackables(models.Model):
    """Auto-generated, unmanaged model for the `channel_url_trackables` table."""
    redirect = models.ForeignKey('PageRedirects', models.DO_NOTHING, primary_key=True)
    channel_id = models.IntegerField()
    channel = models.CharField(max_length=255)
    hits = models.IntegerField()
    unique_hits = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'channel_url_trackables'
        unique_together = (('redirect', 'channel_id'),)
class Companies(models.Model):
    """Auto-generated, unmanaged model for the `companies` table."""
    owner = models.ForeignKey('Users', models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    social_cache = models.TextField(blank=True, null=True)
    score = models.IntegerField(blank=True, null=True)
    companyemail = models.CharField(max_length=255, blank=True, null=True)
    companyaddress1 = models.CharField(max_length=255, blank=True, null=True)
    companyaddress2 = models.CharField(max_length=255, blank=True, null=True)
    companyphone = models.CharField(max_length=255, blank=True, null=True)
    companycity = models.CharField(max_length=255, blank=True, null=True)
    companystate = models.CharField(max_length=255, blank=True, null=True)
    companyzipcode = models.CharField(max_length=255, blank=True, null=True)
    companycountry = models.CharField(max_length=255, blank=True, null=True)
    companyname = models.CharField(max_length=255, blank=True, null=True)
    companywebsite = models.TextField(blank=True, null=True)
    companyindustry = models.CharField(max_length=255, blank=True, null=True)
    companydescription = models.TextField(blank=True, null=True)
    companynumber_of_employees = models.FloatField(blank=True, null=True)
    companyfax = models.CharField(max_length=255, blank=True, null=True)
    companyannual_revenue = models.FloatField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'companies'
class CompaniesLeads(models.Model):
    """Auto-generated, unmanaged model for the `companies_leads` table."""
    company = models.ForeignKey(Companies, models.DO_NOTHING, primary_key=True)
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    date_added = models.DateTimeField()
    is_primary = models.IntegerField(blank=True, null=True)
    manually_removed = models.IntegerField()
    manually_added = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'companies_leads'
        unique_together = (('company', 'lead'),)
class ContactMergeRecords(models.Model):
    """Auto-generated, unmanaged model for the `contact_merge_records` table."""
    contact = models.ForeignKey('Leads', models.DO_NOTHING)
    date_added = models.DateTimeField()
    merged_id = models.IntegerField()
    name = models.CharField(max_length=255)

    class Meta:
        managed = False
        db_table = 'contact_merge_records'
class DynamicContent(models.Model):
    """Auto-generated, unmanaged model for the `dynamic_content` table."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    translation_parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)
    variant_parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    sent_count = models.IntegerField()
    content = models.TextField(blank=True, null=True)
    lang = models.CharField(max_length=255)
    variant_settings = models.TextField(blank=True, null=True)
    variant_start_date = models.DateTimeField(blank=True, null=True)
    filters = models.TextField(blank=True, null=True)
    is_campaign_based = models.IntegerField()
    slot_name = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'dynamic_content'
class DynamicContentLeadData(models.Model):
    """Auto-generated, unmanaged model for the `dynamic_content_lead_data` table."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    dynamic_content = models.ForeignKey(DynamicContent, models.DO_NOTHING, blank=True, null=True)
    date_added = models.DateTimeField(blank=True, null=True)
    slot = models.TextField()

    class Meta:
        managed = False
        db_table = 'dynamic_content_lead_data'
class DynamicContentStats(models.Model):
    """Auto-generated, unmanaged model for the `dynamic_content_stats` table."""
    dynamic_content = models.ForeignKey(DynamicContent, models.DO_NOTHING, blank=True, null=True)
    lead = models.ForeignKey('Leads', models.DO_NOTHING, blank=True, null=True)
    date_sent = models.DateTimeField()
    source = models.CharField(max_length=255, blank=True, null=True)
    source_id = models.IntegerField(blank=True, null=True)
    tokens = models.TextField(blank=True, null=True)
    sent_count = models.IntegerField(blank=True, null=True)
    last_sent = models.DateTimeField(blank=True, null=True)
    sent_details = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'dynamic_content_stats'
class EmailAssetsXref(models.Model):
    """Join table `email_assets_xref` (composite key via primary_key + unique_together)."""
    email = models.ForeignKey('Emails', models.DO_NOTHING, primary_key=True)
    asset = models.ForeignKey(Assets, models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'email_assets_xref'
        unique_together = (('email', 'asset'),)
class EmailCopies(models.Model):
    """Auto-generated, unmanaged model for the `email_copies` table."""
    id = models.CharField(primary_key=True, max_length=32)  # string PK, not the usual AutoField
    date_created = models.DateTimeField()
    body = models.TextField(blank=True, null=True)
    subject = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'email_copies'
class EmailListXref(models.Model):
    """Join table `email_list_xref` (composite key via primary_key + unique_together)."""
    email = models.ForeignKey('Emails', models.DO_NOTHING, primary_key=True)
    leadlist = models.ForeignKey('LeadLists', models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'email_list_xref'
        unique_together = (('email', 'leadlist'),)
class EmailStatReplies(models.Model):
    """Auto-generated, unmanaged model for the `email_stat_replies` table."""
    id = models.CharField(primary_key=True, max_length=36)  # string PK (36 chars; presumably a UUID -- confirm)
    stat = models.ForeignKey('EmailStats', models.DO_NOTHING)
    date_replied = models.DateTimeField()
    message_id = models.CharField(max_length=255)

    class Meta:
        managed = False
        db_table = 'email_stat_replies'
class EmailStats(models.Model):
    """Auto-generated, unmanaged model for the `email_stats` table."""
    email = models.ForeignKey('Emails', models.DO_NOTHING, blank=True, null=True)
    lead = models.ForeignKey('Leads', models.DO_NOTHING, blank=True, null=True)
    list = models.ForeignKey('LeadLists', models.DO_NOTHING, blank=True, null=True)  # shadows builtin `list`; name fixed by DB column
    ip = models.ForeignKey('IpAddresses', models.DO_NOTHING, blank=True, null=True)
    copy = models.ForeignKey(EmailCopies, models.DO_NOTHING, blank=True, null=True)  # shadows stdlib `copy` name; fixed by DB column
    email_address = models.CharField(max_length=255)
    date_sent = models.DateTimeField()
    is_read = models.IntegerField()
    is_failed = models.IntegerField()
    viewed_in_browser = models.IntegerField()
    date_read = models.DateTimeField(blank=True, null=True)
    tracking_hash = models.CharField(max_length=255, blank=True, null=True)
    retry_count = models.IntegerField(blank=True, null=True)
    source = models.CharField(max_length=255, blank=True, null=True)
    source_id = models.IntegerField(blank=True, null=True)
    tokens = models.TextField(blank=True, null=True)
    open_count = models.IntegerField(blank=True, null=True)
    last_opened = models.DateTimeField(blank=True, null=True)
    open_details = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'email_stats'
class EmailStatsDevices(models.Model):
    """Auto-generated, unmanaged model for the `email_stats_devices` table."""
    device = models.ForeignKey('LeadDevices', models.DO_NOTHING, blank=True, null=True)
    stat = models.ForeignKey(EmailStats, models.DO_NOTHING, blank=True, null=True)
    ip = models.ForeignKey('IpAddresses', models.DO_NOTHING, blank=True, null=True)
    date_opened = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'email_stats_devices'
class Emails(models.Model):
    """Auto-generated, unmanaged model for the `emails` table."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    translation_parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)
    variant_parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)
    unsubscribeform = models.ForeignKey('Forms', models.DO_NOTHING, blank=True, null=True)
    preference_center = models.ForeignKey('Pages', models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    subject = models.TextField(blank=True, null=True)
    from_address = models.CharField(max_length=255, blank=True, null=True)
    from_name = models.CharField(max_length=255, blank=True, null=True)
    reply_to_address = models.CharField(max_length=255, blank=True, null=True)
    bcc_address = models.CharField(max_length=255, blank=True, null=True)
    template = models.CharField(max_length=255, blank=True, null=True)
    content = models.TextField(blank=True, null=True)
    utm_tags = models.TextField(blank=True, null=True)
    plain_text = models.TextField(blank=True, null=True)
    custom_html = models.TextField(blank=True, null=True)
    email_type = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    read_count = models.IntegerField()
    sent_count = models.IntegerField()
    revision = models.IntegerField()
    lang = models.CharField(max_length=255)
    variant_settings = models.TextField(blank=True, null=True)
    variant_start_date = models.DateTimeField(blank=True, null=True)
    dynamic_content = models.TextField(blank=True, null=True)
    variant_sent_count = models.IntegerField()
    variant_read_count = models.IntegerField()
    headers = models.TextField()

    class Meta:
        managed = False
        db_table = 'emails'
class Focus(models.Model):
    """Auto-generated, unmanaged model for the `focus` table."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    focus_type = models.CharField(max_length=255)
    style = models.CharField(max_length=255)
    website = models.CharField(max_length=255, blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    properties = models.TextField(blank=True, null=True)
    utm_tags = models.TextField(blank=True, null=True)
    form_id = models.IntegerField(blank=True, null=True)  # plain integer, not a ForeignKey, in the generated schema
    cache = models.TextField(blank=True, null=True)
    html_mode = models.CharField(max_length=255, blank=True, null=True)
    editor = models.TextField(blank=True, null=True)
    html = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'focus'
class FocusStats(models.Model):
    """Unmanaged model for the ``focus_stats`` table (per-lead interaction events for a focus item)."""
    focus = models.ForeignKey(Focus, models.DO_NOTHING)
    lead = models.ForeignKey('Leads', models.DO_NOTHING, blank=True, null=True)
    type = models.CharField(max_length=255)  # NOTE: shadows builtin `type`; renaming would need db_column
    type_id = models.IntegerField(blank=True, null=True)
    date_added = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'focus_stats'
class FormActions(models.Model):
    """Unmanaged model for the ``form_actions`` table (ordered post-submit actions attached to a form)."""
    form = models.ForeignKey('Forms', models.DO_NOTHING)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    type = models.CharField(max_length=50)  # NOTE: shadows builtin `type`
    action_order = models.IntegerField()
    properties = models.TextField()  # serialized action config — format not visible here

    class Meta:
        managed = False
        db_table = 'form_actions'
class FormFields(models.Model):
    """Unmanaged model for the ``form_fields`` table (field definitions, validation and display attrs per form)."""
    form = models.ForeignKey('Forms', models.DO_NOTHING)
    label = models.TextField()
    show_label = models.IntegerField(blank=True, null=True)
    alias = models.CharField(max_length=255)
    type = models.CharField(max_length=255)  # NOTE: shadows builtin `type`
    is_custom = models.IntegerField()
    custom_parameters = models.TextField(blank=True, null=True)
    default_value = models.TextField(blank=True, null=True)
    is_required = models.IntegerField()
    validation_message = models.TextField(blank=True, null=True)
    help_message = models.TextField(blank=True, null=True)
    field_order = models.IntegerField(blank=True, null=True)
    properties = models.TextField(blank=True, null=True)
    label_attr = models.CharField(max_length=255, blank=True, null=True)
    input_attr = models.CharField(max_length=255, blank=True, null=True)
    container_attr = models.CharField(max_length=255, blank=True, null=True)
    lead_field = models.CharField(max_length=255, blank=True, null=True)
    save_result = models.IntegerField(blank=True, null=True)
    is_auto_fill = models.IntegerField(blank=True, null=True)
    show_when_value_exists = models.IntegerField(blank=True, null=True)
    show_after_x_submissions = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'form_fields'
class FormResults1Fomu(models.Model):
    """Unmanaged model for the per-form results table ``form_results_1_fomu``.

    The real key is composite (submission_id, form_id); Django lacks composite
    primary keys, so the first column is flagged ``primary_key`` and the pair
    is preserved in ``unique_together`` — the standard inspectdb workaround.
    """
    submission_id = models.IntegerField(primary_key=True)
    form_id = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'form_results_1_fomu'
        unique_together = (('submission_id', 'form_id'),)
class FormResults2Documentdo(models.Model):
    """Unmanaged model for the per-form results table ``form_results_2_documentdo``.

    Column names appear to be romanized Japanese form-field aliases — verify
    against the form definition before relying on their meaning.
    Composite key (submission_id, form_id) is expressed via primary_key +
    unique_together (Django has no composite PK support).
    """
    submission_id = models.IntegerField(primary_key=True)
    form_id = models.IntegerField()
    yu_she_ming = models.TextField(blank=True, null=True)
    email = models.TextField(blank=True, null=True)
    dan_dang_zhe_yang_xing = models.TextField(blank=True, null=True)
    dan_dang_zhe_yang_ming = models.TextField(blank=True, null=True)
    dian_hua_fan_hao = models.TextField(blank=True, null=True)
    dou_dao_fu_xian = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'form_results_2_documentdo'
        unique_together = (('submission_id', 'form_id'),)
class FormResults3Partnerpro(models.Model):
    """Unmanaged model for the per-form results table ``form_results_3_partnerpro``.

    Composite key (submission_id, form_id) via primary_key + unique_together.
    Column names are auto-generated field aliases; confirm semantics against
    the source form before use.
    """
    submission_id = models.IntegerField(primary_key=True)
    form_id = models.IntegerField()
    yu_she_ming = models.TextField(blank=True, null=True)
    yu_sheweb_url = models.TextField(blank=True, null=True)
    go_dan_dang_zhe_yang_ming = models.TextField(blank=True, null=True)
    go_dan_dang_zhe_yang_ming1 = models.TextField(blank=True, null=True)
    go_dan_dang_zhe_yangmerua1 = models.TextField(blank=True, null=True)
    go_dan_dang_zhe_yang_dian = models.TextField(blank=True, null=True)
    zhu_yao_qu_yin_zhi_pin = models.TextField(blank=True, null=True)
    zhu_yao_fan_mai_de_yu = models.TextField(blank=True, null=True)
    xiang_ding_fan_mai_tai_sh = models.TextField(blank=True, null=True)
    dou_dao_fu_xian = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'form_results_3_partnerpro'
        unique_together = (('submission_id', 'form_id'),)
class FormResults4Recruitcon(models.Model):
    """Unmanaged model for the per-form results table ``form_results_4_recruitcon``.

    Composite key (submission_id, form_id) via primary_key + unique_together.
    """
    submission_id = models.IntegerField(primary_key=True)
    form_id = models.IntegerField()
    han_zi_xing = models.TextField(blank=True, null=True)
    han_zi_ming = models.TextField(blank=True, null=True)
    kana_xing = models.TextField(blank=True, null=True)
    kana_ming = models.TextField(blank=True, null=True)
    dian_hua_fan_hao = models.TextField(blank=True, null=True)
    meruadoresu = models.TextField(blank=True, null=True)
    xi_wang_zhi_zhong = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'form_results_4_recruitcon'
        unique_together = (('submission_id', 'form_id'),)
class FormResults5Contact(models.Model):
    """Unmanaged model for the per-form results table ``form_results_5_contact``.

    Composite key (submission_id, form_id) via primary_key + unique_together.
    """
    submission_id = models.IntegerField(primary_key=True)
    form_id = models.IntegerField()
    yu_she_ming = models.TextField(blank=True, null=True)
    go_dan_dang_zhe_xing = models.TextField(blank=True, null=True)
    go_dan_dang_zhe_ming1 = models.TextField(blank=True, null=True)
    meruadoresu = models.TextField(blank=True, null=True)
    o_wen_hese_jian_ming = models.TextField(blank=True, null=True)
    o_wen_hese_zhong_bie = models.TextField(blank=True, null=True)
    dui_xiang_zhi_pin = models.TextField(blank=True, null=True)
    xiang_ding_tai_shu = models.TextField(blank=True, null=True)
    xian_zai_li_yong_zhongnos = models.TextField(blank=True, null=True)
    zai_fan_dui_xiang_qi_ye_g = models.TextField(blank=True, null=True)
    o_wen_hese_nei_rong = models.TextField(blank=True, null=True)
    dian_hua_fan_hao = models.TextField(blank=True, null=True)
    dou_dao_fu_xian = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'form_results_5_contact'
        unique_together = (('submission_id', 'form_id'),)
class FormSubmissions(models.Model):
    """Unmanaged model for the ``form_submissions`` table (one row per form submission event)."""
    form = models.ForeignKey('Forms', models.DO_NOTHING)
    ip = models.ForeignKey('IpAddresses', models.DO_NOTHING)
    lead = models.ForeignKey('Leads', models.DO_NOTHING, blank=True, null=True)
    page = models.ForeignKey('Pages', models.DO_NOTHING, blank=True, null=True)
    tracking_id = models.CharField(max_length=255, blank=True, null=True)
    date_submitted = models.DateTimeField()
    referer = models.TextField()

    class Meta:
        managed = False
        db_table = 'form_submissions'
class Forms(models.Model):
    """Unmanaged model for the ``forms`` table (form definitions with cached HTML and post-action config)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    alias = models.CharField(max_length=255)
    cached_html = models.TextField(blank=True, null=True)
    post_action = models.CharField(max_length=255)
    post_action_property = models.CharField(max_length=255, blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    template = models.CharField(max_length=255, blank=True, null=True)
    in_kiosk_mode = models.IntegerField(blank=True, null=True)
    render_style = models.IntegerField(blank=True, null=True)
    form_type = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'forms'
class Imports(models.Model):
    """Unmanaged model for the ``imports`` table (file-import jobs with progress counters and status).

    NOTE(review): ``dir``, ``file`` and ``object`` shadow Python builtins; they
    mirror the underlying column names, so renaming them would require
    ``db_column`` overrides — left untouched here.
    """
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    dir = models.CharField(max_length=255)
    file = models.CharField(max_length=255)
    original_file = models.CharField(max_length=255, blank=True, null=True)
    line_count = models.IntegerField()
    inserted_count = models.IntegerField()
    updated_count = models.IntegerField()
    ignored_count = models.IntegerField()
    priority = models.IntegerField()
    status = models.IntegerField()
    date_started = models.DateTimeField(blank=True, null=True)
    date_ended = models.DateTimeField(blank=True, null=True)
    object = models.CharField(max_length=255)
    properties = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'imports'
class IntegrationEntity(models.Model):
    """Unmanaged model for the ``integration_entity`` table (maps internal records to third-party integration IDs)."""
    date_added = models.DateTimeField()
    integration = models.CharField(max_length=255, blank=True, null=True)
    integration_entity = models.CharField(max_length=255, blank=True, null=True)
    integration_entity_id = models.CharField(max_length=255, blank=True, null=True)
    internal_entity = models.CharField(max_length=255, blank=True, null=True)
    internal_entity_id = models.IntegerField(blank=True, null=True)
    last_sync_date = models.DateTimeField(blank=True, null=True)
    internal = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'integration_entity'
class IpAddresses(models.Model):
    """Unmanaged model for the ``ip_addresses`` table; max_length=45 fits a full IPv6 textual address."""
    ip_address = models.CharField(max_length=45)
    ip_details = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'ip_addresses'
class LeadCategories(models.Model):
    """Unmanaged model for the ``lead_categories`` join table (lead-to-category subscriptions)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING)
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    date_added = models.DateTimeField()
    manually_removed = models.IntegerField()
    manually_added = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'lead_categories'
class LeadCompaniesChangeLog(models.Model):
    """Unmanaged model for the ``lead_companies_change_log`` audit table."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    type = models.TextField()  # NOTE: shadows builtin `type`
    event_name = models.CharField(max_length=255)
    action_name = models.CharField(max_length=255)
    company_id = models.IntegerField()  # plain int, not a ForeignKey, in the generated schema
    date_added = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'lead_companies_change_log'
class LeadDevices(models.Model):
    """Unmanaged model for the ``lead_devices`` table (per-lead device fingerprint/UA details)."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    date_added = models.DateTimeField()
    client_info = models.TextField(blank=True, null=True)
    device = models.CharField(max_length=255, blank=True, null=True)
    device_os_name = models.CharField(max_length=255, blank=True, null=True)
    device_os_shortname = models.CharField(max_length=255, blank=True, null=True)
    device_os_version = models.CharField(max_length=255, blank=True, null=True)
    device_os_platform = models.CharField(max_length=255, blank=True, null=True)
    device_brand = models.CharField(max_length=255, blank=True, null=True)
    device_model = models.CharField(max_length=255, blank=True, null=True)
    device_fingerprint = models.CharField(max_length=255, blank=True, null=True)
    tracking_id = models.CharField(unique=True, max_length=255, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'lead_devices'
class LeadDonotcontact(models.Model):
    """Unmanaged model for the ``lead_donotcontact`` table (per-channel do-not-contact entries)."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING, blank=True, null=True)
    date_added = models.DateTimeField()
    reason = models.SmallIntegerField()  # numeric reason code; meaning defined by the source application
    channel = models.CharField(max_length=255)
    channel_id = models.IntegerField(blank=True, null=True)
    comments = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'lead_donotcontact'
class LeadEventLog(models.Model):
    """Unmanaged model for the ``lead_event_log`` audit table (who did what to which object, per lead)."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING, blank=True, null=True)
    user_id = models.IntegerField(blank=True, null=True)
    user_name = models.CharField(max_length=255, blank=True, null=True)
    bundle = models.CharField(max_length=255, blank=True, null=True)
    object = models.CharField(max_length=255, blank=True, null=True)  # NOTE: shadows builtin `object`
    action = models.CharField(max_length=255, blank=True, null=True)
    object_id = models.IntegerField(blank=True, null=True)
    date_added = models.DateTimeField()
    properties = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'lead_event_log'
class LeadFields(models.Model):
    """Unmanaged model for the ``lead_fields`` table (custom-field metadata: type, visibility, ordering).

    NOTE(review): ``is_unique_identifer`` is misspelled in the underlying
    column name — do not "fix" it here without a ``db_column`` mapping.
    """
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    label = models.CharField(max_length=255)
    alias = models.CharField(max_length=255)
    type = models.CharField(max_length=50)  # NOTE: shadows builtin `type`
    field_group = models.CharField(max_length=255, blank=True, null=True)
    default_value = models.CharField(max_length=255, blank=True, null=True)
    is_required = models.IntegerField()
    is_fixed = models.IntegerField()
    is_visible = models.IntegerField()
    is_short_visible = models.IntegerField()
    is_listable = models.IntegerField()
    is_publicly_updatable = models.IntegerField()
    is_unique_identifer = models.IntegerField(blank=True, null=True)
    field_order = models.IntegerField(blank=True, null=True)
    object = models.CharField(max_length=255, blank=True, null=True)  # NOTE: shadows builtin `object`
    properties = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'lead_fields'
class LeadFrequencyrules(models.Model):
    """Unmanaged model for the ``lead_frequencyrules`` table (per-lead contact-frequency limits and pause windows)."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    date_added = models.DateTimeField()
    frequency_number = models.SmallIntegerField(blank=True, null=True)
    frequency_time = models.CharField(max_length=25, blank=True, null=True)
    channel = models.CharField(max_length=255)
    preferred_channel = models.IntegerField()
    pause_from_date = models.DateTimeField(blank=True, null=True)
    pause_to_date = models.DateTimeField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'lead_frequencyrules'
class LeadIpsXref(models.Model):
    """Unmanaged model for the ``lead_ips_xref`` join table (lead ↔ IP address).

    Composite key (lead, ip): Django has no composite PK, so the first FK is
    marked ``primary_key`` and the pair is kept in ``unique_together``.
    """
    lead = models.ForeignKey('Leads', models.DO_NOTHING, primary_key=True)
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'lead_ips_xref'
        unique_together = (('lead', 'ip'),)
class LeadLists(models.Model):
    """Unmanaged model for the ``lead_lists`` table (segment definitions; ``filters`` holds serialized criteria)."""
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    alias = models.CharField(max_length=255)
    filters = models.TextField()
    is_global = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'lead_lists'
class LeadListsLeads(models.Model):
    """Unmanaged model for the ``lead_lists_leads`` join table (list membership per lead).

    Composite key (leadlist, lead) expressed via primary_key + unique_together.
    """
    leadlist = models.ForeignKey(LeadLists, models.DO_NOTHING, primary_key=True)
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    date_added = models.DateTimeField()
    manually_removed = models.IntegerField()
    manually_added = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'lead_lists_leads'
        unique_together = (('leadlist', 'lead'),)
class LeadNotes(models.Model):
    """Unmanaged model for the ``lead_notes`` table (free-text notes attached to a lead)."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    text = models.TextField()
    type = models.CharField(max_length=50, blank=True, null=True)  # NOTE: shadows builtin `type`
    date_time = models.DateTimeField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'lead_notes'
class LeadPointsChangeLog(models.Model):
    """Unmanaged model for the ``lead_points_change_log`` audit table; ``delta`` is the points adjustment."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING)
    type = models.TextField()  # NOTE: shadows builtin `type`
    event_name = models.CharField(max_length=255)
    action_name = models.CharField(max_length=255)
    delta = models.IntegerField()
    date_added = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'lead_points_change_log'
class LeadStagesChangeLog(models.Model):
    """Unmanaged model for the ``lead_stages_change_log`` audit table (stage transitions per lead)."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    stage = models.ForeignKey('Stages', models.DO_NOTHING, blank=True, null=True)
    event_name = models.CharField(max_length=255)
    action_name = models.CharField(max_length=255)
    date_added = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'lead_stages_change_log'
class LeadTags(models.Model):
    """Unmanaged model for the ``lead_tags`` table (tag dictionary)."""
    tag = models.CharField(max_length=255)

    class Meta:
        managed = False
        db_table = 'lead_tags'
class LeadTagsXref(models.Model):
    """Unmanaged model for the ``lead_tags_xref`` join table (lead ↔ tag).

    Composite key (lead, tag) expressed via primary_key + unique_together.
    """
    lead = models.ForeignKey('Leads', models.DO_NOTHING, primary_key=True)
    tag = models.ForeignKey(LeadTags, models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'lead_tags_xref'
        unique_together = (('lead', 'tag'),)
class LeadUtmtags(models.Model):
    """Unmanaged model for the ``lead_utmtags`` table (UTM campaign parameters captured per lead visit)."""
    lead = models.ForeignKey('Leads', models.DO_NOTHING)
    date_added = models.DateTimeField()
    query = models.TextField(blank=True, null=True)
    referer = models.TextField(blank=True, null=True)
    remote_host = models.CharField(max_length=255, blank=True, null=True)
    url = models.CharField(max_length=255, blank=True, null=True)
    user_agent = models.TextField(blank=True, null=True)
    utm_campaign = models.CharField(max_length=255, blank=True, null=True)
    utm_content = models.CharField(max_length=255, blank=True, null=True)
    utm_medium = models.CharField(max_length=255, blank=True, null=True)
    utm_source = models.CharField(max_length=255, blank=True, null=True)
    utm_term = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'lead_utmtags'
class Leads(models.Model):
    """Unmanaged model for the central ``leads`` table (contact record: identity, address, social handles, scoring)."""
    owner = models.ForeignKey('Users', models.DO_NOTHING, blank=True, null=True)
    stage = models.ForeignKey('Stages', models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    points = models.IntegerField()
    last_active = models.DateTimeField(blank=True, null=True)
    internal = models.TextField(blank=True, null=True)
    social_cache = models.TextField(blank=True, null=True)
    date_identified = models.DateTimeField(blank=True, null=True)
    preferred_profile_image = models.CharField(max_length=255, blank=True, null=True)
    title = models.CharField(max_length=255, blank=True, null=True)
    firstname = models.CharField(max_length=255, blank=True, null=True)
    lastname = models.CharField(max_length=255, blank=True, null=True)
    company = models.CharField(max_length=255, blank=True, null=True)
    position = models.CharField(max_length=255, blank=True, null=True)
    email = models.CharField(max_length=255, blank=True, null=True)
    phone = models.CharField(max_length=255, blank=True, null=True)
    mobile = models.CharField(max_length=255, blank=True, null=True)
    address1 = models.CharField(max_length=255, blank=True, null=True)
    address2 = models.CharField(max_length=255, blank=True, null=True)
    city = models.CharField(max_length=255, blank=True, null=True)
    state = models.CharField(max_length=255, blank=True, null=True)
    zipcode = models.CharField(max_length=255, blank=True, null=True)
    timezone = models.CharField(max_length=255, blank=True, null=True)
    country = models.CharField(max_length=255, blank=True, null=True)
    fax = models.CharField(max_length=255, blank=True, null=True)
    preferred_locale = models.CharField(max_length=255, blank=True, null=True)
    attribution_date = models.DateTimeField(blank=True, null=True)
    attribution = models.FloatField(blank=True, null=True)
    website = models.TextField(blank=True, null=True)
    facebook = models.CharField(max_length=255, blank=True, null=True)
    foursquare = models.CharField(max_length=255, blank=True, null=True)
    googleplus = models.CharField(max_length=255, blank=True, null=True)
    instagram = models.CharField(max_length=255, blank=True, null=True)
    linkedin = models.CharField(max_length=255, blank=True, null=True)
    skype = models.CharField(max_length=255, blank=True, null=True)
    twitter = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'leads'
class MessageChannels(models.Model):
    """Unmanaged model for the ``message_channels`` table; one channel config per message, unique per (message, channel)."""
    message = models.ForeignKey('Messages', models.DO_NOTHING)
    channel = models.CharField(max_length=255)
    channel_id = models.IntegerField(blank=True, null=True)
    properties = models.TextField()
    is_enabled = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'message_channels'
        unique_together = (('message', 'channel'),)
class MessageQueue(models.Model):
    """Unmanaged model for the ``message_queue`` table (scheduled/retried outbound messages per lead)."""
    event = models.ForeignKey(CampaignEvents, models.DO_NOTHING, blank=True, null=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING)
    channel = models.CharField(max_length=255)
    channel_id = models.IntegerField()
    priority = models.SmallIntegerField()
    max_attempts = models.SmallIntegerField()
    attempts = models.SmallIntegerField()
    success = models.IntegerField()
    status = models.CharField(max_length=255)
    date_published = models.DateTimeField(blank=True, null=True)
    scheduled_date = models.DateTimeField(blank=True, null=True)
    last_attempt = models.DateTimeField(blank=True, null=True)
    date_sent = models.DateTimeField(blank=True, null=True)
    options = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'message_queue'
class Messages(models.Model):
    """Unmanaged model for the ``messages`` table (marketing messages with publish window)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'messages'
class MonitorPostCount(models.Model):
    """Unmanaged model for the ``monitor_post_count`` table (daily post counts per social monitor)."""
    monitor = models.ForeignKey('Monitoring', models.DO_NOTHING, blank=True, null=True)
    post_date = models.DateField()
    post_count = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'monitor_post_count'
class Monitoring(models.Model):
    """Unmanaged model for the ``monitoring`` table (social-network monitor definitions with cached stats)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    title = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    lists = models.TextField(blank=True, null=True)
    network_type = models.CharField(max_length=255, blank=True, null=True)
    revision = models.IntegerField()
    stats = models.TextField(blank=True, null=True)
    properties = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'monitoring'
class MonitoringLeads(models.Model):
    """Unmanaged model for the ``monitoring_leads`` join table (monitor ↔ lead).

    Composite key (monitor, lead) expressed via primary_key + unique_together.
    """
    monitor = models.ForeignKey(Monitoring, models.DO_NOTHING, primary_key=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING)
    date_added = models.DateTimeField()

    class Meta:
        managed = False
        db_table = 'monitoring_leads'
        unique_together = (('monitor', 'lead'),)
class Notifications(models.Model):
    """Unmanaged model for the ``notifications`` table (in-app user notifications)."""
    user = models.ForeignKey('Users', models.DO_NOTHING)
    type = models.CharField(max_length=25, blank=True, null=True)  # NOTE: shadows builtin `type`
    header = models.CharField(max_length=255, blank=True, null=True)
    message = models.TextField()
    date_added = models.DateTimeField()
    icon_class = models.CharField(max_length=255, blank=True, null=True)
    is_read = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'notifications'
class Oauth1AccessTokens(models.Model):
    """Unmanaged model for the ``oauth1_access_tokens`` table; ``expires_at`` is a Unix-epoch bigint."""
    consumer = models.ForeignKey('Oauth1Consumers', models.DO_NOTHING)
    user = models.ForeignKey('Users', models.DO_NOTHING)
    token = models.CharField(max_length=255)
    secret = models.CharField(max_length=255)
    expires_at = models.BigIntegerField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'oauth1_access_tokens'
class Oauth1Consumers(models.Model):
    """Unmanaged model for the ``oauth1_consumers`` table (registered OAuth1 consumer credentials)."""
    name = models.CharField(max_length=255)
    consumer_key = models.CharField(max_length=255)
    consumer_secret = models.CharField(max_length=255)
    callback = models.CharField(max_length=255)

    class Meta:
        managed = False
        db_table = 'oauth1_consumers'
class Oauth1Nonces(models.Model):
    """Unmanaged model for the ``oauth1_nonces`` replay-protection table; the nonce itself is the PK."""
    nonce = models.CharField(primary_key=True, max_length=255)
    timestamp = models.CharField(max_length=255)  # stored as text in the schema, not a datetime column

    class Meta:
        managed = False
        db_table = 'oauth1_nonces'
class Oauth1RequestTokens(models.Model):
    """Unmanaged model for the ``oauth1_request_tokens`` table (temporary credentials + verifier)."""
    consumer = models.ForeignKey(Oauth1Consumers, models.DO_NOTHING)
    user = models.ForeignKey('Users', models.DO_NOTHING, blank=True, null=True)
    token = models.CharField(max_length=255)
    secret = models.CharField(max_length=255)
    expires_at = models.BigIntegerField()
    verifier = models.CharField(max_length=255)

    class Meta:
        managed = False
        db_table = 'oauth1_request_tokens'
class Oauth2Accesstokens(models.Model):
    """Unmanaged model for the ``oauth2_accesstokens`` table; token string is unique."""
    client = models.ForeignKey('Oauth2Clients', models.DO_NOTHING)
    user = models.ForeignKey('Users', models.DO_NOTHING)
    token = models.CharField(unique=True, max_length=255)
    expires_at = models.BigIntegerField(blank=True, null=True)
    scope = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'oauth2_accesstokens'
class Oauth2Authcodes(models.Model):
    """Unmanaged model for the ``oauth2_authcodes`` table (authorization codes with redirect URI)."""
    client = models.ForeignKey('Oauth2Clients', models.DO_NOTHING)
    user = models.ForeignKey('Users', models.DO_NOTHING)
    token = models.CharField(unique=True, max_length=255)
    expires_at = models.BigIntegerField(blank=True, null=True)
    scope = models.CharField(max_length=255, blank=True, null=True)
    redirect_uri = models.TextField()

    class Meta:
        managed = False
        db_table = 'oauth2_authcodes'
class Oauth2Clients(models.Model):
    """Unmanaged model for the ``oauth2_clients`` table (registered OAuth2 clients)."""
    name = models.CharField(max_length=255)
    random_id = models.CharField(max_length=255)
    secret = models.CharField(max_length=255)
    redirect_uris = models.TextField()  # serialized list — exact encoding defined by the owning app
    allowed_grant_types = models.TextField()

    class Meta:
        managed = False
        db_table = 'oauth2_clients'
class Oauth2Refreshtokens(models.Model):
    """Unmanaged model for the ``oauth2_refreshtokens`` table; token string is unique."""
    client = models.ForeignKey(Oauth2Clients, models.DO_NOTHING)
    user = models.ForeignKey('Users', models.DO_NOTHING)
    token = models.CharField(unique=True, max_length=255)
    expires_at = models.BigIntegerField(blank=True, null=True)
    scope = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'oauth2_refreshtokens'
class Oauth2UserClientXref(models.Model):
    """Unmanaged model for the ``oauth2_user_client_xref`` join table (client ↔ user).

    Composite key (client, user) expressed via primary_key + unique_together.
    """
    client = models.ForeignKey(Oauth2Clients, models.DO_NOTHING, primary_key=True)
    user = models.ForeignKey('Users', models.DO_NOTHING)

    class Meta:
        managed = False
        db_table = 'oauth2_user_client_xref'
        unique_together = (('client', 'user'),)
class PageHits(models.Model):
    """Unmanaged model for the ``page_hits`` table (page-view events with geo/UA context and UTM source)."""
    page = models.ForeignKey('Pages', models.DO_NOTHING, blank=True, null=True)
    redirect = models.ForeignKey('PageRedirects', models.DO_NOTHING, blank=True, null=True)
    email = models.ForeignKey(Emails, models.DO_NOTHING, blank=True, null=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING, blank=True, null=True)
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING)
    device = models.ForeignKey(LeadDevices, models.DO_NOTHING, blank=True, null=True)
    date_hit = models.DateTimeField()
    date_left = models.DateTimeField(blank=True, null=True)
    country = models.CharField(max_length=255, blank=True, null=True)
    region = models.CharField(max_length=255, blank=True, null=True)
    city = models.CharField(max_length=255, blank=True, null=True)
    isp = models.CharField(max_length=255, blank=True, null=True)
    organization = models.CharField(max_length=255, blank=True, null=True)
    code = models.IntegerField()  # presumably the HTTP response code — confirm against the writer
    referer = models.TextField(blank=True, null=True)
    url = models.TextField(blank=True, null=True)
    url_title = models.CharField(max_length=255, blank=True, null=True)
    user_agent = models.TextField(blank=True, null=True)
    remote_host = models.CharField(max_length=255, blank=True, null=True)
    page_language = models.CharField(max_length=255, blank=True, null=True)
    browser_languages = models.TextField(blank=True, null=True)
    tracking_id = models.CharField(max_length=255)
    source = models.CharField(max_length=255, blank=True, null=True)
    source_id = models.IntegerField(blank=True, null=True)
    query = models.TextField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'page_hits'
class PageRedirects(models.Model):
    """Unmanaged model for the ``page_redirects`` table (tracked redirect URLs with hit counters)."""
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    redirect_id = models.CharField(max_length=25)
    url = models.TextField()
    hits = models.IntegerField()
    unique_hits = models.IntegerField()

    class Meta:
        managed = False
        db_table = 'page_redirects'
class Pages(models.Model):
    """Unmanaged model for the ``pages`` table (landing pages; self-referential FKs link translations and A/B variants)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    translation_parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)
    variant_parent = models.ForeignKey('self', models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    title = models.CharField(max_length=255)
    alias = models.CharField(max_length=255)
    template = models.CharField(max_length=255, blank=True, null=True)
    custom_html = models.TextField(blank=True, null=True)
    content = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    hits = models.IntegerField()
    unique_hits = models.IntegerField()
    variant_hits = models.IntegerField()
    revision = models.IntegerField()
    meta_description = models.CharField(max_length=255, blank=True, null=True)
    redirect_type = models.CharField(max_length=100, blank=True, null=True)
    redirect_url = models.CharField(max_length=2048, blank=True, null=True)
    is_preference_center = models.IntegerField(blank=True, null=True)
    no_index = models.IntegerField(blank=True, null=True)
    lang = models.CharField(max_length=255)
    variant_settings = models.TextField(blank=True, null=True)
    variant_start_date = models.DateTimeField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'pages'
class Permissions(models.Model):
    """Unmanaged model over the 'permissions' table (generated by inspectdb)."""
    role = models.ForeignKey('Roles', models.DO_NOTHING)
    bundle = models.CharField(max_length=50)
    name = models.CharField(max_length=50)
    # Permission flags stored as a bitmask integer.
    bitwise = models.IntegerField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'permissions'
        unique_together = (('bundle', 'name', 'role'),)
class PluginCitrixEvents(models.Model):
    """Unmanaged model over the 'plugin_citrix_events' table (generated by inspectdb)."""
    lead = models.ForeignKey(Leads, models.DO_NOTHING)
    product = models.CharField(max_length=255)
    email = models.CharField(max_length=255)
    event_name = models.CharField(max_length=255)
    event_desc = models.CharField(max_length=255, blank=True, null=True)
    event_type = models.CharField(max_length=50)
    event_date = models.DateTimeField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'plugin_citrix_events'
class PluginCrmPipedriveOwners(models.Model):
    """Unmanaged model over the 'plugin_crm_pipedrive_owners' table (generated by inspectdb)."""
    email = models.CharField(max_length=255)
    owner_id = models.IntegerField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'plugin_crm_pipedrive_owners'
class PluginIntegrationSettings(models.Model):
    """Unmanaged model over the 'plugin_integration_settings' table (generated by inspectdb)."""
    plugin = models.ForeignKey('Plugins', models.DO_NOTHING, blank=True, null=True)
    name = models.CharField(max_length=255)
    is_published = models.IntegerField()
    supported_features = models.TextField(blank=True, null=True)
    api_keys = models.TextField()
    feature_settings = models.TextField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'plugin_integration_settings'
class Plugins(models.Model):
    """Unmanaged model over the 'plugins' table (generated by inspectdb)."""
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    is_missing = models.IntegerField()
    bundle = models.CharField(unique=True, max_length=50)
    version = models.CharField(max_length=255, blank=True, null=True)
    author = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'plugins'
class PointLeadActionLog(models.Model):
    """Unmanaged model over the 'point_lead_action_log' table (generated by inspectdb).

    Composite PK (point, lead) in the DB; inspectdb maps only the first
    column as primary_key and declares the pair unique_together.
    """
    point = models.ForeignKey('Points', models.DO_NOTHING, primary_key=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING)
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING, blank=True, null=True)
    date_fired = models.DateTimeField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'point_lead_action_log'
        unique_together = (('point', 'lead'),)
class PointLeadEventLog(models.Model):
    """Unmanaged model over the 'point_lead_event_log' table (generated by inspectdb)."""
    event = models.ForeignKey('PointTriggerEvents', models.DO_NOTHING, primary_key=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING)
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING, blank=True, null=True)
    date_fired = models.DateTimeField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'point_lead_event_log'
        unique_together = (('event', 'lead'),)
class PointTriggerEvents(models.Model):
    """Unmanaged model over the 'point_trigger_events' table (generated by inspectdb)."""
    trigger = models.ForeignKey('PointTriggers', models.DO_NOTHING)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    type = models.CharField(max_length=50)  # NOTE: column name shadows built-in 'type'.
    action_order = models.IntegerField()
    properties = models.TextField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'point_trigger_events'
class PointTriggers(models.Model):
    """Unmanaged model over the 'point_triggers' table (generated by inspectdb)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    points = models.IntegerField()
    color = models.CharField(max_length=7)  # presumably a '#rrggbb' value — TODO confirm
    trigger_existing_leads = models.IntegerField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'point_triggers'
class Points(models.Model):
    """Unmanaged model over the 'points' table (generated by inspectdb)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    type = models.CharField(max_length=50)  # NOTE: column name shadows built-in 'type'.
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    delta = models.IntegerField()
    properties = models.TextField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'points'
class PushIds(models.Model):
    """Unmanaged model over the 'push_ids' table (generated by inspectdb)."""
    lead = models.ForeignKey(Leads, models.DO_NOTHING, blank=True, null=True)
    push_id = models.CharField(max_length=255)
    enabled = models.IntegerField()
    mobile = models.IntegerField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'push_ids'
class PushNotificationListXref(models.Model):
    """Unmanaged many-to-many join table 'push_notification_list_xref' (generated by inspectdb)."""
    notification = models.ForeignKey('PushNotifications', models.DO_NOTHING, primary_key=True)
    leadlist = models.ForeignKey(LeadLists, models.DO_NOTHING)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'push_notification_list_xref'
        unique_together = (('notification', 'leadlist'),)
class PushNotificationStats(models.Model):
    """Unmanaged model over the 'push_notification_stats' table (generated by inspectdb)."""
    notification = models.ForeignKey('PushNotifications', models.DO_NOTHING, blank=True, null=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING, blank=True, null=True)
    list = models.ForeignKey(LeadLists, models.DO_NOTHING, blank=True, null=True)  # NOTE: shadows built-in 'list'.
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING, blank=True, null=True)
    date_sent = models.DateTimeField()
    date_read = models.DateTimeField(blank=True, null=True)
    is_clicked = models.IntegerField()
    date_clicked = models.DateTimeField(blank=True, null=True)
    tracking_hash = models.CharField(max_length=255, blank=True, null=True)
    retry_count = models.IntegerField(blank=True, null=True)
    source = models.CharField(max_length=255, blank=True, null=True)
    source_id = models.IntegerField(blank=True, null=True)
    tokens = models.TextField(blank=True, null=True)
    click_count = models.IntegerField(blank=True, null=True)
    last_clicked = models.DateTimeField(blank=True, null=True)
    click_details = models.TextField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'push_notification_stats'
class PushNotifications(models.Model):
    """Unmanaged model over the 'push_notifications' table (generated by inspectdb)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    lang = models.CharField(max_length=255)
    url = models.TextField(blank=True, null=True)
    heading = models.TextField()
    message = models.TextField()
    button = models.TextField(blank=True, null=True)
    utm_tags = models.TextField(blank=True, null=True)
    notification_type = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    read_count = models.IntegerField()
    sent_count = models.IntegerField()
    mobile = models.IntegerField()
    mobilesettings = models.TextField(db_column='mobileSettings') # Field name made lowercase.

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'push_notifications'
class Reports(models.Model):
    """Unmanaged model over the 'reports' table (generated by inspectdb)."""
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    system = models.IntegerField()
    source = models.CharField(max_length=255)
    columns = models.TextField(blank=True, null=True)
    filters = models.TextField(blank=True, null=True)
    table_order = models.TextField(blank=True, null=True)
    graphs = models.TextField(blank=True, null=True)
    group_by = models.TextField(blank=True, null=True)
    aggregators = models.TextField(blank=True, null=True)
    settings = models.TextField(blank=True, null=True)
    is_scheduled = models.IntegerField()
    schedule_unit = models.CharField(max_length=255, blank=True, null=True)
    to_address = models.CharField(max_length=255, blank=True, null=True)
    schedule_day = models.CharField(max_length=255, blank=True, null=True)
    schedule_month_frequency = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'reports'
class ReportsSchedulers(models.Model):
    """Unmanaged model over the 'reports_schedulers' table (generated by inspectdb)."""
    report = models.ForeignKey(Reports, models.DO_NOTHING)
    schedule_date = models.DateTimeField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'reports_schedulers'
class Roles(models.Model):
    """Unmanaged model over the 'roles' table (generated by inspectdb)."""
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    is_admin = models.IntegerField()
    readable_permissions = models.TextField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'roles'
class SamlIdEntry(models.Model):
    """Unmanaged model over the 'saml_id_entry' table (generated by inspectdb)."""
    id = models.CharField(primary_key=True, max_length=255)  # NOTE: string PK replacing Django's default 'id'.
    entity_id = models.CharField(max_length=255)
    expirytimestamp = models.IntegerField(db_column='expiryTimestamp') # Field name made lowercase.

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'saml_id_entry'
        unique_together = (('id', 'entity_id'),)
class SmsMessageListXref(models.Model):
    """Unmanaged many-to-many join table 'sms_message_list_xref' (generated by inspectdb)."""
    sms = models.ForeignKey('SmsMessages', models.DO_NOTHING, primary_key=True)
    leadlist = models.ForeignKey(LeadLists, models.DO_NOTHING)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'sms_message_list_xref'
        unique_together = (('sms', 'leadlist'),)
class SmsMessageStats(models.Model):
    """Unmanaged model over the 'sms_message_stats' table (generated by inspectdb)."""
    sms = models.ForeignKey('SmsMessages', models.DO_NOTHING, blank=True, null=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING, blank=True, null=True)
    list = models.ForeignKey(LeadLists, models.DO_NOTHING, blank=True, null=True)  # NOTE: shadows built-in 'list'.
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING, blank=True, null=True)
    date_sent = models.DateTimeField()
    tracking_hash = models.CharField(max_length=255, blank=True, null=True)
    source = models.CharField(max_length=255, blank=True, null=True)
    source_id = models.IntegerField(blank=True, null=True)
    tokens = models.TextField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'sms_message_stats'
class SmsMessages(models.Model):
    """Unmanaged model over the 'sms_messages' table (generated by inspectdb)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    lang = models.CharField(max_length=255)
    message = models.TextField()
    sms_type = models.TextField(blank=True, null=True)
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)
    sent_count = models.IntegerField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'sms_messages'
class StageLeadActionLog(models.Model):
    """Unmanaged model over the 'stage_lead_action_log' table (generated by inspectdb)."""
    stage = models.ForeignKey('Stages', models.DO_NOTHING, primary_key=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING)
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING, blank=True, null=True)
    date_fired = models.DateTimeField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'stage_lead_action_log'
        unique_together = (('stage', 'lead'),)
class Stages(models.Model):
    """Unmanaged model over the 'stages' table (generated by inspectdb)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    weight = models.IntegerField()
    publish_up = models.DateTimeField(blank=True, null=True)
    publish_down = models.DateTimeField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'stages'
class TweetStats(models.Model):
    """Unmanaged model over the 'tweet_stats' table (generated by inspectdb)."""
    tweet = models.ForeignKey('Tweets', models.DO_NOTHING, blank=True, null=True)
    lead = models.ForeignKey(Leads, models.DO_NOTHING, blank=True, null=True)
    twitter_tweet_id = models.CharField(max_length=255, blank=True, null=True)
    handle = models.CharField(max_length=255)
    date_sent = models.DateTimeField(blank=True, null=True)
    is_failed = models.IntegerField(blank=True, null=True)
    retry_count = models.IntegerField(blank=True, null=True)
    source = models.CharField(max_length=255, blank=True, null=True)
    source_id = models.IntegerField(blank=True, null=True)
    favorite_count = models.IntegerField(blank=True, null=True)
    retweet_count = models.IntegerField(blank=True, null=True)
    response_details = models.TextField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'tweet_stats'
class Tweets(models.Model):
    """Unmanaged model over the 'tweets' table (generated by inspectdb)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    page = models.ForeignKey(Pages, models.DO_NOTHING, blank=True, null=True)
    asset = models.ForeignKey(Assets, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    media_id = models.CharField(max_length=255, blank=True, null=True)
    media_path = models.CharField(max_length=255, blank=True, null=True)
    text = models.CharField(max_length=280)  # 280 = Twitter's tweet length limit.
    sent_count = models.IntegerField(blank=True, null=True)
    favorite_count = models.IntegerField(blank=True, null=True)
    retweet_count = models.IntegerField(blank=True, null=True)
    lang = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'tweets'
class UserTokens(models.Model):
    """Unmanaged model over the 'user_tokens' table (generated by inspectdb)."""
    user = models.ForeignKey('Users', models.DO_NOTHING)
    authorizator = models.CharField(max_length=32)
    secret = models.CharField(unique=True, max_length=120)
    expiration = models.DateTimeField(blank=True, null=True)
    one_time_only = models.IntegerField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'user_tokens'
class Users(models.Model):
    """Unmanaged model over the 'users' table (generated by inspectdb)."""
    role = models.ForeignKey(Roles, models.DO_NOTHING)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    username = models.CharField(unique=True, max_length=255)
    password = models.CharField(max_length=64)  # presumably a password hash, not plaintext — TODO confirm
    first_name = models.CharField(max_length=255)
    last_name = models.CharField(max_length=255)
    email = models.CharField(unique=True, max_length=255)
    position = models.CharField(max_length=255, blank=True, null=True)
    timezone = models.CharField(max_length=255, blank=True, null=True)
    locale = models.CharField(max_length=255, blank=True, null=True)
    last_login = models.DateTimeField(blank=True, null=True)
    last_active = models.DateTimeField(blank=True, null=True)
    online_status = models.CharField(max_length=255, blank=True, null=True)
    preferences = models.TextField(blank=True, null=True)
    signature = models.TextField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'users'
class VideoHits(models.Model):
    """Unmanaged model over the 'video_hits' table (generated by inspectdb)."""
    lead = models.ForeignKey(Leads, models.DO_NOTHING, blank=True, null=True)
    ip = models.ForeignKey(IpAddresses, models.DO_NOTHING)
    date_hit = models.DateTimeField()
    date_left = models.DateTimeField(blank=True, null=True)
    country = models.CharField(max_length=255, blank=True, null=True)
    region = models.CharField(max_length=255, blank=True, null=True)
    city = models.CharField(max_length=255, blank=True, null=True)
    isp = models.CharField(max_length=255, blank=True, null=True)
    organization = models.CharField(max_length=255, blank=True, null=True)
    code = models.IntegerField()
    referer = models.TextField(blank=True, null=True)
    url = models.TextField(blank=True, null=True)
    user_agent = models.TextField(blank=True, null=True)
    remote_host = models.CharField(max_length=255, blank=True, null=True)
    guid = models.CharField(max_length=255)
    page_language = models.CharField(max_length=255, blank=True, null=True)
    browser_languages = models.TextField(blank=True, null=True)
    channel = models.CharField(max_length=255, blank=True, null=True)
    channel_id = models.IntegerField(blank=True, null=True)
    time_watched = models.IntegerField(blank=True, null=True)
    duration = models.IntegerField(blank=True, null=True)
    query = models.TextField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'video_hits'
class WebhookEvents(models.Model):
    """Unmanaged model over the 'webhook_events' table (generated by inspectdb)."""
    webhook = models.ForeignKey('Webhooks', models.DO_NOTHING)
    event_type = models.CharField(max_length=50)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'webhook_events'
class WebhookLogs(models.Model):
    """Unmanaged model over the 'webhook_logs' table (generated by inspectdb)."""
    webhook = models.ForeignKey('Webhooks', models.DO_NOTHING)
    status_code = models.CharField(max_length=50)
    date_added = models.DateTimeField(blank=True, null=True)
    note = models.CharField(max_length=255, blank=True, null=True)
    runtime = models.FloatField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'webhook_logs'
class WebhookQueue(models.Model):
    """Unmanaged model over the 'webhook_queue' table (generated by inspectdb)."""
    webhook = models.ForeignKey('Webhooks', models.DO_NOTHING)
    event = models.ForeignKey(WebhookEvents, models.DO_NOTHING)
    date_added = models.DateTimeField(blank=True, null=True)
    payload = models.TextField()

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'webhook_queue'
class Webhooks(models.Model):
    """Unmanaged model over the 'webhooks' table (generated by inspectdb)."""
    category = models.ForeignKey(Categories, models.DO_NOTHING, blank=True, null=True)
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)
    webhook_url = models.CharField(max_length=255)
    events_orderby_dir = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'webhooks'
class Widgets(models.Model):
    """Unmanaged model over the 'widgets' table (generated by inspectdb)."""
    is_published = models.IntegerField()
    date_added = models.DateTimeField(blank=True, null=True)
    created_by = models.IntegerField(blank=True, null=True)
    created_by_user = models.CharField(max_length=255, blank=True, null=True)
    date_modified = models.DateTimeField(blank=True, null=True)
    modified_by = models.IntegerField(blank=True, null=True)
    modified_by_user = models.CharField(max_length=255, blank=True, null=True)
    checked_out = models.DateTimeField(blank=True, null=True)
    checked_out_by = models.IntegerField(blank=True, null=True)
    checked_out_by_user = models.CharField(max_length=255, blank=True, null=True)
    name = models.CharField(max_length=255)
    type = models.CharField(max_length=255)  # NOTE: column name shadows built-in 'type'.
    width = models.IntegerField()
    height = models.IntegerField()
    cache_timeout = models.IntegerField(blank=True, null=True)
    ordering = models.IntegerField(blank=True, null=True)
    params = models.TextField(blank=True, null=True)

    class Meta:
        managed = False  # Django does not create or migrate this table.
        db_table = 'widgets'
| 43.223819
| 104
| 0.730344
| 10,756
| 84,200
| 5.550762
| 0.060989
| 0.112756
| 0.16287
| 0.212984
| 0.866893
| 0.854549
| 0.817014
| 0.778072
| 0.744322
| 0.683154
| 0
| 0.015362
| 0.158112
| 84,200
| 1,947
| 105
| 43.24602
| 0.826879
| 0.006021
| 0
| 0.605704
| 1
| 0
| 0.032109
| 0.006465
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.00062
| 0.00186
| 0
| 0.849969
| 0.00062
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
cfe2acf5df39b6354ac84b44bc21a18195edfe11
| 2,493
|
py
|
Python
|
2020/Day-12/Rain_Risk/vector.py
|
sreekesari-vangeepuram/aoc-2020
|
645531be0208affe042ac0328105b9ef3cfc9dbf
|
[
"MIT"
] | 1
|
2021-07-09T07:56:14.000Z
|
2021-07-09T07:56:14.000Z
|
2020/Day-12/Rain_Risk/vector.py
|
sreekesari-vangeepuram/adventofcode
|
645531be0208affe042ac0328105b9ef3cfc9dbf
|
[
"MIT"
] | null | null | null |
2020/Day-12/Rain_Risk/vector.py
|
sreekesari-vangeepuram/adventofcode
|
645531be0208affe042ac0328105b9ef3cfc9dbf
|
[
"MIT"
] | null | null | null |
class vector:
    """Ship state for AoC 2020 Day 12 part 1: a position (x, y) plus a
    facing direction, one of 'E', 'W', 'N', 'S'.

    Instructions are (action, amount) pairs. The original implementation
    spelled out every (facing, action) combination in ~100 lines of
    if/elif chains; this version is table-driven with identical results.
    """

    # Unit translation for each absolute compass direction.
    _MOVES = {'E': (1, 0), 'W': (-1, 0), 'N': (0, 1), 'S': (0, -1)}
    # Compass laid out clockwise, so a right turn is an index increment.
    _CLOCKWISE = 'ESWN'

    def __init__(self, x, y, pointing_direction):
        self.x = x
        self.y = y
        self.direction = pointing_direction

    def get_pos(self):
        """Return the current state as an (x, y, direction) tuple."""
        return (self.x, self.y, self.direction)

    def change_position(self, ins):
        """Apply a movement instruction ``ins = (action, amount)``.

        'N'/'S'/'E'/'W' translate along that axis regardless of facing;
        'F' translates along the current facing direction. Any other
        action leaves the position unchanged (as in the original code).
        """
        action = self.direction if ins[0] == 'F' else ins[0]
        dx, dy = self._MOVES.get(action, (0, 0))
        self.x += dx * ins[1]
        self.y += dy * ins[1]

    def change_direction(self, ins):
        """Apply a turn instruction ``ins = ('L'|'R', degrees)``.

        Only 90/180/270-degree turns are recognised — exactly the cases
        the original if/elif chains handled; anything else is a no-op.
        """
        if ins[0] not in ('L', 'R') or ins[1] not in (90, 180, 270):
            return
        steps = ins[1] // 90          # quarter turns, clockwise-positive
        if ins[0] == 'L':
            steps = -steps
        idx = self._CLOCKWISE.index(self.direction)
        self.direction = self._CLOCKWISE[(idx + steps) % 4]

    def manhattan_distance(self):
        """Return ``|x| + |y|``, the Manhattan distance from the origin."""
        return abs(self.x) + abs(self.y)
| 24.683168
| 54
| 0.490975
| 413
| 2,493
| 2.939467
| 0.082324
| 0.131796
| 0.105437
| 0.108731
| 0.817957
| 0.794893
| 0.757002
| 0.755354
| 0.750412
| 0.710873
| 0
| 0.070986
| 0.288006
| 2,493
| 100
| 55
| 24.93
| 0.612958
| 0
| 0
| 0.813187
| 0
| 0
| 0.023265
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054945
| false
| 0
| 0
| 0.021978
| 0.087912
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cfe73730991d31b8a39a3f48d514ae3136c07d3d
| 243
|
py
|
Python
|
PackageName/plots/__init__.py
|
asiaszmek/CA1TestUnit
|
ff4d70a14ab15331528dbdd67973a9b6c037a13f
|
[
"BSD-3-Clause"
] | 1
|
2021-05-05T15:44:13.000Z
|
2021-05-05T15:44:13.000Z
|
DemoTestUnit/plots/__init__.py
|
appukuttan-shailesh/DemoTestUnit
|
bd0430fdb730ad18492f3cd32b1af039fa9fe093
|
[
"BSD-3-Clause"
] | null | null | null |
DemoTestUnit/plots/__init__.py
|
appukuttan-shailesh/DemoTestUnit
|
bd0430fdb730ad18492f3cd32b1af039fa9fe093
|
[
"BSD-3-Clause"
] | null | null | null |
import sciunit
# ===============================================================================
# Enter source code for generating result related files here
# ===============================================================================
| 30.375
| 81
| 0.259259
| 11
| 243
| 5.727273
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078189
| 243
| 7
| 82
| 34.714286
| 0.28125
| 0.897119
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
32150535b32a48384a2a6e655f4952188ed39b17
| 2,276
|
py
|
Python
|
kiqpo/core/Head.py
|
bionic-py/Bionic
|
a54c85107a6a2aa9a9563b6b3e1f9bb64d63faa4
|
[
"MIT"
] | 9
|
2021-10-31T03:38:16.000Z
|
2021-12-17T00:03:36.000Z
|
kiqpo/core/Head.py
|
bionic-py/Bionic
|
a54c85107a6a2aa9a9563b6b3e1f9bb64d63faa4
|
[
"MIT"
] | 12
|
2021-11-11T14:18:09.000Z
|
2021-12-03T14:00:25.000Z
|
kiqpo/core/Head.py
|
kiqpo/kiqpo
|
a54c85107a6a2aa9a9563b6b3e1f9bb64d63faa4
|
[
"MIT"
] | 3
|
2022-03-03T18:30:53.000Z
|
2022-03-09T13:29:39.000Z
|
def HeadCore(Title="Bionic-Ui", Css=True, Style="./css/core.css", ThemeColor="#119f7f", SiteUrl="www.hey.com", Image_description="this is an image description", Imageurl="", Type="Landing-page", Keywords="", Description="", Author="", Icon="./assets/icons/png/icon.png"):
    """Return the HTML <head> markup for a page.

    Fixes over the original:
    - ``Style`` was a dead parameter: the core stylesheet href was
      hard-coded to "./css/core.css" in both branches. It is now used
      (default unchanged, so existing callers get identical output).
    - The ``Css=False`` branch had a malformed viewport attribute
      (``initial-scale=1.0 /"``) and a duplicated, reordered copy of the
      exact same template. Both original branches emitted the same set
      of stylesheet links, so the branches are collapsed into one
      well-formed template. ``Css`` is kept for backward compatibility
      but no longer changes the output.

    Parameters mirror standard meta/Open Graph tags: ThemeColor,
    Description, Keywords, Author, Title, SiteUrl, Image_description,
    Imageurl, Type, Icon.
    """
    return f"""<meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <meta name="theme-color" content="{ThemeColor}" />
    <meta name="description" content="{Description}" />
    <meta name="keywords" content="{Keywords}">
    <meta name="author" content="{Author}" />
    <meta property="og:title" content="{Title}" />
    <meta property="og:url" content="{SiteUrl}" />
    <link rel="manifest" href="/manifest.json" />
    <meta property="og:description" content="{Image_description}" />
    <meta property="og:image" itemprop="image" content="{Imageurl}" />
    <meta property="og:type" content="{Type}"/>
    <link rel="stylesheet" href="./css/material.min.css">
    <link href="{Style}" rel="stylesheet" />
    <link rel="stylesheet" href="./css/theme.css" />
    <link rel="icon" href="{Icon}" sizes="16x16" type="image/ico" />
    <title>{Title}</title>"""
| 56.9
| 271
| 0.634007
| 283
| 2,276
| 5.088339
| 0.250883
| 0.055556
| 0.097222
| 0.072917
| 0.813889
| 0.797222
| 0.797222
| 0.754861
| 0.754861
| 0.754861
| 0
| 0.009212
| 0.141476
| 2,276
| 39
| 272
| 58.358974
| 0.727738
| 0
| 0
| 0.717949
| 0
| 0.102564
| 0.894552
| 0.259227
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5c674f6451cd5c0695a93f10658aa9cd15da5ceb
| 3,255
|
py
|
Python
|
Lesson04/random_resize.py
|
supercatex/ML_Lesson
|
e975d4486be0faccab3f38b55ed73298756dd3d5
|
[
"MIT"
] | 1
|
2020-06-05T07:11:02.000Z
|
2020-06-05T07:11:02.000Z
|
Lesson04/random_resize.py
|
supercatex/ML_Lesson
|
e975d4486be0faccab3f38b55ed73298756dd3d5
|
[
"MIT"
] | null | null | null |
Lesson04/random_resize.py
|
supercatex/ML_Lesson
|
e975d4486be0faccab3f38b55ed73298756dd3d5
|
[
"MIT"
] | null | null | null |
import os
import sys
import cv2
import numpy as np
def generate_csv(
        dir_root="../../dataset",
        dir_img="img",
        csv="data.csv",
        new_csv="new_data.csv"
):
    """Re-encode every image listed in *csv* and write *new_csv*.

    Each output row keeps the image name and class label, but replaces the
    bounding box with the full image extent (x1=y1=0, x2=width, y2=height)
    read back from the re-encoded file.

    Parameters:
        dir_root -- dataset root directory containing the csv files
        dir_img  -- sub-directory (under dir_root) holding the images
        csv      -- input annotation file name
        new_csv  -- output annotation file name

    Raises:
        FileNotFoundError -- if the image directory does not exist.
    """
    dir_output_img = os.path.join(dir_root, dir_img)
    path_output_csv = os.path.join(dir_root, csv)
    path_output_new_csv = os.path.join(dir_root, new_csv)
    if not os.path.exists(dir_output_img):
        # BUG FIX: the original `raise ("...", path)` tried to raise a tuple,
        # which is itself a TypeError in Python 3. Raise a real exception.
        raise FileNotFoundError("Image file not found-> %s" % dir_output_img)
    print("Reading csv file...")
    with open(path_output_csv, "r") as f:
        lines = f.readlines()
    print("Create csv file...")
    with open(path_output_new_csv, "w") as f:
        f.write("img,width,height,x1,y1,x2,y2,class\n")
        for i, line in enumerate(lines):
            sys.stdout.write("\rReading image file...%d/%d" % (i + 1, len(lines)))
            sys.stdout.flush()
            temp = line.split(",")
            name = temp[0].strip()
            if name == "img":  # skip the header row
                continue
            # The original parsed w/h/x1/y1/x2/y2 into unused locals; only
            # the label column is actually needed.
            label = int(temp[7])
            path = os.path.join(dir_output_img, name)
            image = cv2.imread(path, cv2.IMREAD_UNCHANGED)
            # Re-encode the image in place, then record its real dimensions.
            cv2.imwrite(path, image)
            f.write("%s,%d,%d,%d,%d,%d,%d,%d\n" % (
                name,
                image.shape[1],
                image.shape[0],
                0,
                0,
                image.shape[1],
                image.shape[0],
                label
            ))
            del image  # release the decoded pixels before the next iteration
def random_resize(
        dir_root="../../dataset",
        dir_img="img",
        csv="data.csv",
        new_csv="new_data.csv"
):
    """Destructively resize every listed image to a random square.

    Each image referenced by *csv* is overwritten with a random
    (20..99) x (20..99) square version of itself, and *new_csv* is written
    with the new dimensions as the full-image bounding box.

    Parameters:
        dir_root -- dataset root directory containing the csv files
        dir_img  -- sub-directory (under dir_root) holding the images
        csv      -- input annotation file name
        new_csv  -- output annotation file name

    Raises:
        FileNotFoundError -- if the input csv file does not exist.
    """
    dir_output_img = os.path.join(dir_root, dir_img)
    path_output_csv = os.path.join(dir_root, csv)
    path_output_new_csv = os.path.join(dir_root, new_csv)
    if not os.path.exists(path_output_csv):
        # BUG FIX: the original `raise("...", path)` raised a tuple, which is
        # a TypeError in Python 3. Raise a real exception instead.
        raise FileNotFoundError("CSV file not found-> %s" % path_output_csv)
    print("Reading csv file...")
    with open(path_output_csv, "r") as f:
        lines = f.readlines()
    print("Create csv file...")
    with open(path_output_new_csv, "w") as f:
        f.write("img,width,height,x1,y1,x2,y2,class\n")
        for i, line in enumerate(lines):
            sys.stdout.write("\rReading image file...%d/%d" % (i + 1, len(lines)))
            sys.stdout.flush()
            temp = line.split(",")
            name = temp[0].strip()
            if name == "img":  # skip the header row
                continue
            # The original parsed w/h/x1/y1/x2/y2 into unused locals; only
            # the label column is actually needed.
            label = int(temp[7])
            rs = np.random.randint(20, 100)  # random side length, 20..99 px
            path = os.path.join(dir_output_img, name)
            image = cv2.imread(path, cv2.IMREAD_UNCHANGED)
            image = cv2.resize(image, (rs, rs))
            # Overwrite the original file with the shrunken square image.
            cv2.imwrite(path, image)
            f.write("%s,%d,%d,%d,%d,%d,%d,%d\n" % (
                name,
                image.shape[1],
                image.shape[0],
                0,
                0,
                image.shape[1],
                image.shape[0],
                label
            ))
            del image  # release the decoded pixels before the next iteration
if __name__ == "__main__":
# random_resize()
generate_csv()
| 25.833333
| 78
| 0.525346
| 461
| 3,255
| 3.561822
| 0.182213
| 0.017052
| 0.01827
| 0.063337
| 0.853837
| 0.853837
| 0.853837
| 0.853837
| 0.853837
| 0.853837
| 0
| 0.026106
| 0.305684
| 3,255
| 125
| 79
| 26.04
| 0.700442
| 0.004608
| 0
| 0.862745
| 1
| 0
| 0.119209
| 0.037678
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019608
| false
| 0
| 0.039216
| 0
| 0.058824
| 0.039216
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5ccdcf82f06a2dd9192e04ea630e62386dc03b62
| 220
|
py
|
Python
|
HumanModeling/environment/__init__.py
|
nesl/EngagementService
|
bb8dc5a58d2038ace6467bfbcf4d253680628f67
|
[
"BSD-3-Clause"
] | 6
|
2018-09-01T03:32:10.000Z
|
2021-11-14T10:39:41.000Z
|
HumanModeling/environment/__init__.py
|
nesl/EngagementService
|
bb8dc5a58d2038ace6467bfbcf4d253680628f67
|
[
"BSD-3-Clause"
] | null | null | null |
HumanModeling/environment/__init__.py
|
nesl/EngagementService
|
bb8dc5a58d2038ace6467bfbcf4d253680628f67
|
[
"BSD-3-Clause"
] | null | null | null |
from .always_say_ok_user import AlwaysSayOKUser
from .stubborn_user import StubbornUser
from .less_stubborn_user import LessStubbornUser
from .survey_user import SurveyUser
from .mturk_survey_user import MTurkSurveyUser
| 36.666667
| 48
| 0.886364
| 29
| 220
| 6.413793
| 0.517241
| 0.268817
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 220
| 5
| 49
| 44
| 0.93
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7a2a2944a1f1b1934e92da155d7d4589d515eef6
| 1,570
|
py
|
Python
|
pyaz/network/application_gateway/identity/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/network/application_gateway/identity/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/network/application_gateway/identity/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from .... pyaz_utils import _call_az
def assign(gateway_name, identity, resource_group, no_wait=None):
    """Attach a managed service identity to an application gateway.

    Required Parameters:
    - gateway_name -- Name of the application gateway.
    - identity -- Name or ID of the ManagedIdentity Resource
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    """
    # Pass the arguments explicitly; at this point the parameters are the
    # only local names, so this dict matches what locals() produced.
    params = {
        "gateway_name": gateway_name,
        "identity": identity,
        "resource_group": resource_group,
        "no_wait": no_wait,
    }
    return _call_az("az network application-gateway identity assign", params)
def remove(gateway_name, resource_group, no_wait=None):
    """Detach the managed service identity from an application gateway.

    Required Parameters:
    - gateway_name -- Name of the application gateway.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    """
    # Pass the arguments explicitly; at this point the parameters are the
    # only local names, so this dict matches what locals() produced.
    params = {
        "gateway_name": gateway_name,
        "resource_group": resource_group,
        "no_wait": no_wait,
    }
    return _call_az("az network application-gateway identity remove", params)
def show(gateway_name, resource_group):
    """Display the managed service identity of an application gateway.

    Required Parameters:
    - gateway_name -- Name of the application gateway.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    """
    # Pass the arguments explicitly; at this point the parameters are the
    # only local names, so this dict matches what locals() produced.
    params = {
        "gateway_name": gateway_name,
        "resource_group": resource_group,
    }
    return _call_az("az network application-gateway identity show", params)
| 37.380952
| 128
| 0.723567
| 205
| 1,570
| 5.419512
| 0.234146
| 0.105311
| 0.093609
| 0.075608
| 0.79928
| 0.757876
| 0.757876
| 0.757876
| 0.715572
| 0.715572
| 0
| 0
| 0.194268
| 1,570
| 41
| 129
| 38.292683
| 0.878261
| 0.643312
| 0
| 0
| 0
| 0
| 0.297593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
7a2acd5289cac8e6eac2758d8a6a491a5e1830df
| 7,635
|
py
|
Python
|
tests/unit/certificates/conftest.py
|
SpComb/hcloud-python
|
ed00e39dd2c2625cc0aae0e6153cee9c0b06c4af
|
[
"MIT"
] | 1
|
2021-06-01T03:15:08.000Z
|
2021-06-01T03:15:08.000Z
|
tests/unit/certificates/conftest.py
|
CPT-Jack-A-Castle/hcloud-python
|
1500fbc166df4d33c9a5dddcb45a06235b2f9514
|
[
"MIT"
] | null | null | null |
tests/unit/certificates/conftest.py
|
CPT-Jack-A-Castle/hcloud-python
|
1500fbc166df4d33c9a5dddcb45a06235b2f9514
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.fixture()
def certificate_response():
    """API payload for one managed certificate whose issuance failed."""
    status = {
        "issuance": "failed",
        "renewal": "scheduled",
        "error": {"code": "error_code", "message": "error message"},
    }
    cert = {
        "id": 2323,
        "name": "My Certificate",
        "type": "managed",
        "labels": {},
        "certificate": "-----BEGIN CERTIFICATE-----\n...",
        "created": "2019-01-08T12:10:00+00:00",
        "not_valid_before": "2019-01-08T10:00:00+00:00",
        "not_valid_after": "2019-07-08T09:59:59+00:00",
        "domain_names": ["example.com", "webmail.example.com", "www.example.com"],
        "fingerprint": "03:c7:55:9b:2a:d1:04:17:09:f6:d0:7f:18:34:63:d4:3e:5f",
        "status": status,
        "used_by": [{"id": 42, "type": "server"}],
    }
    return {"certificate": cert}
@pytest.fixture()
def create_managed_certificate_response():
    """API payload for creating a managed certificate (issuance pending)."""
    cert = {
        "id": 2323,
        "name": "My Certificate",
        "type": "managed",
        "labels": {},
        "certificate": "-----BEGIN CERTIFICATE-----\n...",
        "created": "2019-01-08T12:10:00+00:00",
        "not_valid_before": "2019-01-08T10:00:00+00:00",
        "not_valid_after": "2019-07-08T09:59:59+00:00",
        "domain_names": ["example.com", "webmail.example.com", "www.example.com"],
        "fingerprint": "03:c7:55:9b:2a:d1:04:17:09:f6:d0:7f:18:34:63:d4:3e:5f",
        "status": {"issuance": "pending", "renewal": "scheduled", "error": None},
        "used_by": [{"id": 42, "type": "load_balancer"}],
    }
    action = {
        "id": 14,
        "command": "issue_certificate",
        "status": "success",
        "progress": 100,
        "started": "2021-01-30T23:55:00+00:00",
        "finished": "2021-01-30T23:57:00+00:00",
        "resources": [{"id": 896, "type": "certificate"}],
        "error": {"code": "action_failed", "message": "Action failed"},
    }
    return {"certificate": cert, "action": action}
@pytest.fixture()
def two_certificates_response():
    """Listing payload with two uploaded certificates (identical except
    for id and name)."""
    def uploaded_cert(cert_id, cert_name):
        # The two entries share every field but id/name.
        return {
            "id": cert_id,
            "name": cert_name,
            "labels": {},
            "type": "uploaded",
            "certificate": "-----BEGIN CERTIFICATE-----\n...",
            "created": "2019-01-08T12:10:00+00:00",
            "not_valid_before": "2019-01-08T10:00:00+00:00",
            "not_valid_after": "2019-07-08T09:59:59+00:00",
            "domain_names": ["example.com", "webmail.example.com", "www.example.com"],
            "fingerprint": "03:c7:55:9b:2a:d1:04:17:09:f6:d0:7f:18:34:63:d4:3e:5f",
            "status": None,
            "used_by": [{"id": 42, "type": "load_balancer"}],
        }
    return {
        "certificates": [
            uploaded_cert(2323, "My Certificate"),
            uploaded_cert(2324, "My website cert"),
        ]
    }
@pytest.fixture()
def one_certificates_response():
    """Listing payload containing a single uploaded certificate."""
    cert = {
        "id": 2323,
        "name": "My Certificate",
        "labels": {},
        "type": "uploaded",
        "certificate": "-----BEGIN CERTIFICATE-----\n...",
        "created": "2019-01-08T12:10:00+00:00",
        "not_valid_before": "2019-01-08T10:00:00+00:00",
        "not_valid_after": "2019-07-08T09:59:59+00:00",
        "domain_names": ["example.com", "webmail.example.com", "www.example.com"],
        "fingerprint": "03:c7:55:9b:2a:d1:04:17:09:f6:d0:7f:18:34:63:d4:3e:5f",
        "status": None,
        "used_by": [{"id": 42, "type": "load_balancer"}],
    }
    return {"certificates": [cert]}
@pytest.fixture()
def response_update_certificate():
    """API payload returned after renaming a certificate to "New name"."""
    cert = {
        "id": 2323,
        "name": "New name",
        "labels": {},
        "type": "uploaded",
        "certificate": "-----BEGIN CERTIFICATE-----\n...",
        "created": "2019-01-08T12:10:00+00:00",
        "not_valid_before": "2019-01-08T10:00:00+00:00",
        "not_valid_after": "2019-07-08T09:59:59+00:00",
        "domain_names": ["example.com", "webmail.example.com", "www.example.com"],
        "fingerprint": "03:c7:55:9b:2a:d1:04:17:09:f6:d0:7f:18:34:63:d4:3e:5f",
        "status": None,
        "used_by": [{"id": 42, "type": "load_balancer"}],
    }
    return {"certificate": cert}
@pytest.fixture()
def response_get_actions():
    """Action-listing payload with one completed change_protection action."""
    action = {
        "id": 13,
        "command": "change_protection",
        "status": "success",
        "progress": 100,
        "started": "2016-01-30T23:55:00+00:00",
        "finished": "2016-01-30T23:56:00+00:00",
        "resources": [{"id": 14, "type": "certificate"}],
        "error": {"code": "action_failed", "message": "Action failed"},
    }
    return {"actions": [action]}
@pytest.fixture()
def response_retry_issuance_action():
    """Payload for a retried certificate issuance that is still running."""
    action = {
        "id": 14,
        "command": "issue_certificate",
        "status": "running",
        "progress": 0,
        "started": "2016-01-30T23:50+00:00",
        "finished": None,
        "resources": [{"id": 42, "type": "certificate"}],
        "error": {"code": "action_failed", "message": "Action failed"},
    }
    return {"action": action}
| 29.941176
| 87
| 0.369221
| 647
| 7,635
| 4.25966
| 0.163833
| 0.065312
| 0.047896
| 0.039187
| 0.858491
| 0.810958
| 0.810958
| 0.765965
| 0.75508
| 0.735123
| 0
| 0.154417
| 0.470727
| 7,635
| 254
| 88
| 30.059055
| 0.527592
| 0
| 0
| 0.655462
| 0
| 0.02521
| 0.361624
| 0.133071
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| true
| 0
| 0.004202
| 0.029412
| 0.063025
| 0.02521
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7a34aa955aad823e2bc987e211067ddb27c8e2d4
| 13,053
|
py
|
Python
|
examples/helloworld/python_image.py
|
jnadro/pybgfx
|
b08755355e05a83d6a6f49cc4710cd86c2de3f2f
|
[
"BSD-2-Clause"
] | 72
|
2016-03-01T03:54:25.000Z
|
2022-03-28T23:19:46.000Z
|
examples/helloworld/python_image.py
|
jnadro/pybgfx
|
b08755355e05a83d6a6f49cc4710cd86c2de3f2f
|
[
"BSD-2-Clause"
] | 3
|
2017-03-14T22:10:11.000Z
|
2021-05-22T07:43:43.000Z
|
examples/helloworld/python_image.py
|
jnadro/pybgfx
|
b08755355e05a83d6a6f49cc4710cd86c2de3f2f
|
[
"BSD-2-Clause"
] | 2
|
2017-09-18T08:47:20.000Z
|
2019-10-06T12:20:55.000Z
|
from ctypes import c_uint8
s_python_logo = (c_uint8*2160)(
*[
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x2e, 0x0f, 0x2e, 0x0f, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x37, 0x04, 0x37, 0x04, 0x2e, 0x0f, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x2e, 0x0f, 0x2e, 0x0f, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x3a, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x20, 0x04, 0x20, 0x04,
0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x20, 0x04,
0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x20, 0x04,
0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x20, 0x04, 0x3a, 0x04, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x20, 0x04,
0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x2e, 0x0f, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b,
0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b,
0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x7e, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04,
0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04,
0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04,
0x20, 0x04, 0x37, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x24, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2e, 0x0f, 0x2e, 0x0f, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2e, 0x0f, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2e, 0x0f, 0x2e, 0x0f, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x2e, 0x0f, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x3d, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x2e, 0x0f, 0x2e, 0x0f, 0x2b, 0x0b, 0x2b, 0x0b, 0x2b, 0x0b, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04, 0x20, 0x04,
0x20, 0x04, 0x2d, 0x0c, 0x3d, 0x0c, 0x20, 0x04, 0x70, 0x0b, 0x79, 0x0b, 0x62, 0x0b, 0x67, 0x0b, 0x66, 0x0b, 0x78, 0x0b, 0x20, 0x0b, 0x70, 0x0b, 0x79, 0x0b, 0x74, 0x0b, 0x68, 0x0b, 0x6f, 0x0b, 0x6e, 0x0b, 0x20, 0x0b, 0x62, 0x0b, 0x69, 0x0b, 0x6e, 0x0b, 0x64, 0x0b, 0x69, 0x0b, 0x6e, 0x0b, 0x67, 0x0b, 0x73, 0x0b, 0x20, 0x0b, 0x66, 0x0b, 0x6f, 0x0b, 0x72, 0x0b, 0x20, 0x0b, 0x62, 0x0b, 0x67, 0x0b, 0x66, 0x0b, 0x78, 0x0b, 0x20, 0x0b, 0x3d, 0x0c, 0x2d, 0x0c, 0x20, 0x04, 0x20, 0x04,
])
| 407.90625
| 480
| 0.665518
| 2,171
| 13,053
| 3.999539
| 0.016582
| 0.332604
| 0.46712
| 0.61177
| 0.974778
| 0.967408
| 0.967408
| 0.966486
| 0.962801
| 0.962801
| 0
| 0.524871
| 0.168314
| 13,053
| 31
| 481
| 421.064516
| 0.274963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.661917
| 0
| 0
| 1
| 0
| false
| 0
| 0.032258
| 0
| 0.032258
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
7a48747bf8ee04c027cf150a72f35a2d8fdba437
| 6,803
|
py
|
Python
|
ZoomAutomationBot.py
|
erknabd/zoomautomation
|
8cabf302a701ebaf6d6dbacc549053fb1d309c38
|
[
"MIT"
] | 8
|
2021-04-06T09:07:20.000Z
|
2021-05-09T14:47:44.000Z
|
ZoomAutomationBot.py
|
erknabd/zoomautomation
|
8cabf302a701ebaf6d6dbacc549053fb1d309c38
|
[
"MIT"
] | null | null | null |
ZoomAutomationBot.py
|
erknabd/zoomautomation
|
8cabf302a701ebaf6d6dbacc549053fb1d309c38
|
[
"MIT"
] | 3
|
2021-05-01T10:48:04.000Z
|
2021-05-09T14:47:46.000Z
|
from selenium import webdriver
import time
import keyboard
import datetime
# Timestamp captured once at startup; the scheduling loop below compares
# the current time-of-day against the lesson times each iteration.
x = datetime.datetime.now()

# Change <BROWSER NAME> according to your browser
# NOTE(template): "<BROWSER NAME>" is a placeholder and NOT valid Python —
# replace it with a real webdriver class (e.g. Chrome, Firefox) and set
# driver_path to the matching driver executable before running.
driver_path = r"Your_Driver_Path"
browser = webdriver.<BROWSER NAME>(executable_path=driver_path)

# Days (Not change)
# Weekday names must match datetime.strftime("%A") output exactly.
day1 = "Monday"
day2 = "Tuesday"
day3 = "Wednesday"
day4 = "Thursday"
day5 = "Friday"

# Lesson Times (You can Add or Remove according to your lessons)
# Format must match strftime('%X'), i.e. "HH:MM:SS" 24-hour time.
lesson1 = "xx:xx:xx"
lesson2 = "xx:xx:xx"
lesson3 = "xx:xx:xx"
lesson4 = "xx:xx:xx"
lesson5 = "xx:xx:xx"
lesson6 = "xx:xx:xx"
lesson7 = "xx:xx:xx"
lesson8 = "xx:xx:xx"

# Lesson Names and Links (You can Add or Remove according to your lessons)
# Each entry is the Zoom join URL opened for the corresponding lesson slot.
lesname1 = ("Zoom-link")
lesname2 = ("Zoom-link")
lesname3 = ("Zoom-link")
lesname4 = ("Zoom-link")
lesname5 = ("Zoom-link")
lesname6 = ("Zoom-link")
lesname7 = ("Zoom-link")
lesname8 = ("Zoom-link")
def open_lesson():
    """Drive the already-loaded Zoom page: dismiss the launch popup, click
    the in-page button, confirm via keyboard, then close the browser."""
    time.sleep(2)
    keyboard.press_and_release('esc')  # dismiss the protocol-handler popup
    time.sleep(8)
    launch_button = browser.find_element_by_xpath(
        "//*[@id='zoom-ui-frame']/div[2]/div/div[1]/div/div")
    launch_button.click()
    time.sleep(1)
    # Focus the confirmation dialog's left button and accept it.
    for key in ('left', 'enter'):
        keyboard.press_and_release(key)
    time.sleep(8)
    browser.close()
# Customize "lesnameX" according to your lesson time table
while True:
an = datetime.datetime.now()
hour = datetime.datetime.strftime(an, '%X')
if hour == lesson1 and day1 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson2 and day1 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson3 and day1 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson4 and day1 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson5 and day1 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson6 and day1 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson7 and day1 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson8 and day1 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson1 and day2 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson2 and day2 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson3 and day2 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson4 and day2 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson5 and day2 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson6 and day2 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson7 and day2 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson8 and day2 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson1 and day3 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson2 and day3 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson3 and day3 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson4 and day3 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson5 and day3 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson6 and day3 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson7 and day3 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson8 and day3 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson1 and day4 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson2 and day4 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson3 and day4 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson4 and day4 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson5 and day4 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson6 and day4 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson7 and day4 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson8 and day4 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson1 and day5 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson2 and day5 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson3 and day5 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson4 and day5 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson5 and day5 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson6 and day5 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson7 and day5 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
elif hour == lesson8 and day5 == x.strftime("%A"):
browser.get(lesnameX)
open_lesson()
break
else:
pass
| 26.574219
| 100
| 0.52859
| 763
| 6,803
| 4.63827
| 0.129751
| 0.115852
| 0.113026
| 0.192145
| 0.752472
| 0.752472
| 0.752472
| 0.752472
| 0.752472
| 0.730432
| 0
| 0.024061
| 0.346318
| 6,803
| 255
| 101
| 26.678431
| 0.771756
| 0.038071
| 0
| 0.598039
| 0
| 0.004902
| 0.05078
| 0.007648
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.004902
| 0.019608
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7a8c9c94c6651995637d57d3b38791a590b511d9
| 29,049
|
py
|
Python
|
tests/dhcpv6/kea_only/host_reservation/test_host_reservation_options.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 22
|
2015-02-27T11:51:05.000Z
|
2022-02-28T12:39:29.000Z
|
tests/dhcpv6/kea_only/host_reservation/test_host_reservation_options.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 16
|
2018-10-30T15:00:12.000Z
|
2019-01-11T17:55:13.000Z
|
tests/dhcpv6/kea_only/host_reservation/test_host_reservation_options.py
|
isc-projects/forge
|
dfec8b41003d6b5a229f69ee93616e0e5cc6d71b
|
[
"0BSD"
] | 11
|
2015-02-27T11:51:36.000Z
|
2021-03-30T08:33:54.000Z
|
"""Host Reservation including options DHCPv6 stored in MySQL database"""
# pylint: disable=invalid-name,line-too-long
import pytest
import srv_control
import misc
import srv_msg
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_mysql_duid_ll_matching_option():
    """MySQL-backed reservation: reserved address and option only for the matching DUID.

    A host record binds DUID 00:03:00:01:f6:f5:f4:f3:f2:01 to address
    3000::100 and to option 7 (preference) value 10, scoped to subnet 1.
    A client with a different DUID must get no reserved address and the
    globally configured preference value 123 instead.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('MySQL')
    srv_control.new_db_backend_reservation('MySQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'MySQL', 1)
    srv_control.ipv6_address_db_backend_reservation('3000::100', '$(EMPTY)', 'MySQL', 1)
    # Reserve option 7 (preference) with value 10 on MySQL record 1.
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'MySQL',
                                             1)
    srv_control.upload_db_reservation('MySQL')
    # Global preference, returned when no reservation matches.
    srv_control.config_srv('preference', 0, '123')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: matching DUID -> reserved address and preference 10.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::100')
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    # Exchange 2: different DUID -> global preference 123.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:21')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 123)
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_mysql_duid_ll_matching_option_no_address_1():
    """Address-less MySQL reservation: only option 7 is reserved.

    Same host record as the previous test but without an address
    reservation: the matching DUID gets a pool lease (not 3000::100)
    plus preference 10; a different DUID gets the global value 123.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('MySQL')
    srv_control.new_db_backend_reservation('MySQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'MySQL', 1)
    # Reserve option 7 (preference) value 10; note: no address reservation.
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'MySQL',
                                             1)
    srv_control.upload_db_reservation('MySQL')
    # Global preference, returned when no reservation matches.
    srv_control.config_srv('preference', 0, '123')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: matching DUID -> option 7 value 10, address NOT 3000::100.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::100', expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    # Exchange 2: different DUID -> global preference 123.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:21')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 123)
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_mysql_duid_ll_matching_option_no_address_2():
    """Address-less MySQL reservation: option 7 follows the DUID, not the IA.

    The matching client receives preference 10 whether or not it asks
    for an address (with/without IA-NA); a different DUID gets the
    global value 123.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('MySQL')
    srv_control.new_db_backend_reservation('MySQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'MySQL', 1)
    # Reserve option 7 (preference) value 10; no address reservation.
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'MySQL',
                                             1)
    srv_control.upload_db_reservation('MySQL')
    # Global preference, returned when no reservation matches.
    srv_control.config_srv('preference', 0, '123')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: matching DUID with IA-NA -> reserved option, pool address.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::100', expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    # Exchange 2: matching DUID without IA-NA -> no IA_NA option in the
    # answer, but the reserved option 7 is still served.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3, expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    # Exchange 3: different DUID -> global preference 123.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:21')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 123)
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_mysql_duid_ll_matching_option_inforequest():
    """Reserved option via INFOREQUEST vs SOLICIT.

    The test expects the global value 123 for an INFOREQUEST (which
    carries no client-id here — presumably the server cannot match the
    host record without it), the reserved value 10 for a SOLICIT with a
    matching client-id, and 123 again for a non-matching DUID.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('MySQL')
    srv_control.new_db_backend_reservation('MySQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'MySQL', 1)
    # Reserve option 7 (preference) value 10 on MySQL record 1.
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'MySQL',
                                             1)
    srv_control.upload_db_reservation('MySQL')
    # Global preference, returned when no reservation matches.
    srv_control.config_srv('preference', 0, '123')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: INFOREQUEST without client-id -> global value 123.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_send_msg('INFOREQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(3, expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 123)
    # Exchange 2: SOLICIT with matching client-id -> reserved value 10.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3, expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    # Exchange 3: non-matching DUID -> global value 123.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:21')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 123)
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_mysql_option_multiple():
    """Multiple reserved options (7, 21, 23, 59) from one MySQL host record.

    The matching DUID receives the reserved address and the reserved
    values for options 7/21/23/59; option 42 is only configured
    globally, so both clients get the same value for it.  A different
    DUID receives only the globally configured option values.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('MySQL')
    srv_control.new_db_backend_reservation('MySQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'MySQL', 1)
    srv_control.ipv6_address_db_backend_reservation('3000::100', '$(EMPTY)', 'MySQL', 1)
    # Reserved option 7 (preference).
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'MySQL',
                                             1)
    # Reserved option 21 (sip-server-dns).
    srv_control.option_db_record_reservation(21,
                                             'srv1.example.com,srv2.isc.org',
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'MySQL',
                                             1)
    # Reserved option 23 (dns-servers).
    srv_control.option_db_record_reservation(23,
                                             '2001:db8::1,2001:db8::2',
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'MySQL',
                                             1)
    # Reserved option 59 (bootfile-url).
    srv_control.option_db_record_reservation(59,
                                             'http://www.kea-reserved.isc.org',
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'MySQL',
                                             1)
    # NOTE(review): leftover pre-pytest step — an option 60 reservation was
    # apparently planned but never added; option 60 is configured globally
    # below and not checked in the responses.
    # Add option reservation code 60 value 10 space dhcp6 persistent 1 client class $(EMPTY) subnet id 1 and scope subnet to MySQL record id 1.
    srv_control.upload_db_reservation('MySQL')
    # Global values used when the reservation does not match.
    srv_control.config_srv('preference', 0, '123')
    srv_control.config_srv_opt('sip-server-dns', 'srv4.example.com,srv5.isc.org')
    # option 21
    srv_control.config_srv_opt('dns-servers', '2001:db8::4,2001:db8::5')
    # option 23
    srv_control.config_srv_opt('bootfile-url', 'http://www.kea.isc.org')
    # option 59
    srv_control.config_srv_opt('bootfile-param', '000B48656C6C6F20776F726C640003666F6F')
    # option 60
    srv_control.config_srv_opt('new-tzdb-timezone', 'Europe/Zurich')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: matching DUID -> reserved address and reserved 7/21/23/59.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_requests_option(21)
    srv_msg.client_requests_option(23)
    srv_msg.client_requests_option(42)
    srv_msg.client_requests_option(59)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::100')
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    srv_msg.response_check_include_option(59)
    srv_msg.response_check_option_content(59, 'optdata', 'http://www.kea-reserved.isc.org')
    srv_msg.response_check_include_option(21)
    srv_msg.response_check_option_content(21, 'addr', 'srv1.example.com.,srv2.isc.org.')
    srv_msg.response_check_include_option(23)
    srv_msg.response_check_option_content(23, 'addr', '2001:db8::1,2001:db8::2')
    srv_msg.response_check_include_option(42)
    srv_msg.response_check_option_content(42, 'optdata', 'Europe/Zurich')
    # Exchange 2: different DUID -> global values only.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:21')
    srv_msg.client_requests_option(7)
    srv_msg.client_requests_option(21)
    srv_msg.client_requests_option(42)
    srv_msg.client_requests_option(23)
    srv_msg.client_requests_option(59)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 123)
    srv_msg.response_check_include_option(59)
    srv_msg.response_check_option_content(59, 'optdata', 'http://www.kea.isc.org')
    srv_msg.response_check_include_option(21)
    srv_msg.response_check_option_content(21, 'addr', 'srv4.example.com.,srv5.isc.org.')
    srv_msg.response_check_include_option(23)
    srv_msg.response_check_option_content(23, 'addr', '2001:db8::4,2001:db8::5')
    srv_msg.response_check_include_option(42)
    srv_msg.response_check_option_content(42, 'optdata', 'Europe/Zurich')
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_pgsql_hwaddrr_matching_option():
    """PostgreSQL reservation matched by hw-address.

    The record reserves address 3000::100 and option 7 value 10 for
    hw-address f6:f5:f4:f3:f2:01.  The first client uses a DUID-LLT
    ending in that hw-address — presumably the server extracts the MAC
    from the DUID to match.  A client with a different address gets the
    global preference 12.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('PostgreSQL')
    srv_control.new_db_backend_reservation('PostgreSQL', 'hw-address', 'f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'PostgreSQL', 1)
    srv_control.ipv6_address_db_backend_reservation('3000::100', '$(EMPTY)', 'PostgreSQL', 1)
    # Reserve option 7 (preference) value 10 on PostgreSQL record 1.
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'PostgreSQL',
                                             1)
    srv_control.upload_db_reservation('PostgreSQL')
    # Global preference, returned when no reservation matches.
    srv_control.config_srv_opt('preference', '12')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: DUID-LLT carrying the reserved hw-address -> value 10.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:01:00:01:52:7b:a8:f0:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    # Response sub-option 5 from option 3 MUST contain address 3000::100.
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    # Exchange 2: different hw-address -> global preference 12.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:22')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    # Response sub-option 5 from option 3 MUST contain address 3000::100.
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 12)
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_pgsql_hwaddrr_matching_option_no_address():
    """Address-less PostgreSQL hw-address reservation, SOLICIT without IA-NA.

    Only option 7 value 10 is reserved.  Both SOLICITs omit IA-NA, so
    neither answer contains an IA_NA option; the matching hw-address
    gets 10, the other gets the global preference 12.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('PostgreSQL')
    srv_control.new_db_backend_reservation('PostgreSQL', 'hw-address', 'f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'PostgreSQL', 1)
    # Reserve option 7 (preference) value 10; no address reservation.
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'PostgreSQL',
                                             1)
    srv_control.upload_db_reservation('PostgreSQL')
    # Global preference, returned when no reservation matches.
    srv_control.config_srv_opt('preference', '12')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: matching hw-address (inside DUID-LLT) -> value 10.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:01:00:01:52:7b:a8:f0:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3, expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    # Exchange 2: different hw-address -> global preference 12.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:22')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3, expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 12)
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_pgsql_hwaddrr_matching_option_inforequest():
    """PostgreSQL hw-address reservation via INFOREQUEST vs SOLICIT.

    The test expects the global value 12 for an INFOREQUEST (sent
    without a client-id — presumably the server cannot match the host
    record without it) and the reserved value 10 for a SOLICIT carrying
    the matching client-id.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('PostgreSQL')
    srv_control.new_db_backend_reservation('PostgreSQL', 'hw-address', 'f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'PostgreSQL', 1)
    # Reserve option 7 (preference) value 10 on PostgreSQL record 1.
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'PostgreSQL',
                                             1)
    srv_control.upload_db_reservation('PostgreSQL')
    # Global preference, returned when no reservation matches.
    srv_control.config_srv_opt('preference', '12')
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: INFOREQUEST without client-id -> global value 12.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:01:00:01:52:7b:a8:f0:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_send_msg('INFOREQUEST')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'REPLY')
    srv_msg.response_check_include_option(3, expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 12)
    # Exchange 2: SOLICIT with matching client-id -> reserved value 10.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:01:00:01:52:7b:a8:f0:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3, expect_include=False)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
@pytest.mark.v6
@pytest.mark.host_reservation
@pytest.mark.kea_only
@pytest.mark.reserved_options
def test_v6_host_reservation_pgsql_option_multiple():
    """Multiple reserved options (7, 21, 23, 59) from one PostgreSQL record.

    PostgreSQL twin of the MySQL multiple-options test: the matching
    DUID receives the reserved address and reserved values for options
    7/21/23/59; option 42 is only configured globally, so both clients
    get the same value for it.
    """
    misc.test_setup()
    srv_control.config_srv_subnet('3000::/64', '3000::1-3000::ff')
    srv_control.enable_db_backend_reservation('PostgreSQL')
    srv_control.new_db_backend_reservation('PostgreSQL', 'duid', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_control.update_db_backend_reservation('dhcp6_subnet_id', 1, 'PostgreSQL', 1)
    srv_control.ipv6_address_db_backend_reservation('3000::100', '$(EMPTY)', 'PostgreSQL', 1)
    # Reserved option 7 (preference).
    srv_control.option_db_record_reservation(7,
                                             10,
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'PostgreSQL',
                                             1)
    # Reserved option 21 (sip-server-dns).
    srv_control.option_db_record_reservation(21,
                                             'srv1.example.com,srv2.isc.org',
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'PostgreSQL',
                                             1)
    # Reserved option 23 (dns-servers).
    srv_control.option_db_record_reservation(23,
                                             '2001:db8::1,2001:db8::2',
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'PostgreSQL',
                                             1)
    # Reserved option 59 (bootfile-url).
    srv_control.option_db_record_reservation(59,
                                             'http://www.kea-reserved.isc.org',
                                             'dhcp6',
                                             1,
                                             '$(EMPTY)',
                                             1,
                                             'subnet',
                                             'PostgreSQL',
                                             1)
    # NOTE(review): leftover pre-pytest step — an option 60 reservation was
    # apparently planned but never added (comment below still says MySQL).
    # Add option reservation code 60 value 10 space dhcp6 persistent 1 client class $(EMPTY) subnet id 1 and scope subnet to MySQL record id 1.
    srv_control.upload_db_reservation('PostgreSQL')
    # Global values used when the reservation does not match.
    srv_control.config_srv('preference', 0, '123')
    srv_control.config_srv_opt('sip-server-dns', 'srv4.example.com,srv5.isc.org')
    # option 21
    srv_control.config_srv_opt('dns-servers', '2001:db8::4,2001:db8::5')
    # option 23
    srv_control.config_srv_opt('bootfile-url', 'http://www.kea.isc.org')
    # option 59
    srv_control.config_srv_opt('new-tzdb-timezone', 'Europe/Zurich')
    # option 42 — not reserved in the host record, so both clients get it
    # with the same (global) value
    srv_control.build_and_send_config_files()
    srv_control.start_srv('DHCP', 'started')
    # Exchange 1: matching DUID -> reserved address and reserved 7/21/23/59.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:01')
    srv_msg.client_requests_option(7)
    srv_msg.client_requests_option(21)
    srv_msg.client_requests_option(23)
    srv_msg.client_requests_option(42)
    srv_msg.client_requests_option(59)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_suboption_content(5, 3, 'addr', '3000::100')
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 10)
    srv_msg.response_check_include_option(59)
    srv_msg.response_check_option_content(59, 'optdata', 'http://www.kea-reserved.isc.org')
    srv_msg.response_check_include_option(21)
    srv_msg.response_check_option_content(21, 'addr', 'srv1.example.com.,srv2.isc.org.')
    srv_msg.response_check_include_option(23)
    srv_msg.response_check_option_content(23, 'addr', '2001:db8::1,2001:db8::2')
    srv_msg.response_check_include_option(42)
    srv_msg.response_check_option_content(42, 'optdata', 'Europe/Zurich')
    # Exchange 2: different DUID -> global values only.
    misc.test_procedure()
    srv_msg.client_sets_value('Client', 'DUID', '00:03:00:01:f6:f5:f4:f3:f2:21')
    srv_msg.client_requests_option(7)
    srv_msg.client_requests_option(21)
    srv_msg.client_requests_option(23)
    srv_msg.client_requests_option(59)
    srv_msg.client_requests_option(42)
    srv_msg.client_does_include('Client', 'client-id')
    srv_msg.client_does_include('Client', 'IA-NA')
    srv_msg.client_send_msg('SOLICIT')
    misc.pass_criteria()
    srv_msg.send_wait_for_message('MUST', 'ADVERTISE')
    srv_msg.response_check_include_option(3)
    srv_msg.response_check_option_content(3, 'sub-option', 5)
    srv_msg.response_check_include_option(7)
    srv_msg.response_check_option_content(7, 'value', 123)
    srv_msg.response_check_include_option(59)
    srv_msg.response_check_option_content(59, 'optdata', 'http://www.kea.isc.org')
    srv_msg.response_check_include_option(21)
    srv_msg.response_check_option_content(21, 'addr', 'srv4.example.com.,srv5.isc.org.')
    srv_msg.response_check_include_option(23)
    srv_msg.response_check_option_content(23, 'addr', '2001:db8::4,2001:db8::5')
    srv_msg.response_check_include_option(42)
    srv_msg.response_check_option_content(42, 'optdata', 'Europe/Zurich')
| 44.967492
| 143
| 0.607491
| 3,642
| 29,049
| 4.468149
| 0.043108
| 0.087753
| 0.094635
| 0.128434
| 0.986911
| 0.986911
| 0.986419
| 0.986419
| 0.986419
| 0.984514
| 0
| 0.060154
| 0.277221
| 29,049
| 645
| 144
| 45.037209
| 0.714898
| 0.019381
| 0
| 0.972973
| 0
| 0.007207
| 0.158107
| 0.04433
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016216
| true
| 0.036036
| 0.007207
| 0
| 0.023423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7abf36ea4f38922fa45728a3faa8a950ab5ed54b
| 1,124
|
py
|
Python
|
allure-pytest-bdd/test/labels_tests/labels_test.py
|
Duisus/allure-python
|
09402db43da00bb3edb59767d5cc3826457c3f1a
|
[
"Apache-2.0"
] | 1
|
2021-01-08T12:52:32.000Z
|
2021-01-08T12:52:32.000Z
|
allure-pytest-bdd/test/labels_tests/labels_test.py
|
Duisus/allure-python
|
09402db43da00bb3edb59767d5cc3826457c3f1a
|
[
"Apache-2.0"
] | null | null | null |
allure-pytest-bdd/test/labels_tests/labels_test.py
|
Duisus/allure-python
|
09402db43da00bb3edb59767d5cc3826457c3f1a
|
[
"Apache-2.0"
] | null | null | null |
from pytest_bdd import scenario
@scenario("labels_features\\tag_in_gherkin_or_scenario.feature",
"Tag in gherkin")
def test_tag_in_gherkin():
pass
@scenario("labels_features\\tag_in_gherkin_or_scenario.feature",
"Tag in scenario")
def test_tag_in_scenario():
pass
@scenario("labels_features\\tags_in_gherkin_and_scenario.feature",
"Different tag in gherkin and scenario")
def test_different_tags():
pass
@scenario("labels_features\\tags_in_gherkin_and_scenario.feature",
"Same tag in gherkin and scenario")
def test_same_tags():
pass
@scenario("labels_features\\many_tags.feature",
"Many tags in gherkin")
def test_many_tags_in_gherkin():
pass
@scenario("labels_features\\many_tags.feature",
"Many tags in scenario")
def test_many_tags_in_scenario():
pass
@scenario("labels_features\\many_tags.feature",
"Many tags in gherkin and scenario")
def test_many_tags_in_gherkin_and_scenario():
pass
@scenario("labels_features\\tags_in_feature.feature",
"Tag in feature")
def test_tag_in_feature():
pass
| 22.48
| 66
| 0.72331
| 152
| 1,124
| 4.973684
| 0.131579
| 0.142857
| 0.232804
| 0.240741
| 0.818783
| 0.808201
| 0.662698
| 0.519841
| 0.519841
| 0.519841
| 0
| 0
| 0.177046
| 1,124
| 49
| 67
| 22.938776
| 0.817297
| 0
| 0
| 0.454545
| 0
| 0
| 0.476868
| 0.311388
| 0
| 0
| 0
| 0
| 0
| 1
| 0.242424
| true
| 0.242424
| 0.030303
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8fa6feae0823b64c0a81d995dc720ff4145a63c9
| 181
|
py
|
Python
|
python-algorithm/leetcode/problem_420.py
|
isudox/leetcode-solution
|
60085e64deaf396a171367affc94b18114565c43
|
[
"MIT"
] | 5
|
2017-06-11T09:19:34.000Z
|
2019-01-16T16:58:31.000Z
|
python-algorithm/leetcode/problem_420.py
|
isudox/leetcode-solution
|
60085e64deaf396a171367affc94b18114565c43
|
[
"MIT"
] | null | null | null |
python-algorithm/leetcode/problem_420.py
|
isudox/leetcode-solution
|
60085e64deaf396a171367affc94b18114565c43
|
[
"MIT"
] | 1
|
2019-03-02T15:50:43.000Z
|
2019-03-02T15:50:43.000Z
|
"""420. Strong Password Checker
https://leetcode.com/problems/strong-password-checker/
"""
class Solution:
    def strongPasswordChecker(self, password: str) -> int:
        """Return the minimum number of edits to make ``password`` strong.

        A strong password has 6-20 characters, contains at least one
        lowercase letter, one uppercase letter and one digit, and has no
        run of three or more identical characters.  One edit is a single
        insertion, deletion or replacement.
        """
        n = len(password)
        # Character classes still absent; each costs at least one edit.
        missing = 3
        if any(c.islower() for c in password):
            missing -= 1
        if any(c.isupper() for c in password):
            missing -= 1
        if any(c.isdigit() for c in password):
            missing -= 1
        if n < 6:
            # Insertions can simultaneously extend the length, supply
            # missing classes, and break runs.
            return max(missing, 6 - n)
        # Scan runs of >= 3 identical characters.
        change = 0  # replacements needed to break all runs
        ones = 0    # runs with length % 3 == 0: 1 deletion saves 1 replacement
        twos = 0    # runs with length % 3 == 1: 2 deletions save 1 replacement
        i = 0
        while i < n:
            j = i
            while j < n and password[j] == password[i]:
                j += 1
            run = j - i
            if run >= 3:
                change += run // 3
                if run % 3 == 0:
                    ones += 1
                elif run % 3 == 1:
                    twos += 1
            i = j
        if n <= 20:
            # Replacements used to break runs can also supply missing classes.
            return max(missing, change)
        # Over-long password: all excess characters must be deleted; spend
        # deletions where they save the most replacements (greedy by cost).
        delete = n - 20
        change -= min(delete, ones)
        change -= min(max(delete - ones, 0), twos * 2) // 2
        change -= max(delete - ones - 2 * twos, 0) // 3
        return delete + max(missing, change)
| 20.111111
| 58
| 0.707182
| 20
| 181
| 6.4
| 0.8
| 0.21875
| 0.328125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019737
| 0.160221
| 181
| 8
| 59
| 22.625
| 0.822368
| 0.458564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.666667
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
8fb49052501691edecf908352b1f526fc1e01552
| 84
|
py
|
Python
|
tests/import/import2a.py
|
sebastien-riou/micropython
|
116c15842fd48ddb77b0bc016341d936a0756573
|
[
"MIT"
] | 13,648
|
2015-01-01T01:34:51.000Z
|
2022-03-31T16:19:53.000Z
|
tests/import/import2a.py
|
sebastien-riou/micropython
|
116c15842fd48ddb77b0bc016341d936a0756573
|
[
"MIT"
] | 7,092
|
2015-01-01T07:59:11.000Z
|
2022-03-31T23:52:18.000Z
|
tests/import/import2a.py
|
sebastien-riou/micropython
|
116c15842fd48ddb77b0bc016341d936a0756573
|
[
"MIT"
] | 4,942
|
2015-01-02T11:48:50.000Z
|
2022-03-31T19:57:10.000Z
|
# Test importing the same name twice, once plainly and once aliased.
# `import1b` is a companion test module — presumably it defines a
# module-level `var`; confirm against the test suite.
from import1b import var
print(var)
# Aliased import binds the same object under a second name, so the
# printed output is the same value twice.
from import1b import var as var2
print(var2)
| 10.5
| 32
| 0.77381
| 14
| 84
| 4.642857
| 0.5
| 0.369231
| 0.553846
| 0.646154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057971
| 0.178571
| 84
| 7
| 33
| 12
| 0.884058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
64e09fe575fbf8ffa09aed5720f2ecab5d2695f9
| 224
|
py
|
Python
|
iceworm/utils/__init__.py
|
wrmsr0/iceworm
|
09431bb3cdc4f6796aafca41e37d42ebe0ddfeef
|
[
"BSD-3-Clause"
] | null | null | null |
iceworm/utils/__init__.py
|
wrmsr0/iceworm
|
09431bb3cdc4f6796aafca41e37d42ebe0ddfeef
|
[
"BSD-3-Clause"
] | 1
|
2021-01-19T14:29:19.000Z
|
2021-01-19T14:34:27.000Z
|
iceworm/utils/__init__.py
|
wrmsr0/iceworm
|
09431bb3cdc4f6796aafca41e37d42ebe0ddfeef
|
[
"BSD-3-Clause"
] | 1
|
2020-12-31T22:29:52.000Z
|
2020-12-31T22:29:52.000Z
|
from . import inject # noqa
from . import secrets # noqa
from .utils import ReprFn # noqa
from .utils import build_dc_repr # noqa
from .utils import build_enum_value_map # noqa
from .utils import memoized_unary # noqa
| 32
| 47
| 0.758929
| 34
| 224
| 4.823529
| 0.441176
| 0.243902
| 0.317073
| 0.463415
| 0.292683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 224
| 6
| 48
| 37.333333
| 0.901099
| 0.129464
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a4393decb6a90e2a2ecf41eaa9d4818fa703d896
| 48,596
|
py
|
Python
|
lammps_interface/mof_sbus.py
|
zmzeng/lammps_interface
|
07da45c444cadcab06683f3fea0fe4a781377365
|
[
"MIT"
] | 74
|
2016-10-19T23:42:21.000Z
|
2022-03-31T08:05:54.000Z
|
lammps_interface/mof_sbus.py
|
zmzeng/lammps_interface
|
07da45c444cadcab06683f3fea0fe4a781377365
|
[
"MIT"
] | 44
|
2017-01-22T02:25:12.000Z
|
2021-12-08T03:25:51.000Z
|
lammps_interface/mof_sbus.py
|
mwitman1/lammps_interface
|
4ebea5493df9e7f2381b7cad3cd5b6b2ae698a27
|
[
"MIT"
] | 46
|
2016-08-10T09:22:41.000Z
|
2022-03-01T03:33:14.000Z
|
"""
MOF sbus.
"""
import networkx as nx
import numpy as np
from scipy.spatial import distance
def add_distance_matrix(graph):
carts = []
if(float('.'.join(nx.__version__.split('.')[:2])) >= 2.0):
for j, data in sorted(list(graph.nodes(data=True))):
carts.append(data['cartesian_coordinates'])
else:
for j, data in sorted(graph.nodes_iter(data=True)):
carts.append(data['cartesian_coordinates'])
carts = np.array(carts)
graph.distance_matrix = distance.cdist(carts, carts)
InorganicCluster = {
'Cu':{'Cu Paddlewheel': nx.Graph(name='Cu Paddlewheel') # taken from doi: 10.1126/science.283.5405.1148
},
'Zn':{'Zn4O': nx.Graph(name='Zn4O'), # taken from doi:
'Zn Paddlewheel': nx.Graph(name='Zn Paddlewheel'), # taken from doi:
'Kuratowski': nx.Graph(name='Kuratowski')},
'Zr':{'Zr_UiO': nx.Graph(name='Zr_UiO') # taken from doi:
},
'Cr':{'Cr_tri': nx.Graph(name='Cr_tri') # taken from doi:
},
'V':{'V_pillar': nx.Graph(name='V_pillar') # taken from doi:
},
'Al':{'Al_pillar': nx.Graph(name='Al_pillar') # taken from doi:
}
}
OrganicCluster = {
'N':{'Thymine': nx.Graph(name='Thymine'),
'Adenine': nx.Graph(name='Adenine'),
'CarboxylateImidazolate': nx.Graph(name='CarboxylateImidazolate')},
'C':{'Benzene-2C': nx.Graph(name='Benzene-2C'),
'Biphenyl-2C': nx.Graph(name='Biphenyl-2C'),
'Triphenyl-2C': nx.Graph(name='Triphenyl-2C')
}
}
# add entry
InorganicCluster['Cu']['Cu Paddlewheel'].add_nodes_from([
(1, {'element':'O',
'special_flag': 'O1_Cu_pdw',
'cartesian_coordinates':np.array([1.755, -0.181, -1.376])
}
),
(2, {'element':'O',
'special_flag': 'O2_Cu_pdw',
'cartesian_coordinates':np.array([-1.755, 0.181, -1.376])
}
),
(3, {'element':'O',
'special_flag': 'O1_Cu_pdw',
'cartesian_coordinates':np.array([-0.181, 1.755, 1.376])
}
),
(4, {'element':'O',
'special_flag':'O2_Cu_pdw',
'cartesian_coordinates':np.array([0.181, -1.755, 1.376])
}
),
(5, {'element':'O',
'special_flag':'O1_Cu_pdw',
'cartesian_coordinates':np.array([-1.755, 0.181, 1.376])
}
),
(6, {'element':'O',
'special_flag':'O2_Cu_pdw',
'cartesian_coordinates':np.array([1.755, -0.181, 1.376])
}
),
(7, {'element':'O',
'special_flag':'O1_Cu_pdw',
'cartesian_coordinates':np.array([0.181, -1.755, -1.376])
}
),
(8, {'element':'O',
'special_flag':'O2_Cu_pdw',
'cartesian_coordinates':np.array([-0.181, 1.755, -1.376])
}
),
(9, {'element':'Cu',
'special_flag':'Cu_pdw',
'cartesian_coordinates':np.array([0.929, 0.929, 0.000])
}
),
(10, {'element':'Cu',
'special_flag':'Cu_pdw',
'cartesian_coordinates':np.array([-0.929, -0.929, 0.000])
}
),
(11, {'element':'C',
'special_flag':'C_Cu_pdw',
'cartesian_coordinates':np.array([1.233, -1.233, -1.810])
}
),
(12, {'element':'C',
'special_flag':'C_Cu_pdw',
'cartesian_coordinates':np.array([-1.233, 1.233, -1.810])
}
),
(13, {'element':'C',
'special_flag':'C_Cu_pdw',
'cartesian_coordinates':np.array([-1.233, 1.233, 1.810])
}
),
(14, {'element':'C',
'special_flag':'C_Cu_pdw',
'cartesian_coordinates':np.array([1.233, -1.233, 1.810])
}
)
])
InorganicCluster['Zn']['Zn Paddlewheel'].add_nodes_from([
(1, {'element':'O',
'special_flag': 'O1_Zn_pdw',
'cartesian_coordinates':np.array([-1.398, -1.339, 1.417])
}
),
(2, {'element':'O',
'special_flag': 'O2_Zn_pdw',
'cartesian_coordinates':np.array([-1.398, 0.853, -1.417])
}
),
(3, {'element':'O',
'special_flag': 'O1_Zn_pdw',
'cartesian_coordinates':np.array([-1.398, 0.853, 1.417])
}
),
(4, {'element':'O',
'special_flag':'O2_Zn_pdw',
'cartesian_coordinates':np.array([-1.398, -1.339, -1.417])
}
),
(5, {'element':'O',
'special_flag':'O1_Zn_pdw',
'cartesian_coordinates':np.array([1.398, -1.339, -1.417])
}
),
(6, {'element':'O',
'special_flag':'O2_Zn_pdw',
'cartesian_coordinates':np.array([1.398, 0.853, 1.417])
}
),
(7, {'element':'O',
'special_flag':'O1_Zn_pdw',
'cartesian_coordinates':np.array([1.398, 0.853, -1.417])
}
),
(8, {'element':'O',
'special_flag':'O2_Zn_pdw',
'cartesian_coordinates':np.array([1.398, -1.339, 1.417])
}
),
(9, {'element':'Zn',
'special_flag':'Zn_pdw',
'cartesian_coordinates':np.array([0.000, -1.717, 0.000])
}
),
(10, {'element':'Zn',
'special_flag':'Zn_pdw',
'cartesian_coordinates':np.array([0.000, 1.230, 0.000])
}
),
(11, {'element':'C',
'special_flag':'C_Zn_pdw',
'cartesian_coordinates':np.array([-1.761, -0.243, 1.837])
}
),
(12, {'element':'C',
'special_flag':'C_Zn_pdw',
'cartesian_coordinates':np.array([-1.761, -0.243, -1.837])
}
),
(13, {'element':'C',
'special_flag':'C_Zn_pdw',
'cartesian_coordinates':np.array([1.761, -0.243, 1.837])
}
),
(14, {'element':'C',
'special_flag':'C_Zn_pdw',
'cartesian_coordinates':np.array([1.761, -0.243, -1.837])
}
)
])
InorganicCluster['Zn']['Zn4O'].add_nodes_from([
(1, {'element':'Zn',
'special_flag':'Zn4O',
'cartesian_coordinates':np.array([-1.063000,-1.063000,-1.174000])
}
),
(2, {'element':'Zn',
'special_flag':'Zn4O',
'cartesian_coordinates':np.array([-1.062000,1.179000,1.067000])
}
),
(3, {'element':'Zn',
'special_flag':'Zn4O',
'cartesian_coordinates':np.array([1.179000,-1.063000,1.067000])
}
),
(4, {'element':'Zn',
'special_flag':'Zn4O',
'cartesian_coordinates':np.array([1.179000,1.178000,-1.175000])
}
),
(5, {'element':'O',
'special_flag':'O_z_Zn4O',
'cartesian_coordinates':np.array([0.058000,0.058000,-0.054000])
}
),
(6, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([-2.939000,-0.765000,-0.876000])
}
),
(7, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([-0.764000,0.883000,2.943000])
}
),
(8, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([0.881000,-2.938000,0.770000])
}
),
(9, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([-2.938000,0.883000,0.770000])
}
),
(10, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([-0.767000,-2.938000,-0.876000])
}
),
(11, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([0.882000,-0.764000,2.943000])
}
),
(12, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([3.055000,-0.766000,0.769000])
}
),
(13, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([0.881000,0.880000,-3.051000])
}
),
(14, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([3.055000,0.880000,-0.878000])
}
),
(15, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([-0.766000,-0.766000,-3.050000])
}
),
(16, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([-0.764000,3.055000,0.769000])
}
),
(17, {'element':'O',
'special_flag':'O_c_Zn4O',
'cartesian_coordinates':np.array([0.882000,3.054000,-0.879000])
}
),
(18, {'element':'C',
'special_flag':'C_Zn4O',
'cartesian_coordinates':np.array([3.541000,0.057000,-0.055000])
}
),
(19, {'element':'C',
'special_flag':'C_Zn4O',
'cartesian_coordinates':np.array([0.059000,3.541000,-0.055000])
}
),
(20, {'element':'C',
'special_flag':'C_Zn4O',
'cartesian_coordinates':np.array([0.057000,0.057000,-3.550000])
}
),
(21, {'element':'C',
'special_flag':'C_Zn4O',
'cartesian_coordinates':np.array([-3.438000,0.059000,-0.053000])
}
),
(22, {'element':'C',
'special_flag':'C_Zn4O',
'cartesian_coordinates':np.array([0.057000,-3.438000,-0.053000])
}
),
(23, {'element':'C',
'special_flag':'C_Zn4O',
'cartesian_coordinates':np.array([0.058000,0.058000,3.429000])
}
)
])
InorganicCluster['Zn']['Kuratowski'].add_nodes_from([
(1, {'element':'Zn',
'special_flag':'Zn_tet',
'cartesian_coordinates':np.array([2.079000,2.079000,-2.079000])
}
),
(2, {'element':'Cl',
'special_flag':'Cl_kuratowski',
'cartesian_coordinates':np.array([3.295000,3.295000,-3.295000])
}
),
(3, {'element':'Zn',
'special_flag':'Zn_tet',
'cartesian_coordinates':np.array([-2.079000,2.079000,2.079000])
}
),
(4, {'element':'Cl',
'special_flag':'Cl_kuratowski',
'cartesian_coordinates':np.array([-3.295000,3.295000,3.295000])
}
),
(5, {'element':'Zn',
'special_flag':'Zn_tet',
'cartesian_coordinates':np.array([2.079000,-2.079000,2.079000])
}
),
(6, {'element':'Cl',
'special_flag':'Cl_kuratowski',
'cartesian_coordinates':np.array([3.295000,-3.295000,3.295000])
}
),
(7, {'element':'Zn',
'special_flag':'Zn_tet',
'cartesian_coordinates':np.array([-2.079000,-2.079000,-2.079000])
}
),
(8, {'element':'Cl',
'special_flag':'Cl_kuratowski',
'cartesian_coordinates':np.array([-3.295000,-3.295000,-3.295000])
}
),
(9, {'element':'Zn',
'special_flag':'Zn_oct',
'cartesian_coordinates':np.array([-0.000000,-0.000000,-0.000000])
}
),
(10, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([2.946000,0.770000,-0.770000])
}
),
(11, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([4.261000,-0.493000,0.493000])
}
),
(12, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([-0.770000,2.946000,0.770000])
}
),
(13, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([0.493000,4.261000,-0.493000])
}
),
(14, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([0.770000,-0.770000,2.946000])
}
),
(15, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([-0.493000,0.493000,4.261000])
}
),
(16, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([0.770000,2.946000,-0.770000])
}
),
(17, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([-0.493000,4.261000,0.493000])
}
),
(18, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([2.946000,-0.770000,0.770000])
}
),
(19, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([4.261000,0.493000,-0.493000])
}
),
(20, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([-0.770000,0.770000,2.946000])
}
),
(21, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([0.493000,-0.493000,4.261000])
}
),
(22, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([-0.770000,-2.946000,-0.770000])
}
),
(23, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([0.493000,-4.261000,0.493000])
}
),
(24, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([0.770000,0.770000,-2.946000])
}
),
(25, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([-0.493000,-0.493000,-4.261000])
}
),
(26, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([0.770000,-2.946000,0.770000])
}
),
(27, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([-0.493000,-4.261000,-0.493000])
}
),
(28, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([-0.770000,-0.770000,-2.946000])
}
),
(29, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([0.493000,0.493000,-4.261000])
}
),
(30, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([-2.946000,0.770000,0.770000])
}
),
(31, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([-4.261000,-0.493000,-0.493000])
}
),
(32, {'element':'N',
'special_flag':'N_tet',
'cartesian_coordinates':np.array([-2.946000,-0.770000,-0.770000])
}
),
(33, {'element':'C',
'special_flag':'C_kuratowski',
'cartesian_coordinates':np.array([-4.261000,0.493000,0.493000])
}
),
(34, {'element':'N',
'special_flag':'N_oct',
'cartesian_coordinates':np.array([2.211000,-0.000000,-0.000000])
}
),
(35, {'element':'N',
'special_flag':'N_oct',
'cartesian_coordinates':np.array([-0.000000,2.211000,-0.000000])
}
),
(36, {'element':'N',
'special_flag':'N_oct',
'cartesian_coordinates':np.array([-0.000000,-0.000000,2.211000])
}
),
(37, {'element':'N',
'special_flag':'N_oct',
'cartesian_coordinates':np.array([-0.000000,-2.211000,-0.000000])
}
),
(38, {'element':'N',
'special_flag':'N_oct',
'cartesian_coordinates':np.array([-0.000000,-0.000000,-2.211000])
}
),
(39, {'element':'N',
'special_flag':'N_oct',
'cartesian_coordinates':np.array([-2.211000,-0.000000,-0.000000])
}
)
])
InorganicCluster['Zr']['Zr_UiO'].add_nodes_from([
(1, {'element':'Zr',
'special_flag':'Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-2.521000,0.000000])
}
),
(2, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([1.973000,-3.568000,0.000000])
}
),
(3, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-1.973000,-3.568000,0.000000])
}
),
(4, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-2.012000,-3.529000])
}
),
(5, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-2.012000,3.529000])
}
),
(6, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-3.568000,-1.973000])
}
),
(7, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-3.568000,1.973000])
}
),
(8, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-3.529000,-2.012000,0.000000])
}
),
(9, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([3.529000,-2.012000,0.000000])
}
),
(10, {'element':'O',
'special_flag':'O_h_Zr_UiO',
'cartesian_coordinates':np.array([1.161000,-1.200000,-1.161000])
}
),
(11, {'element':'O',
'special_flag':'O_h_Zr_UiO',
'cartesian_coordinates':np.array([-1.161000,-1.200000,1.161000])
}
),
(12, {'element':'O',
'special_flag':'O_z_Zr_UiO',
'cartesian_coordinates':np.array([1.161000,-1.200000,1.161000])
}
),
(13, {'element':'O',
'special_flag':'O_z_Zr_UiO',
'cartesian_coordinates':np.array([-1.161000,-1.200000,-1.161000])
}
),
(14, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([-3.180000,-3.219000,0.000000])
}
),
(15, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([3.180000,-3.219000,0.000000])
}
),
(16, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-3.219000,3.180000])
}
),
(17, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-3.219000,-3.180000])
}
),
(18, {'element':'Zr',
'special_flag':'Zr_UiO',
'cartesian_coordinates':np.array([2.482000,-0.039000,0.000000])
}
),
(19, {'element':'Zr',
'special_flag':'Zr_UiO',
'cartesian_coordinates':np.array([-2.482000,-0.039000,0.000000])
}
),
(20, {'element':'Zr',
'special_flag':'Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,2.443000,0.000000])
}
),
(21, {'element':'Zr',
'special_flag':'Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-0.039000,2.482000])
}
),
(22, {'element':'Zr',
'special_flag':'Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,-0.039000,-2.482000])
}
),
(23, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([3.529000,-0.039000,1.973000])
}
),
(24, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-3.529000,-0.039000,1.973000])
}
),
(25, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-3.529000,-0.039000,-1.973000])
}
),
(26, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([3.529000,-0.039000,-1.973000])
}
),
(27, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([1.973000,3.490000,0.000000])
}
),
(28, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-1.973000,3.490000,0.000000])
}
),
(29, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,1.934000,3.529000])
}
),
(30, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,1.934000,-3.529000])
}
),
(31, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,3.490000,-1.973000])
}
),
(32, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,3.490000,1.973000])
}
),
(33, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([3.529000,1.934000,0.000000])
}
),
(34, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-3.529000,1.934000,0.000000])
}
),
(35, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([1.973000,-0.039000,-3.529000])
}
),
(36, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([1.973000,-0.039000,3.529000])
}
),
(37, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-1.973000,-0.039000,3.529000])
}
),
(38, {'element':'O',
'special_flag':'O_c_Zr_UiO',
'cartesian_coordinates':np.array([-1.973000,-0.039000,-3.529000])
}
),
(39, {'element':'O',
'special_flag':'O_h_Zr_UiO',
'cartesian_coordinates':np.array([-1.161000,1.122000,-1.161000])
}
),
(40, {'element':'O',
'special_flag':'O_h_Zr_UiO',
'cartesian_coordinates':np.array([1.161000,1.122000,1.161000])
}
),
(41, {'element':'O',
'special_flag':'O_z_Zr_UiO',
'cartesian_coordinates':np.array([-1.161000,1.122000,1.161000])
}
),
(42, {'element':'O',
'special_flag':'O_z_Zr_UiO',
'cartesian_coordinates':np.array([1.161000,1.122000,-1.161000])
}
),
(43, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([3.180000,-0.039000,-3.180000])
}
),
(44, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([-3.180000,-0.039000,-3.180000])
}
),
(45, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([-3.180000,-0.039000,3.180000])
}
),
(46, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([3.180000,-0.039000,3.180000])
}
),
(47, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([-3.180000,3.141000,0.000000])
}
),
(48, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([3.180000,3.141000,0.000000])
}
),
(49, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,3.141000,-3.180000])
}
),
(50, {'element':'C',
'special_flag':'C_Zr_UiO',
'cartesian_coordinates':np.array([-0.000000,3.141000,3.180000])
}
),
(51, {'element':'H',
'special_flag':'H_o_Zr_UiO',
'cartesian_coordinates':np.array([1.881000,1.801000,1.666000])
}
),
(52, {'element':'H',
'special_flag':'H_o_Zr_UiO',
'cartesian_coordinates':np.array([-1.832000,-1.884000,1.722000])
}
),
(53, {'element':'H',
'special_flag':'H_o_Zr_UiO',
'cartesian_coordinates':np.array([-1.838000,1.795000,-1.728000])
}
),
(54, {'element':'H',
'special_flag':'H_o_Zr_UiO',
'cartesian_coordinates':np.array([1.871000,-1.866000,-1.695000])
}
)
])
InorganicCluster['Cr']['Cr_tri'].add_nodes_from([
(1, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([2.267000,-1.345000,1.482000])
}
),
(2, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([-0.321000,-2.272000,1.374000])
}
),
(3, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([-1.353000,-2.006000,2.059000])
}
),
(4, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([-2.299000,-1.290000,1.482000])
}
),
(5, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([-1.808000,1.414000,1.374000])
}
),
(6, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([-1.061000,2.175000,2.059000])
}
),
(7, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([0.032000,2.636000,1.482000])
}
),
(8, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([2.128000,0.859000,1.374000])
}
),
(9, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([2.414000,-0.169000,2.059000])
}
),
(10, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([2.267000,-1.345000,-1.477000])
}
),
(11, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([-0.321000,-2.272000,-1.369000])
}
),
(12, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([-1.353000,-2.006000,-2.054000])
}
),
(13, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([-2.299000,-1.290000,-1.477000])
}
),
(14, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([-1.808000,1.414000,-1.369000])
}
),
(15, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([-1.061000,2.175000,-2.054000])
}
),
(16, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([0.032000,2.636000,-1.477000])
}
),
(17, {'element':'O',
'special_flag':'O',
'cartesian_coordinates':np.array([2.128000,0.859000,-1.369000])
}
),
(18, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([2.414000,-0.169000,-2.054000])
}
),
(19, {'element':'Cr',
'special_flag':'Cr_tri',
'cartesian_coordinates':np.array([0.918000,-1.740000,0.002000])
}
),
(20, {'element':'Cr',
'special_flag':'Cr_tri',
'cartesian_coordinates':np.array([-1.966000,0.075000,0.002000])
}
),
(21, {'element':'Cr',
'special_flag':'Cr_tri',
'cartesian_coordinates':np.array([1.048000,1.665000,0.002000])
}
),
(22, {'element':'O',
'special_flag':'O_z_Cr_tri',
'cartesian_coordinates':np.array([0.000000,0.000000,0.002000])
}
)
])
InorganicCluster['V']['V_pillar'].add_nodes_from([
(1, {'element':'O',
'special_flag':'O_c_V_pillar',
'cartesian_coordinates':np.array([-3.335000,1.411000,1.192000])
}
),
(2, {'element':'O',
'special_flag':'O_c_V_pillar',
'cartesian_coordinates':np.array([-1.088000,-1.401000,1.345000])
}
),
(3, {'element':'O',
'special_flag':'O_c_V_pillar',
'cartesian_coordinates':np.array([0.073000,-1.411000,-1.136000])
}
),
(4, {'element':'C',
'special_flag':'C_V_pillar',
'cartesian_coordinates':np.array([-2.221000,-1.831000,1.655000])
}
),
(5, {'element':'O',
'special_flag':'O_c_V_pillar',
'cartesian_coordinates':np.array([-1.088000,1.401000,1.345000])
}
),
(6, {'element':'O',
'special_flag':'O_c_V_pillar',
'cartesian_coordinates':np.array([0.073000,1.411000,-1.136000])
}
),
(7, {'element':'C',
'special_flag':'C_V_pillar',
'cartesian_coordinates':np.array([-2.221000,1.831000,1.655000])
}
),
(8, {'element':'O',
'special_flag':'O_c_V_pillar',
'cartesian_coordinates':np.array([-3.335000,-1.411000,1.192000])
}
),
(9, {'element':'O',
'special_flag':'O_z_V_pillar',
'cartesian_coordinates':np.array([-2.201000,0.000000,-0.786000])
}
),
(10, {'element':'V',
'special_flag':'V_pillar',
'cartesian_coordinates':np.array([-0.327000,0.000000,0.179000])
}
),
(11, {'element':'O',
'special_flag':'O_c_V_pillar',
'cartesian_coordinates':np.array([2.321000,1.401000,-1.289000])
}
),
(12, {'element':'C',
'special_flag':'C_V_pillar',
'cartesian_coordinates':np.array([1.187000,1.831000,-1.599000])
}
),
(13, {'element':'O',
'special_flag':'O_c_V_pillar',
'cartesian_coordinates':np.array([2.321000,-1.401000,-1.289000])
}
),
(14, {'element':'C',
'special_flag':'C_V_pillar',
'cartesian_coordinates':np.array([1.187000,-1.831000,-1.599000])
}
),
(15, {'element':'V',
'special_flag':'V_pillar',
'cartesian_coordinates':np.array([3.082000,0.000000,-0.123000])
}
),
(16, {'element':'O',
'special_flag':'O_z_V_pillar',
'cartesian_coordinates':np.array([1.208000,0.000000,0.842000])
}
)
])
InorganicCluster['Al']['Al_pillar'].add_nodes_from([
(1, {'element':'O',
'special_flag':'O_c_Al_pillar',
'cartesian_coordinates':np.array([-1.215000,1.107000,-0.732000])
}
),
(2, {'element':'O',
'special_flag':'O_c_Al_pillar',
'cartesian_coordinates':np.array([1.383000,-1.106000,-0.464000])
}
),
(3, {'element':'O',
'special_flag':'O_c_Al_pillar',
'cartesian_coordinates':np.array([1.383000,1.107000,-0.464000])
}
),
(4, {'element':'O',
'special_flag':'O_c_Al_pillar',
'cartesian_coordinates':np.array([-1.215000,-1.106000,-0.732000])
}
),
(5, {'element':'Al',
'special_flag':'Al_pillar',
'cartesian_coordinates':np.array([-0.102000,-1.657000,0.608000])
}
),
(6, {'element':'O',
'special_flag':'O_z_Al_pillar',
'cartesian_coordinates':np.array([-0.102000,0.000000,1.473000])
}
),
(7, {'element':'C',
'special_flag':'C_Al_pillar',
'cartesian_coordinates':np.array([2.005000,0.000000,-0.744000])
}
),
(8, {'element':'C',
'special_flag':'C_Al_pillar',
'cartesian_coordinates':np.array([-1.849000,0.000000,-0.976000])
}
),
(9, {'element':'H',
'special_flag':'H_Al_pillar',
'cartesian_coordinates':np.array([-0.121000,-0.071000,2.580000])
}
)#,
#(10, {'element':'Al',
# 'special_flag':'Al_pillar',
# 'cartesian_coordinates':np.array([-0.102000,1.658000,0.608000])
# }
# )
])
OrganicCluster['N']['Adenine'].add_nodes_from([
(1, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([-0.108000,-0.237000,0.527000])
}
),
(2, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([0.853000,-2.150000,0.700000])
}
),
(3, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([0.550000,-0.540000,-0.675000])
}
),
(4, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([-0.074000,1.419000,-1.600000])
}
),
(5, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([-0.796000,0.992000,0.603000])
}
),
(6, {'element':'H',
'special_flag':'Hd',
'cartesian_coordinates':np.array([-1.914000,2.348000,1.629000])
}
),
(7, {'element':'H',
'special_flag':'H',
'cartesian_coordinates':np.array([-1.599000,0.804000,2.476000])
}
),
(8, {'element':'H',
'special_flag':'H',
'cartesian_coordinates':np.array([1.193000,-3.098000,1.104000])
}
),
(9, {'element':'H',
'special_flag':'H',
'cartesian_coordinates':np.array([-0.080000,2.127000,-2.431000])
}
),
(10, {'element':'N',
'special_flag':'N',
'cartesian_coordinates':np.array([0.121000,-1.283000,1.403000])
}
),
(11, {'element':'N',
'special_flag':'N',
'cartesian_coordinates':np.array([1.133000,-1.761000,-0.560000])
}
),
(12, {'element':'N',
'special_flag':'N',
'cartesian_coordinates':np.array([0.617000,0.283000,-1.751000])
}
),
(13, {'element':'N',
'special_flag':'Na',
'cartesian_coordinates':np.array([-0.763000,1.773000,-0.514000])
}
),
(14, {'element':'N',
'special_flag':'Nd',
'cartesian_coordinates':np.array([-1.424000,1.447000,1.691000])
}
)
])
OrganicCluster['N']['Thymine'].add_nodes_from([
(1, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([13.966000,16.972000,12.145000])
}
),
(2, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([12.549000,18.380000,13.950000])
}
),
(3, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([11.714000,19.119000,14.888000])
}
),
(4, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([13.016000,17.103000,14.220000])
}
),
(5, {'element':'N',
'special_flag':'Ndw',
'cartesian_coordinates':np.array([13.714000,16.442000,13.316000])
}
),
(6, {'element':'O',
'special_flag':'Oa2',
'cartesian_coordinates':np.array([14.542000,16.323000,11.289000])
}
),
(7, {'element':'O',
'special_flag':'Oaw',
'cartesian_coordinates':np.array([12.755000,16.528000,15.269000])
}
),
(8, {'element':'H',
'special_flag':'H',
'cartesian_coordinates':np.array([10.864000,18.500000,15.184000])
}
),
(9, {'element':'H',
'special_flag':'Hdw',
'cartesian_coordinates':np.array([14.003000,15.581000,13.493000])
}
),
(10, {'element':'C',
'special_flag':'C',
'cartesian_coordinates':np.array([12.877000,18.890000,12.738000])
}
),
(11, {'element':'N',
'special_flag':'Nd2',
'cartesian_coordinates':np.array([13.557000,18.186000,11.867000])
}
),
(12, {'element':'H',
'special_flag':'H',
'cartesian_coordinates':np.array([12.293000,19.381000,15.776000])
}
),
(13, {'element':'H',
'special_flag':'H',
'cartesian_coordinates':np.array([11.316000,20.039000,14.453000])
}
),
(14, {'element':'H',
'special_flag':'H',
'cartesian_coordinates':np.array([12.585000,19.801000,12.470000])
}
),
(15, {'element':'H',
'special_flag':'Hd2',
'cartesian_coordinates':np.array([13.727000,18.544000,11.021000])
}
)
])
OrganicCluster['N']['CarboxylateImidazolate'].add_nodes_from([
(1, {'element':'C',
'special_flag':'C13',
'cartesian_coordinates':np.array([-0.325000,-0.797000,0.755000])
}
),
(2, {'element':'N',
'special_flag':'N20',
'cartesian_coordinates':np.array([-0.712000,0.499000,0.760000])
}
),
(3, {'element':'C',
'special_flag':'N20',
'cartesian_coordinates':np.array([-0.133000,1.108000,-0.263000])
}
),
(4, {'element':'C',
'special_flag':'C13',
'cartesian_coordinates':np.array([0.616000,0.148000,-0.885000])
}
),
(5, {'element':'N',
'special_flag':'N20',
'cartesian_coordinates':np.array([0.512000,-1.071000,-0.265000])
}
),
(6, {'element':'H',
'special_flag':'8H13',
'cartesian_coordinates':np.array([1.218000,0.325000,-1.764000])
}
),
(7, {'element':'H',
'special_flag':'H',
'cartesian_coordinates':np.array([-0.314000,2.158000,-0.439000])
}
),
(8, {'element':'C',
'special_flag':'C1',
'cartesian_coordinates':np.array([-0.843000,-1.760000,1.840000])
}
),
(9, {'element':'O',
'special_flag':'O2',
'cartesian_coordinates':np.array([-0.453000,-3.062000,1.835000])
}
),
(10, {'element':'O',
'special_flag':'O3',
'cartesian_coordinates':np.array([-1.690000,-1.307000,2.803000])
}
)
])
# Note, the special_flags for the organic linkers below are designed to be compatible
# with the Dubbeldam force field, so changing these values will break if one requests
# the Dubbeldam FF.
OrganicCluster['C']['Benzene-2C'].add_nodes_from([
(1, {'element':'C',
'special_flag':'Cb',
'cartesian_coordinates':np.array([-0.000000,-0.000000,-1.401000])
}
),
(2, {'element':'C',
'special_flag':'Cb',
'cartesian_coordinates':np.array([-0.000000,-0.000000,1.399000])
}
),
(3, {'element':'C',
'special_flag':'Cc',
'cartesian_coordinates':np.array([-0.858000,0.858000,-0.700000])
}
),
(4, {'element':'H',
'special_flag':'Ha',
'cartesian_coordinates':np.array([-1.519000,1.519000,-1.239000])
}
),
(5, {'element':'C',
'special_flag':'Cc',
'cartesian_coordinates':np.array([-0.857000,0.857000,0.700000])
}
),
(6, {'element':'H',
'special_flag':'Ha',
'cartesian_coordinates':np.array([-1.519000,1.519000,1.241000])
}
),
(7, {'element':'C',
'special_flag':'Cc',
'cartesian_coordinates':np.array([0.858000,-0.858000,-0.700000])
}
),
(8, {'element':'H',
'special_flag':'Ha',
'cartesian_coordinates':np.array([1.519000,-1.519000,-1.239000])
}
),
(9, {'element':'C',
'special_flag':'Cc',
'cartesian_coordinates':np.array([0.857000,-0.857000,0.700000])
}
),
(10, {'element':'H',
'special_flag':'Ha',
'cartesian_coordinates':np.array([1.519000,-1.519000,1.241000])
}
)
])
OrganicCluster['C']['Biphenyl-2C'].add_nodes_from([
(1, {'element':'C',
'special_flag':'Cb',
'cartesian_coordinates':np.array([0.000000,0.000000,-3.571000])
}
),
(2, {'element':'C',
'special_flag':'Ce',
'cartesian_coordinates':np.array([0.000000,0.000000,-0.771000])
}
),
(3, {'element':'C',
'special_flag':'Cb',
'cartesian_coordinates':np.array([0.000000,0.000000,3.569000])
}
),
(4, {'element':'C',
'special_flag':'Ce',
'cartesian_coordinates':np.array([0.000000,0.000000,0.771000])
}
),
(5, {'element':'H',
'special_flag':'Hb',
'cartesian_coordinates':np.array([1.519000,-1.519000,0.928000])
}
),
(6, {'element':'C',
'special_flag':'Cd',
'cartesian_coordinates':np.array([0.858000,-0.858000,1.469000])
}
),
(7, {'element':'C',
'special_flag':'Cc',
'cartesian_coordinates':np.array([-0.858000,0.858000,-2.870000])
}
),
(8, {'element':'H',
'special_flag':'Ha',
'cartesian_coordinates':np.array([-1.519000,1.519000,-3.409000])
}
),
(9, {'element':'C',
'special_flag':'Cd',
'cartesian_coordinates':np.array([-0.857000,0.857000,-1.470000])
}
),
(10, {'element':'H',
'special_flag':'Hb',
'cartesian_coordinates':np.array([-1.519000,1.519000,-0.929000])
}
),
(11, {'element':'H',
'special_flag':'Ha',
'cartesian_coordinates':np.array([-1.519000,1.519000,3.412000])
}
),
(12, {'element':'C',
'special_flag':'Cc',
'cartesian_coordinates':np.array([-0.858000,0.858000,2.872000])
}
),
(13, {'element':'H',
'special_flag':'Ha',
'cartesian_coordinates':np.array([1.519000,-1.519000,3.412000])
}
),
(14, {'element':'C',
'special_flag':'Cc',
'cartesian_coordinates':np.array([0.858000,-0.858000,2.872000])
}
),
(15, {'element':'H',
'special_flag':'Hb',
'cartesian_coordinates':np.array([-1.519000,1.519000,0.928000])
}
),
(16, {'element':'C',
'special_flag':'Cd',
'cartesian_coordinates':np.array([-0.858000,0.858000,1.469000])
}
),
(17, {'element':'C',
'special_flag':'Cc',
'cartesian_coordinates':np.array([0.858000,-0.858000,-2.870000])
}
),
(18, {'element':'H',
'special_flag':'Ha',
'cartesian_coordinates':np.array([1.519000,-1.519000,-3.409000])
}
),
(19, {'element':'C',
'special_flag':'Cd',
'cartesian_coordinates':np.array([0.857000,-0.857000,-1.470000])
}
),
(20, {'element':'H',
'special_flag':'Hb',
'cartesian_coordinates':np.array([1.519000,-1.519000,-0.929000])
}
)
])
# 2-connected triphenyl organic SBU: three phenyl rings stacked along the z axis.
# Each node carries its chemical element, a 'special_flag' label (presumably a
# force-field / connection-site tag — confirm against the force-field tables),
# and cartesian coordinates that feed add_distance_matrix() below.
OrganicCluster['C']['Triphenyl-2C'].add_nodes_from([
    (1, {'element':'C', 'special_flag':'Cb',
         'cartesian_coordinates':np.array([-0.000000,-0.000000,-5.741000])}),
    (2, {'element':'C', 'special_flag':'Ce',
         'cartesian_coordinates':np.array([-0.000000,-0.000000,-2.941000])}),
    (3, {'element':'C', 'special_flag':'Cf',
         'cartesian_coordinates':np.array([-0.000000,-0.000000,1.399000])}),
    (4, {'element':'C', 'special_flag':'Cb',
         'cartesian_coordinates':np.array([-0.000000,-0.000000,5.741000])}),
    (5, {'element':'C', 'special_flag':'Ce',
         'cartesian_coordinates':np.array([-0.000000,-0.000000,2.941000])}),
    (6, {'element':'C', 'special_flag':'Cf',
         'cartesian_coordinates':np.array([-0.000000,-0.000000,-1.399000])}),
    (7, {'element':'H', 'special_flag':'Hc',
         'cartesian_coordinates':np.array([1.519000,-1.519000,-1.242000])}),
    (8, {'element':'C', 'special_flag':'Cg',
         'cartesian_coordinates':np.array([0.858000,-0.858000,-0.701000])}),
    (9, {'element':'C', 'special_flag':'Cc',
         'cartesian_coordinates':np.array([-0.858000,0.858000,-5.040000])}),
    (10, {'element':'H', 'special_flag':'Ha',
          'cartesian_coordinates':np.array([-1.519000,1.519000,-5.579000])}),
    (11, {'element':'C', 'special_flag':'Cd',
          'cartesian_coordinates':np.array([-0.857000,0.857000,-3.640000])}),
    (12, {'element':'H', 'special_flag':'Hb',
          'cartesian_coordinates':np.array([-1.519000,1.519000,-3.099000])}),
    (13, {'element':'H', 'special_flag':'Hc',
          'cartesian_coordinates':np.array([-1.519000,1.519000,1.242000])}),
    (14, {'element':'C', 'special_flag':'Cg',
          'cartesian_coordinates':np.array([-0.858000,0.858000,0.701000])}),
    (15, {'element':'C', 'special_flag':'Cc',
          'cartesian_coordinates':np.array([0.858000,-0.858000,5.040000])}),
    (16, {'element':'H', 'special_flag':'Ha',
          'cartesian_coordinates':np.array([1.519000,-1.519000,5.579000])}),
    (17, {'element':'C', 'special_flag':'Cd',
          'cartesian_coordinates':np.array([0.857000,-0.857000,3.640000])}),
    (18, {'element':'H', 'special_flag':'Hb',
          'cartesian_coordinates':np.array([1.519000,-1.519000,3.099000])}),
    (19, {'element':'H', 'special_flag':'Hc',
          'cartesian_coordinates':np.array([1.519000,-1.519000,1.242000])}),
    (20, {'element':'C', 'special_flag':'Cg',
          'cartesian_coordinates':np.array([0.858000,-0.858000,0.701000])}),
    (21, {'element':'C', 'special_flag':'Cc',
          'cartesian_coordinates':np.array([-0.858000,0.858000,5.040000])}),
    (22, {'element':'H', 'special_flag':'Ha',
          'cartesian_coordinates':np.array([-1.519000,1.519000,5.579000])}),
    (23, {'element':'C', 'special_flag':'Cd',
          'cartesian_coordinates':np.array([-0.857000,0.857000,3.640000])}),
    (24, {'element':'H', 'special_flag':'Hb',
          'cartesian_coordinates':np.array([-1.519000,1.519000,3.099000])}),
    (25, {'element':'H', 'special_flag':'Hc',
          'cartesian_coordinates':np.array([-1.519000,1.519000,-1.242000])}),
    (26, {'element':'C', 'special_flag':'Cg',
          'cartesian_coordinates':np.array([-0.858000,0.858000,-0.701000])}),
    (27, {'element':'C', 'special_flag':'Cc',
          'cartesian_coordinates':np.array([0.858000,-0.858000,-5.040000])}),
    (28, {'element':'H', 'special_flag':'Ha',
          'cartesian_coordinates':np.array([1.519000,-1.519000,-5.579000])}),
    (29, {'element':'C', 'special_flag':'Cd',
          'cartesian_coordinates':np.array([0.857000,-0.857000,-3.640000])}),
    (30, {'element':'H', 'special_flag':'Hb',
          'cartesian_coordinates':np.array([1.519000,-1.519000,-3.099000])})
])
# Precompute the pairwise distance matrix for every cluster defined above.
for _group, _key, _name in (
        (InorganicCluster, 'Cu', 'Cu Paddlewheel'),
        (InorganicCluster, 'Zn', 'Zn Paddlewheel'),
        (InorganicCluster, 'Zn', 'Zn4O'),
        (InorganicCluster, 'Zn', 'Kuratowski'),
        (InorganicCluster, 'Zr', 'Zr_UiO'),
        (InorganicCluster, 'Cr', 'Cr_tri'),
        (InorganicCluster, 'V', 'V_pillar'),
        (InorganicCluster, 'Al', 'Al_pillar'),
        (OrganicCluster, 'N', 'Adenine'),
        (OrganicCluster, 'N', 'Thymine'),
        (OrganicCluster, 'N', 'CarboxylateImidazolate'),
        (OrganicCluster, 'C', 'Benzene-2C'),
        (OrganicCluster, 'C', 'Biphenyl-2C'),
        (OrganicCluster, 'C', 'Triphenyl-2C')):
    add_distance_matrix(_group[_key][_name])
| 31.012125
| 111
| 0.50179
| 5,406
| 48,596
| 4.326674
| 0.083981
| 0.250534
| 0.273707
| 0.335913
| 0.863745
| 0.811159
| 0.794186
| 0.787345
| 0.781702
| 0.770928
| 0
| 0.187328
| 0.291032
| 48,596
| 1,566
| 112
| 31.031928
| 0.491568
| 0.010536
| 0
| 0.499019
| 0
| 0
| 0.297364
| 0.129424
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000654
| false
| 0
| 0.001962
| 0
| 0.002616
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a44853a719d1cc203688d7dbb70ade94c5c8b4bb
| 16,258
|
py
|
Python
|
layers.py
|
Luisiglm/Omics-Graph-Neural-Nets-
|
75a4f72ff62c930fd907f4b232b307445ca0c06a
|
[
"MIT"
] | 1
|
2021-11-12T10:58:35.000Z
|
2021-11-12T10:58:35.000Z
|
layers.py
|
Luisiglm/Omics-Graph-Neural-Nets-
|
75a4f72ff62c930fd907f4b232b307445ca0c06a
|
[
"MIT"
] | null | null | null |
layers.py
|
Luisiglm/Omics-Graph-Neural-Nets-
|
75a4f72ff62c930fd907f4b232b307445ca0c06a
|
[
"MIT"
] | null | null | null |
import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import Layer
class gat(Layer):
    """Graph attention layer adapted to genomic data.

    Input: a float32 tensor of shape batch x genes x features.

    Init args:
        adj: a genes x genes numpy adjacency matrix.
        units: number of output features.
        activation: a keras activation (name or callable).

    Trainable parameters (created in build):
        w:  features x units projection.
        ai: units x genes self-attention parameter.
        aj: units x genes neighbour-attention parameter.
    """

    def __init__(self, adj, units, activation=None):
        super(gat, self).__init__()
        self.id = tf.cast(np.identity(adj.shape[1]), dtype="float32")
        self.adj = tf.cast(adj, dtype="float32")
        self.activation = tf.keras.activations.get(activation)
        self.units = units
        self.nodes = adj.shape[1]

    def build(self, input_shape):
        init = tf.keras.initializers.GlorotNormal(seed=None)
        self.w = tf.Variable(name="weight",
                             initial_value=init(shape=(input_shape[2], self.units), dtype="float32"),
                             trainable=True)
        self.ai = tf.Variable(name="self_attn",
                              initial_value=init(shape=(self.units, self.nodes), dtype="float32"),
                              trainable=True)
        self.aj = tf.Variable(name="other_attn",
                              initial_value=init(shape=(self.units, self.nodes), dtype="float32"),
                              trainable=True)
        super().build(input_shape)

    def call(self, inputs):
        h = tf.matmul(inputs, self.w)  # batch x genes x units
        # attention logits: self term plus transposed neighbour term
        attn = tf.matmul(h, self.ai) + tf.transpose(tf.matmul(h, self.aj), perm=[0, 2, 1])
        # mask everything that is neither adjacent nor a self-loop with a
        # large negative value before the softmax
        attn = attn - 1e09 * (1 - (self.adj + self.id))
        attn = tf.nn.softmax(attn, axis=1)
        h = tf.transpose(h, perm=[0, 2, 1])  # batch x units x genes
        h = tf.matmul(h, attn)               # message passing
        h = tf.transpose(h, perm=[0, 2, 1])  # back to batch x genes x units
        return (self.activation(h), attn)
class gate(Layer):
    """Gated graph layer adapted to genomic data.

    Input: a float32 tensor of shape batch x genes x features.

    Init args:
        adj: a genes x genes numpy adjacency matrix.
        units: number of output features.
        activation: a keras activation (name or callable).

    Trainable parameters (created in build):
        w:   features x units projection for the gated message branch.
        w_2: features x units projection for the residual branch.
        ai:  units x genes self-gate parameter.
        aj:  units x genes neighbour-gate parameter.
        b_a: genes x genes additive gate bias.
    """

    def __init__(self, adj, units, activation=None):
        super(gate, self).__init__()
        # identity kept for parity with the sibling layers; call() only uses adj
        self.id = tf.cast(np.identity(adj.shape[1]), dtype="float32")
        self.adj = tf.cast(adj, dtype="float32")
        self.activation = tf.keras.activations.get(activation)
        self.units = units

    def build(self, input_shape):
        init = tf.keras.initializers.GlorotNormal(seed=None)
        n = self.adj.shape[1]
        self.w = tf.Variable(name="weight",
                             initial_value=init(shape=(input_shape[2], self.units), dtype="float32"),
                             trainable=True)
        self.ai = tf.Variable(name="self_attn",
                              initial_value=init(shape=(self.units, n), dtype="float32"),
                              trainable=True)
        self.aj = tf.Variable(name="other_attn",
                              initial_value=init(shape=(self.units, n), dtype="float32"),
                              trainable=True)
        self.w_2 = tf.Variable(name="weight_2",
                               initial_value=init(shape=(input_shape[2], self.units), dtype="float32"),
                               trainable=True)
        self.b_a = tf.Variable(name="other_gate",
                               initial_value=init(shape=(self.adj.shape[0], self.adj.shape[1]), dtype="float32"),
                               trainable=True)
        super().build(input_shape)

    def call(self, inputs):
        h = tf.matmul(inputs, self.w)  # batch x genes x units
        # gate logits: self term plus transposed neighbour term
        logits = tf.matmul(h, self.ai) + tf.transpose(tf.matmul(h, self.aj), perm=[0, 2, 1])
        # drive non-adjacent pairs to a closed gate (sigmoid -> 0)
        logits = logits - 1e09 * (1 - (self.adj))
        gate_h = tf.keras.activations.sigmoid(logits + self.b_a)
        h = tf.transpose(h, perm=[0, 2, 1])  # batch x units x genes
        h = tf.matmul(h, gate_h)             # gated message passing
        h = tf.transpose(h, perm=[0, 2, 1])
        h = h + tf.matmul(inputs, self.w_2)  # residual projection of raw inputs
        return (self.activation(h), gate_h)
class gcn(Layer):
    """Graph convolution layer adapted to genomic data.

    Input: a float32 tensor of shape batch x genes x features.
    Propagation uses L = I - D^(-1/2) A D^(-1/2), built once from the
    adjacency matrix at construction time.

    Init args:
        adj: a genes x genes numpy adjacency matrix.
        units: number of output features.
        activation: a keras activation (name or callable).

    Trainable parameters:
        w: features x units projection.
    """

    def __init__(self, adj, units, activation=None):
        super(gcn, self).__init__()
        n = adj.shape[1]
        self.id = tf.cast(np.identity(n), dtype="float32")
        self.adj = tf.cast(adj, dtype="float32")
        # D^(-1/2) as a diagonal matrix built from the column degree sums
        self.d = tf.math.multiply(self.id, tf.math.reduce_sum(self.adj, axis=0) ** -.5)
        self.L = self.id - tf.matmul(self.d, tf.matmul(self.adj, self.d))
        self.activation = tf.keras.activations.get(activation)
        self.units = units
        self.nodes = n

    def build(self, input_shape):
        init = tf.keras.initializers.RandomNormal(mean=0.0, stddev=1 / np.sum(input_shape[-1]), seed=None)
        self.w = tf.Variable(name="weight",
                             initial_value=init(shape=(input_shape[2], self.units), dtype="float32"),
                             trainable=True)
        super().build(input_shape)

    def call(self, inputs):
        h = tf.matmul(inputs, self.w)        # batch x genes x units
        h = tf.transpose(h, perm=[0, 2, 1])  # batch x units x genes
        h = tf.matmul(h, self.L)             # propagate over the graph
        h = tf.transpose(h, perm=[0, 2, 1])
        return self.activation(h)
class gpool(Layer):
    """Graph pooling layer adapted to genomic data.

    Input: a float32 tensor of shape batch x genes x features.

    Init args:
        adj: a genes x genes numpy adjacency matrix.
        units: number of features to obtain.
        activation: a keras activation (name or callable).

    Trainable parameters:
        w1: features x units projection of the raw inputs.
        w2: features x units projection of the adjacency-pooled inputs.
    """
    def __init__(self, adj, units, activation = None):
        super(gpool, self).__init__()
        self.activation = tf.keras.activations.get(activation)
        self.units = units
        self.id = tf.cast(np.identity(adj.shape[1]), dtype = "float32")
        self.adj = tf.cast(adj, dtype = 'float32')
    def build(self, input_shape):
        w_init = tf.keras.initializers.GlorotNormal(seed=None)
        self.w1 = tf.Variable(name = "weight",
                initial_value = w_init(shape = (input_shape[2],self.units), dtype = "float32"),
                trainable = True)
        # FIX: this variable was also named "weight", colliding with w1 and
        # making name-based checkpoint/debug output ambiguous.
        self.w2 = tf.Variable(name = "weight_2",
                initial_value = w_init(shape = (input_shape[2],self.units), dtype = "float32"),
                trainable = True)
        super().build(input_shape)
    def call(self, inputs):
        # inputs: batch x genes x features
        f = tf.matmul(inputs,self.w1) # project raw inputs: batch x genes x units
        h = tf.transpose(inputs,perm=[0, 2, 1]) # batch x features x genes
        h = tf.matmul(h,self.adj)    # pool features over the adjacency
        h = tf.transpose(h,perm=[0, 2, 1])
        # combine pooled and raw branches
        f = tf.add(tf.matmul(h, self.w2),f)
        return(self.activation(f))
class gpool_ad(Layer):
    """Graph pooling layer with a per-call adjacency.

    Input: a float32 tensor of shape batch x genes x features, plus an
    adjacency tensor passed to call() (batch x genes x genes).

    Init args:
        units: number of output features.
        activation: a keras activation (name or callable).

    Trainable parameters:
        w: features x units projection.
    """

    def __init__(self, units, activation=None):
        super(gpool_ad, self).__init__()
        self.activation = tf.keras.activations.get(activation)
        self.units = units

    def build(self, input_shape):
        init = tf.keras.initializers.RandomNormal(mean=0.0, stddev=1 / np.sum(input_shape[-1]), seed=None)
        self.w = tf.Variable(name="weight",
                             initial_value=init(shape=(input_shape[2], self.units), dtype="float32"),
                             trainable=True)
        super().build(input_shape)

    def call(self, inputs, adj):
        h = tf.matmul(inputs, self.w)        # batch x genes x units
        h = tf.transpose(h, perm=[0, 2, 1])  # batch x units x genes
        h = tf.matmul(h, adj)                # message passing with caller-supplied adjacency
        h = tf.transpose(h, perm=[0, 2, 1])
        return self.activation(h)
class gated_pool_ad(Layer):
    """Gated pooling layer with a per-call adjacency.

    Input: a float32 tensor of shape batch x genes x features, plus an
    adjacency tensor passed to call() (batch x genes x genes).

    Init args:
        units: number of output features.
        activation: a keras activation (name or callable).

    Trainable parameters:
        w:   features x units projection of the pooled branch.
        w_2: features x units projection of the residual branch.
    """

    def __init__(self, units, activation=None):
        super(gated_pool_ad, self).__init__()
        self.activation = tf.keras.activations.get(activation)
        self.units = units

    def build(self, input_shape):
        init = tf.keras.initializers.RandomNormal(mean=0.0, stddev=1 / np.sum(input_shape[-1]), seed=None)
        self.w = tf.Variable(name="weight",
                             initial_value=init(shape=(input_shape[2], self.units), dtype="float32"),
                             trainable=True)
        self.w_2 = tf.Variable(name="weight_2",
                               initial_value=init(shape=(input_shape[2], self.units), dtype="float32"),
                               trainable=True)
        super().build(input_shape)

    def call(self, inputs, adj):
        h = tf.matmul(inputs, self.w)        # batch x genes x units
        h = tf.transpose(h, perm=[0, 2, 1])  # batch x units x genes
        h = tf.matmul(h, adj)                # message passing with caller-supplied adjacency
        h = tf.transpose(h, perm=[0, 2, 1])
        h = tf.add(h, tf.matmul(inputs, self.w_2))  # residual projection of raw inputs
        return self.activation(h)
class fully_3d(Layer):
    """Dense layer over the last axis with a learned additive genes x features offset.

    Input: a float32 tensor of shape batch x genes x features.

    Trainable parameters:
        w_g: genes x features additive offset applied to the inputs.
        w:   features x units projection.
        b:   units bias.
    """
    def __init__(self, units, activation = None):
        super(fully_3d,self).__init__()
        self.activation = tf.keras.activations.get(activation)
        self.units = units
    def build(self, input_shape):
        w_init = tf.random_normal_initializer()
        # FIX: this variable shared the name "kernel" with self.w below, which
        # makes name-based checkpoint/debug output ambiguous.
        self.w_g = tf.Variable(name = "kernel_g",
                initial_value = w_init(shape = (input_shape[1],input_shape[2]),
                dtype = "float32"),
                trainable = True)
        self.w = tf.Variable(name = "kernel",
                initial_value = w_init(shape = (input_shape[2],self.units),
                dtype = "float32"),
                trainable = True)
        b_init = tf.random_normal_initializer()
        self.b = tf.Variable(name = "bias",
                initial_value = b_init(shape = (self.units,), dtype = "float32"),
                trainable = True)
        super().build(input_shape)
    def call(self,inputs):
        # additive offset; original comment says it is meant to "zero out the
        # variables we aren't using" — NOTE(review): the op is an add, not a
        # mask, so confirm the intended semantics with the authors.
        inpts = tf.math.add(inputs,self.w_g)
        return(self.activation(tf.matmul(inpts, self.w)+self.b))
class path_fc(Layer):
    """ Local MLP layer restricted by a gene/path membership mask.

    Input: a float32 tensor of shape batch x genes x features.

    Init args:
        paths: a genes x no_paths membership matrix; entries of 0 disconnect a
               gene from a path (the mask multiplies the mixing weights).
        units: number of output features.
        activation: a keras activation (name or callable).

    Trainable parameters:
        w:       features x units projection.
        w_paths: genes x no_paths mixing weights, masked by `paths`.
        b:       units bias.
    """
    def __init__(self,paths,units, activation = None):
        super(path_fc, self).__init__()
        self.activation = tf.keras.activations.get(activation)
        self.units = units
        self.paths = tf.cast(paths, dtype = "float32")
        self.no_paths = paths.shape[1]
    def build(self, input_shape):
        w_init = tf.random_normal_initializer()
        self.w = tf.Variable(name = "kernel",
                initial_value = w_init(shape = (input_shape[2],self.units),
                dtype = "float32"),
                trainable = True)
        # FIX: this variable was also named "kernel", colliding with self.w and
        # making name-based checkpoint/debug output ambiguous.
        self.w_paths = tf.Variable(name = "kernel_paths",
                initial_value = w_init(shape = (input_shape[1],self.no_paths),
                dtype = "float32"),
                trainable = True)
        b_init = tf.random_normal_initializer()
        self.b = tf.Variable(name = "bias",
                initial_value = b_init(shape = (self.units,), dtype = "float32"),
                trainable = True)
        super().build(input_shape)
    def call(self, inputs):
        # inputs: batch x genes x features
        h = tf.matmul(inputs,self.w)
        h = h + self.b
        h = tf.transpose(h, perm = [0, 2, 1])     # batch x units x genes
        # zero out gene->path connections absent from the membership mask
        w2 = tf.math.multiply(self.paths,self.w_paths)
        f = tf.matmul(h,w2)                       # batch x units x no_paths
        f = tf.transpose(f, perm = [0,2,1])       # batch x no_paths x units
        return(self.activation(f))
class fully_3d_ne(Layer):
    """Plain dense layer applied along the last axis of a batch x genes x features tensor.

    Init args:
        units: number of output features.
        activation: a keras activation (name or callable).
    """

    def __init__(self, units, activation=None):
        super(fully_3d_ne, self).__init__()
        self.activation = tf.keras.activations.get(activation)
        self.units = units

    def build(self, input_shape):
        kernel_init = tf.random_normal_initializer()
        self.w = tf.Variable(name="kernel",
                             initial_value=kernel_init(shape=(input_shape[2], self.units),
                                                       dtype="float32"),
                             trainable=True)
        bias_init = tf.random_normal_initializer()
        self.b = tf.Variable(name="bias",
                             initial_value=bias_init(shape=(self.units,), dtype="float32"),
                             trainable=True)
        super().build(input_shape)

    def call(self, inputs):
        out = tf.matmul(inputs, self.w) + self.b
        return self.activation(out)
| 49.117825
| 119
| 0.562246
| 2,094
| 16,258
| 4.258357
| 0.071633
| 0.040372
| 0.034541
| 0.06168
| 0.906807
| 0.893798
| 0.888079
| 0.887406
| 0.876192
| 0.864416
| 0
| 0.017402
| 0.324886
| 16,258
| 330
| 120
| 49.266667
| 0.795007
| 0.238467
| 0
| 0.76
| 0
| 0
| 0.031725
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12
| false
| 0
| 0.013333
| 0.004444
| 0.173333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8ef7d798926c8b5e03f63b732f6250a14266e417
| 13,792
|
py
|
Python
|
zokyo/utils/data_format_conversions.py
|
toyotaconnected-India/zokyo
|
6086ce7c014dd76b28756596ada3559eacc06d44
|
[
"Apache-2.0"
] | null | null | null |
zokyo/utils/data_format_conversions.py
|
toyotaconnected-India/zokyo
|
6086ce7c014dd76b28756596ada3559eacc06d44
|
[
"Apache-2.0"
] | null | null | null |
zokyo/utils/data_format_conversions.py
|
toyotaconnected-India/zokyo
|
6086ce7c014dd76b28756596ada3559eacc06d44
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Contributors : [srinivas.v@toyotaconnected.co.in,srivathsan.govindarajan@toyotaconnected.co.in,
# harshavardhan.thirupathi@toyotaconnected.co.in,
# ashok.ramadass@toyotaconnected.com ]
from pathlib import Path
import json
import xml.etree.ElementTree as ET
def coco_std_2_voc(coco_ann_path, save_folder=None, database=None):
    """
    Function convert given standard COCO annotation json file to Pascal VOC annotation.
    If save_folder is not given then output is saved in the same folder.

    Writes one ``<i>.xml`` per image (i is the image's position in the COCO
    'images' list) plus a ``voc.names`` file listing category names ordered
    by category id.
    """
    coco_ann_path = Path(coco_ann_path)
    if not database:
        database = 'Unspecified'
    if not save_folder:
        save_folder = coco_ann_path.parent
    else:
        save_folder = Path(save_folder)
        if not save_folder.is_dir():
            raise Exception('Save path should be a directory')
    with open(coco_ann_path, 'r') as f:
        coco_ann = json.load(f)
    # category id -> name, sorted by id so voc.names line order matches ids
    categories = {cat['id']: cat['name'] for cat in coco_ann['categories']}
    categories = dict(sorted(categories.items()))
    voc_names = ''
    for i, cat in categories.items():
        voc_names += cat + '\n'
    with open(f'{save_folder}/voc.names', 'w') as f:
        f.write(voc_names)
    for i, img in enumerate(coco_ann['images']):
        root = ET.Element('annotation')
        img_path = Path(img['file_name'])
        fold = ET.Element('folder')
        fold.text = str(img_path.parent)
        root.append(fold)
        file_name = ET.Element('filename')
        file_name.text = img_path.name
        root.append(file_name)
        path = ET.Element('path')
        path.text = str(img_path)
        root.append(path)
        source = ET.Element('source')
        db = ET.SubElement(source, 'database')
        db.text = database
        root.append(source)
        size = ET.Element('size')
        width = ET.SubElement(size, 'width')
        width.text = str(int(img['width']))
        height = ET.SubElement(size, 'height')
        height.text = str(int(img['height']))
        depth = ET.SubElement(size, 'depth')
        # depth is hard-coded to 3 (RGB assumed — confirm for grayscale data)
        depth.text = str(3)
        root.append(size)
        # linear scan over all annotations for each image (O(images*annotations))
        for ann in coco_ann['annotations']:
            if img['id'] == ann['image_id']:
                obj = ET.Element('object')
                name = ET.Element('name')
                name.text = categories[ann['category_id']]
                obj.append(name)
                pose = ET.Element('pose')
                if 'pose' in ann:
                    pose.text = str(ann['pose'])
                else:
                    pose.text = 'Unspecified'
                obj.append(pose)
                truncated = ET.Element('truncated')
                if 'truncated' in ann:
                    truncated.text = str(ann['truncated'])
                else:
                    truncated.text = 'Unspecified'
                obj.append(truncated)
                # COCO bbox is [x, y, width, height]; VOC wants corner coordinates
                bndbox = ET.Element('bndbox')
                xmin = ET.SubElement(bndbox, 'xmin')
                xmin.text = str(int(ann['bbox'][0]))
                ymin = ET.SubElement(bndbox, 'ymin')
                ymin.text = str(int(ann['bbox'][1]))
                xmax = ET.SubElement(bndbox, 'xmax')
                xmax.text = str(int(ann['bbox'][0] + ann['bbox'][2]))
                ymax = ET.SubElement(bndbox, 'ymax')
                ymax.text = str(int(ann['bbox'][1] + ann['bbox'][3]))
                obj.append(bndbox)
                root.append(obj)
        tree = ET.ElementTree(root)
        with open(f'{save_folder}/{i}.xml', 'wb') as f:
            tree.write(f)
def voc_2_coco_std(voc_folder_path, save_folder=None, info=None):
    """
    Function convert given Pascal VOC annotations to standard COCO annotation json file.
    If save_folder is not given then output is saved in the same folder.

    Reads ``*.names`` for category names and every ``*.xml`` (sorted) for
    per-image annotations; writes a single ``coco_ann.json``.
    """
    voc_folder_path = Path(voc_folder_path)
    info = info if info else {}
    if save_folder:
        save_folder = Path(save_folder)
        if not save_folder.is_dir():
            raise Exception('Save path should be a directory')
    else:
        save_folder = voc_folder_path
    names_file = list(voc_folder_path.glob('*.names'))[0]
    with open(names_file, 'r') as fh:
        voc_names = [line.rstrip('\n') for line in fh.readlines()]
    # category ids are 1-based line numbers of voc.names
    coco_ann = {
        'info': info,
        'categories': [{"id": cid, "name": cat} for cid, cat in enumerate(voc_names, 1)],
    }
    xml_paths = sorted(list(voc_folder_path.glob('*.xml')))
    if not xml_paths:
        raise Exception('Empty Voc directory')
    imgs, anns = [], []
    ann_id = 0
    for img_id, xml_path in enumerate(xml_paths):
        root = ET.parse(xml_path).getroot()
        size = root.find('size')
        imgs.append({
            'id': img_id,
            'width': int(size.find('width').text),
            'height': int(size.find('height').text),
            'file_name': root.find('path').text,
        })
        for obj in root.findall('object'):
            box = obj.find('bndbox')
            x = int(box.find('xmin').text)
            y = int(box.find('ymin').text)
            w = int(box.find('xmax').text) - x
            h = int(box.find('ymax').text) - y
            anns.append({
                'id': ann_id,
                'image_id': img_id,
                'category_id': voc_names.index(obj.find('name').text) + 1,
                'area': w * h,
                'bbox': [x, y, w, h],
            })
            ann_id += 1
    coco_ann['images'] = imgs
    coco_ann['annotations'] = anns
    with open(f'{save_folder}/coco_ann.json', 'w') as fh:
        json.dump(coco_ann, fh)
def coco_std_2_coco_toyo(coco_ann_path, save_folder=None):
    """
    Function convert given standard COCO annotation json file to Toyo COCO annotation json files.
    If save_folder is not given then output is saved in the same folder.

    Each image produces one ``<i>.json`` (i is its position in the 'images'
    list) containing that image and only its annotations.
    """
    coco_ann_path = Path(coco_ann_path)
    if save_folder:
        save_folder = Path(save_folder)
        if not save_folder.is_dir():
            raise Exception('Save path should be a directory')
    else:
        save_folder = coco_ann_path.parent
    with open(coco_ann_path, 'r') as fh:
        coco_ann = json.load(fh)
    toyo = {'info': coco_ann['info'], 'categories': coco_ann['categories']}
    for idx, img in enumerate(coco_ann['images']):
        toyo['images'] = [img]
        toyo['annotations'] = [a for a in coco_ann['annotations']
                               if a['image_id'] == img['id']]
        with open(f'{save_folder}/{idx}.json', 'w') as fh:
            json.dump(toyo, fh)
def coco_toyo_2_coco_std(coco_toyo_ann_folder, save_folder=None):
    """
    Function convert given Toyo COCO annotation json files to standard COCO annotation json file.
    If save_folder is not given then output is saved in the same folder.

    Merges every ``*.json`` in the folder into a single ``coco_ann.json``
    (written to the folder's parent when save_folder is omitted), with
    images and annotations sorted by id.
    """
    coco_toyo_ann_folder = Path(coco_toyo_ann_folder)
    if save_folder:
        save_folder = Path(save_folder)
        if not save_folder.is_dir():
            raise Exception('Save path should be a directory')
    else:
        save_folder = coco_toyo_ann_folder.parent
    merged = {}
    imgs, anns = [], []
    for part_path in sorted(list(coco_toyo_ann_folder.glob('*.json'))):
        with open(part_path, 'r') as fh:
            part = json.load(fh)
        # info/categories are identical across parts; last one wins
        merged['info'] = part['info']
        merged['categories'] = part['categories']
        imgs.append(part['images'][0])
        anns.extend(part['annotations'])
    merged['images'] = sorted(imgs, key=lambda im: im['id'])
    merged['annotations'] = sorted(anns, key=lambda a: a['id'])
    with open(f'{save_folder}/coco_ann.json', 'w') as fh:
        json.dump(merged, fh)
def coco_toyo_2_voc(coco_toyo_ann_folder, save_folder=None, database=None):
    """
    Function convert given Toyo COCO annotation json files to Pascal VOC annotation.
    If save_folder is not given then output is saved in the same folder.

    Writes one ``<stem>.xml`` per Toyo json file (stem reused from the json
    filename) plus a ``voc.names`` file listing category names by id. When
    save_folder is omitted, output goes to the PARENT of the input folder.
    """
    coco_toyo_ann_folder = Path(coco_toyo_ann_folder)
    if not database:
        database = 'Unspecified'
    if not save_folder:
        save_folder = coco_toyo_ann_folder.parent
    else:
        save_folder = Path(save_folder)
        if not save_folder.is_dir():
            raise Exception('Save path should be a directory')
    coco_toyo_ann_paths = sorted(list(coco_toyo_ann_folder.glob('*.json')))
    # categories are assumed identical across parts; take them from the first file
    with open(coco_toyo_ann_paths[0], 'r') as f:
        coco_toyo_ann = json.load(f)
    categories = {cat['id']: cat['name']
                  for cat in coco_toyo_ann['categories']}
    categories = dict(sorted(categories.items()))
    voc_names = ''
    for i, cat in categories.items():
        voc_names += cat + '\n'
    with open(f'{save_folder}/voc.names', 'w') as f:
        f.write(voc_names)
    for i, coco_toyo_ann_path in enumerate(coco_toyo_ann_paths):
        with open(coco_toyo_ann_path, 'r') as f:
            coco_toyo_ann = json.load(f)
        # Toyo format: exactly one image per file
        img = coco_toyo_ann['images'][0]
        root = ET.Element('annotation')
        img_path = Path(img['file_name'])
        fold = ET.Element('folder')
        fold.text = str(img_path.parent)
        root.append(fold)
        file_name = ET.Element('filename')
        file_name.text = img_path.name
        root.append(file_name)
        path = ET.Element('path')
        path.text = str(img_path)
        root.append(path)
        source = ET.Element('source')
        db = ET.SubElement(source, 'database')
        db.text = database
        root.append(source)
        size = ET.Element('size')
        width = ET.SubElement(size, 'width')
        width.text = str(int(img['width']))
        height = ET.SubElement(size, 'height')
        height.text = str(int(img['height']))
        depth = ET.SubElement(size, 'depth')
        # depth is hard-coded to 3 (RGB assumed — confirm for grayscale data)
        depth.text = str(3)
        root.append(size)
        for ann in coco_toyo_ann['annotations']:
            if img['id'] == ann['image_id']:
                obj = ET.Element('object')
                name = ET.Element('name')
                name.text = categories[ann['category_id']]
                obj.append(name)
                pose = ET.Element('pose')
                if 'pose' in ann:
                    pose.text = str(ann['pose'])
                else:
                    pose.text = 'Unspecified'
                obj.append(pose)
                truncated = ET.Element('truncated')
                if 'truncated' in ann:
                    truncated.text = str(ann['truncated'])
                else:
                    truncated.text = 'Unspecified'
                obj.append(truncated)
                # COCO bbox is [x, y, width, height]; VOC wants corner coordinates
                bndbox = ET.Element('bndbox')
                xmin = ET.SubElement(bndbox, 'xmin')
                xmin.text = str(int(ann['bbox'][0]))
                ymin = ET.SubElement(bndbox, 'ymin')
                ymin.text = str(int(ann['bbox'][1]))
                xmax = ET.SubElement(bndbox, 'xmax')
                xmax.text = str(int(ann['bbox'][0] + ann['bbox'][2]))
                ymax = ET.SubElement(bndbox, 'ymax')
                ymax.text = str(int(ann['bbox'][1] + ann['bbox'][3]))
                obj.append(bndbox)
                root.append(obj)
        tree = ET.ElementTree(root)
        with open(f'{save_folder}/{coco_toyo_ann_path.stem}.xml', 'wb') as f:
            tree.write(f)
def voc_2_coco_toyo(voc_folder_path, save_folder=None, info=None):
    """
    Function convert given Pascal VOC annotation to Toyo COCO annotation json files.
    If save_folder is not given then output is saved in the same folder.

    One ``<i>.json`` is written per input xml (sorted order), each holding a
    single image plus its annotations; annotation ids are global across files.
    """
    voc_folder_path = Path(voc_folder_path)
    if not info:
        info = {}
    if not save_folder:
        save_folder = voc_folder_path
    else:
        save_folder = Path(save_folder)
        if not save_folder.is_dir():
            raise Exception('Save path should be a directory')
    coco_toyo_ann = {}
    voc_names_path = list(voc_folder_path.glob('*.names'))[0]
    with open(voc_names_path, 'r') as f:
        voc_names = [n.rstrip('\n') for n in f.readlines()]
    # category ids are 1-based line numbers of voc.names
    categories = [{"id": i, "name": cat} for i, cat in enumerate(voc_names, 1)]
    coco_toyo_ann['info'] = info
    coco_toyo_ann['categories'] = categories
    voc_ann_paths = sorted(list(voc_folder_path.glob('*.xml')))
    if len(voc_ann_paths) == 0:
        raise Exception('Empty Voc directory')
    imgs = []
    anns = []
    j = 0  # global annotation id across all output files
    for i, voc_ann_path in enumerate(voc_ann_paths):
        root = ET.parse(voc_ann_path).getroot()
        img = {}
        img['id'] = i
        size = root.find('size')
        img['width'] = int(size.find('width').text)
        img['height'] = int(size.find('height').text)
        # BUG FIX: key was misspelled 'fil_ename', producing json no COCO
        # consumer (including the voc_2_coco_std counterpart) could read.
        img['file_name'] = root.find('path').text
        imgs.append(img)
        for obj in root.findall('object'):
            ann = {}
            ann['id'] = j
            ann['image_id'] = i
            ann['category_id'] = voc_names.index(obj.find('name').text) + 1
            # VOC gives corner coordinates; COCO bbox is [x, y, width, height]
            bbox = obj.find('bndbox')
            x = int(bbox.find('xmin').text)
            y = int(bbox.find('ymin').text)
            w = int(bbox.find('xmax').text) - x
            h = int(bbox.find('ymax').text) - y
            ann['area'] = w * h
            ann['bbox'] = [x, y, w, h]
            j += 1
            anns.append(ann)
        # attach this image's data, then reset the accumulators; the dict keeps
        # references to the current lists, so the dump below sees them intact
        coco_toyo_ann['images'] = imgs
        coco_toyo_ann['annotations'] = anns
        imgs = []
        anns = []
        with open(f'{save_folder}/{i}.json', 'w') as f:
            json.dump(coco_toyo_ann, f)
| 31.20362
| 101
| 0.568735
| 1,827
| 13,792
| 4.120963
| 0.079365
| 0.06641
| 0.061363
| 0.023908
| 0.921636
| 0.90251
| 0.882322
| 0.876478
| 0.85868
| 0.852437
| 0
| 0.003714
| 0.297274
| 13,792
| 441
| 102
| 31.274376
| 0.773112
| 0.081932
| 0
| 0.872131
| 0
| 0
| 0.114997
| 0.016645
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019672
| false
| 0
| 0.009836
| 0
| 0.029508
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8efccfffd892ee42a0ffe231c51e5bfc944a948a
| 4,008
|
py
|
Python
|
gym/envs/F16/AeroBenchVVPython/code/RunF16Sim.py
|
caffett/gym
|
a9126ace2488acfaa63544ad14859d530ee4ac76
|
[
"Python-2.0",
"OLDAP-2.7"
] | null | null | null |
gym/envs/F16/AeroBenchVVPython/code/RunF16Sim.py
|
caffett/gym
|
a9126ace2488acfaa63544ad14859d530ee4ac76
|
[
"Python-2.0",
"OLDAP-2.7"
] | null | null | null |
gym/envs/F16/AeroBenchVVPython/code/RunF16Sim.py
|
caffett/gym
|
a9126ace2488acfaa63544ad14859d530ee4ac76
|
[
"Python-2.0",
"OLDAP-2.7"
] | null | null | null |
'''
Stanley Bak
RunF16Sim python version
'''
import numpy as np
from scipy.integrate import RK45
from controlledF16 import controlledF16
def RunF16Sim(initialState, tMax, der_func, F16Model, ap, llc, pass_fail, sim_step=0.01, multipliers=None):
    '''Simulates and analyzes autonomous F-16 maneuvers.

    Integrates der_func with adaptive RK45 up to tMax, sampling the dense
    output every sim_step seconds; the integrator is restarted whenever the
    autopilot changes discrete state. Returns
    (result, times, states, modes, ps_list, Nz_list, u_list).
    '''
    # append integral error states to state vector
    initialState = np.array(initialState, dtype=float)
    x0 = np.zeros((initialState.shape[0] + llc.get_num_integrators() + ap.get_num_integrators(),))
    x0[:initialState.shape[0]] = initialState
    # run the numerical simulation
    times = [0]
    states = [x0]
    modes = [ap.state]
    # evaluate the controlled dynamics once at t=0 to record the initial outputs
    _, u, Nz, ps, _ = controlledF16(times[-1], states[-1], F16Model, ap, llc, multipliers=multipliers)
    Nz_list = [Nz]
    ps_list = [ps]
    u_list = [u]
    rk45 = RK45(der_func, times[-1], states[-1], tMax)
    while rk45.status == 'running':
        rk45.step()
        # sample the solution on the fixed sim_step grid covered by this solver step
        if rk45.t > times[-1] + sim_step:
            dense_output = rk45.dense_output()
            while rk45.t > times[-1] + sim_step:
                t = times[-1] + sim_step
                times.append(t)
                states.append(dense_output(t))
                updated = ap.advance_discrete_state(times[-1], states[-1])
                modes.append(ap.state)
                # re-run dynamics function at current state to get non-state variables
                xd, u, Nz, ps, Ny_r = controlledF16(times[-1], states[-1], F16Model, ap, llc, multipliers=multipliers)
                pass_fail.advance(times[-1], states[-1], ap.state, xd, u, Nz, ps, Ny_r)
                Nz_list.append(Nz)
                ps_list.append(ps)
                u_list.append(u)
                if updated:
                    # autopilot mode changed: restart the integrator from the current state
                    rk45 = RK45(der_func, times[-1], states[-1], tMax)
                    print("at time {}, state changes to {}".format(times[-1], ap.state))
                    break
                if pass_fail.break_on_error and not pass_fail.result():
                    break
        if pass_fail.break_on_error and not pass_fail.result():
            break
    result = pass_fail.result()
    # make sure the solver didn't fail
    if rk45.status != 'finished':
        result = False # fail
    # print(times)
    # assert False
    return result, times, states, modes, ps_list, Nz_list, u_list
def RunF16Sim_discete_time(initialState, tMax, F16Model, ap, llc, pass_fail, sim_step=0.01, multipliers=None):
    '''Simulates and analyzes autonomous F-16 maneuvers with fixed-step
    (forward Euler) integration.

    NOTE: "discete" is a historical typo for "discrete"; the name is kept
    for backward compatibility with existing callers.

    Returns a 7-tuple:
        (result, times, states, modes, ps_list, Nz_list, u_list)
    '''
    # append integral error states to state vector
    initialState = np.array(initialState, dtype=float)
    x0 = np.zeros((initialState.shape[0] + llc.get_num_integrators() + ap.get_num_integrators(),))
    x0[:initialState.shape[0]] = initialState

    # run the numerical simulation
    times = [0]
    states = [x0]
    modes = [ap.state]

    xd, u, Nz, ps, Ny_r = controlledF16(times[-1], states[-1], F16Model, ap, llc, multipliers=multipliers)
    Nz_list = [Nz]
    ps_list = [ps]
    u_list = [u]

    while times[-1] < tMax:
        # explicit Euler step using the derivative evaluated at the
        # previous state (xd from the last controlledF16 call)
        t = times[-1] + sim_step
        state = states[-1] + sim_step * xd
        times.append(t)
        states.append(state)

        updated = ap.advance_discrete_state(times[-1], states[-1])
        modes.append(ap.state)
        if updated:
            print("at time {}, state changes to {}".format(times[-1], ap.state))

        # re-run dynamics function at current state to get non-state variables
        xd, u, Nz, ps, Ny_r = controlledF16(times[-1], states[-1], F16Model, ap, llc, multipliers=multipliers)
        pass_fail.advance(times[-1], states[-1], ap.state, xd, u, Nz, ps, Ny_r)
        Nz_list.append(Nz)
        ps_list.append(ps)
        u_list.append(u)

        if pass_fail.break_on_error and not pass_fail.result():
            break

    # BUG FIX: 'result' was previously assigned only on the early-exit
    # branch above (the unconditional assignment was commented out), so a
    # simulation that ran to completion raised NameError at the return.
    # Evaluating the verdict here covers both paths: after an early break,
    # pass_fail.result() is the same False that triggered the break.
    result = pass_fail.result()

    return result, times, states, modes, ps_list, Nz_list, u_list
| 33.680672
| 118
| 0.605289
| 536
| 4,008
| 4.393657
| 0.192164
| 0.043312
| 0.050955
| 0.055202
| 0.814437
| 0.763057
| 0.747771
| 0.747771
| 0.747771
| 0.720594
| 0
| 0.03558
| 0.270709
| 4,008
| 119
| 119
| 33.680672
| 0.770099
| 0.143463
| 0
| 0.767123
| 0
| 0
| 0.049288
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027397
| false
| 0.109589
| 0.041096
| 0
| 0.09589
| 0.027397
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
2d0a28114bc3ab769f4b8d7039ed5bf48a3bb752
| 120
|
py
|
Python
|
Operators/Selection/__init__.py
|
mjbogusz/TSPGen
|
4916cf6276fda41b73ebdf24a7969167c63d0650
|
[
"MIT"
] | null | null | null |
Operators/Selection/__init__.py
|
mjbogusz/TSPGen
|
4916cf6276fda41b73ebdf24a7969167c63d0650
|
[
"MIT"
] | null | null | null |
Operators/Selection/__init__.py
|
mjbogusz/TSPGen
|
4916cf6276fda41b73ebdf24a7969167c63d0650
|
[
"MIT"
] | null | null | null |
from Operators.Selection.Selector import Selector
from Operators.Selection.TournamentSelector import TournamentSelector
| 40
| 69
| 0.9
| 12
| 120
| 9
| 0.5
| 0.240741
| 0.407407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 120
| 2
| 70
| 60
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
742a8be10c4c217737d25f821b63af98c8b22766
| 6,288
|
py
|
Python
|
tests/test_configs.py
|
hipster-philology/pandora
|
613fc114a3faf2b49c7094d756583d0385b1ab65
|
[
"MIT"
] | 8
|
2017-11-06T14:04:56.000Z
|
2021-10-04T10:38:23.000Z
|
tests/test_configs.py
|
hipster-philology/pandora
|
613fc114a3faf2b49c7094d756583d0385b1ab65
|
[
"MIT"
] | 95
|
2017-02-06T13:49:43.000Z
|
2018-06-14T07:51:30.000Z
|
tests/test_configs.py
|
hipster-philology/pandora
|
613fc114a3faf2b49c7094d756583d0385b1ab65
|
[
"MIT"
] | 3
|
2017-02-08T21:15:39.000Z
|
2018-04-09T13:46:13.000Z
|
from unittest import TestCase
import os
import shutil
from pandora.tagger import Tagger
from pandora.tagger import MODELS
TEST, DEV, TRAIN = "data/geste/test", "data/geste/dev", "data/geste/train"
class TestConfigLoader(TestCase):
    """Check that Tagger configuration values survive loading and re-saving."""

    def setUp(self):
        # the config under test writes its params into this model directory
        try:
            os.makedirs("fake_model")
        except OSError:
            # directory left over from a previous (failed) run
            pass

    def tearDown(self):
        shutil.rmtree("fake_model")

    def _assert_config_params(self, tagger, include_pos):
        """Assert every config value shared by both tests.

        The two tests previously duplicated this whole assertion list;
        only include_pos differs between them, so it is a parameter.
        """
        self.assertEqual(tagger.nb_encoding_layers, 2, "nb_encoding_layers should be correctly loaded")
        self.assertEqual(tagger.nb_epochs, 3, "nb_epochs should be correctly loaded")
        self.assertEqual(tagger.nb_dense_dims, 1000, "nb_dense_dims should be correctly loaded")
        self.assertEqual(tagger.batch_size, 100, "batch_size should be correctly loaded")
        self.assertEqual(tagger.nb_left_tokens, 2, "nb_left_tokens should be correctly loaded")
        self.assertEqual(tagger.nb_right_tokens, 1, "nb_right_tokens should be correctly loaded")
        self.assertEqual(tagger.nb_context_tokens, 3, "nb_context_tokens should be correctly computed")
        self.assertEqual(tagger.nb_embedding_dims, 100, "nb_embedding_dims should be correctly loaded")
        self.assertEqual(tagger.model_dir, "fake_model", "model_dir should be correctly loaded")
        self.assertEqual(tagger.postcorrect, False, "postcorrect should be correctly loaded")
        self.assertEqual(tagger.nb_filters, 100, "nb_filters should be correctly loaded")
        self.assertEqual(tagger.filter_length, 3, "filter_length should be correctly loaded")
        self.assertEqual(tagger.focus_repr, "convolutions", "focus_repr should be correctly loaded")
        self.assertEqual(tagger.dropout_level, 0.15, "dropout_level should be correctly loaded")
        self.assertEqual(tagger.include_token, True, "include_token should be correctly loaded")
        self.assertEqual(tagger.include_context, True, "include_context should be correctly loaded")
        self.assertEqual(tagger.include_lemma, "label", "include_lemma should be correctly loaded")
        self.assertEqual(tagger.include_pos, include_pos, "include_pos should be correctly loaded")
        self.assertEqual(tagger.include_morph, False, "include_morph should be correctly loaded")
        self.assertEqual(tagger.include_dev, True, "include_dev should be correctly loaded")
        self.assertEqual(tagger.include_test, True, "include_test should be correctly loaded")
        self.assertEqual(tagger.min_token_freq_emb, 5, "min_token_freq_emb should be correctly loaded")
        self.assertEqual(tagger.halve_lr_at, 75, "halve_lr_at should be correctly loaded")
        self.assertEqual(tagger.max_token_len, 20, "max_token_len should be correctly loaded")
        self.assertEqual(tagger.min_lem_cnt, 1, "min_lem_cnt should be correctly loaded")

    def test_load(self):
        """ Ensure params are correctly loaded """
        tagger = Tagger(config_path="./tests/test_configs/config_chrestien.txt")
        self._assert_config_params(tagger, include_pos=True)

    def test_load_after_save(self):
        """ Ensure param are correctly saved """
        tagger = Tagger.setup_from_disk(
            config_path="./tests/test_configs/config_chrestien.txt", train_data=TRAIN, dev_data=DEV, test_data=TEST
        )
        # mutate two params before saving so we can verify they round-trip
        tagger.include_pos = False
        tagger.curr_nb_epochs = 10
        tagger.save_params()
        self.assertEqual(tagger.pretrainer.nb_workers, 1, "Pretrainer Workers should be correctly loaded")
        del tagger

        # reload from the saved config and verify everything round-tripped
        tagger = Tagger(config_path="./fake_model/config.txt")
        self._assert_config_params(tagger, include_pos=False)
        self.assertEqual(tagger.curr_nb_epochs, 10, "Current number of epochs should be correctly loaded")
        self.assertEqual(tagger.model, "PyTorch", "PyTorch implementation is loaded")

        tagger = Tagger(config_path="./fake_model/config.txt", load=True)
        self.assertIsInstance(tagger.model, MODELS["PyTorch"], "PyTorch implementation is loaded")
| 67.612903
| 115
| 0.74014
| 825
| 6,288
| 5.435152
| 0.134545
| 0.177297
| 0.248216
| 0.256467
| 0.863515
| 0.835861
| 0.835861
| 0.831178
| 0.782783
| 0.782783
| 0
| 0.011679
| 0.16937
| 6,288
| 92
| 116
| 68.347826
| 0.846831
| 0.010814
| 0
| 0.6
| 0
| 0
| 0.389202
| 0.020629
| 0
| 0
| 0
| 0
| 0.675
| 1
| 0.05
| false
| 0.0125
| 0.0625
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7435e823772eaac230f997774c3e89cfb23ab7f2
| 6,899
|
py
|
Python
|
src/randonet/pytorch/rnn.py
|
ahgamut/randonet
|
b55241809318d70e97c7718b3fcdc91a7219f269
|
[
"MIT"
] | null | null | null |
src/randonet/pytorch/rnn.py
|
ahgamut/randonet
|
b55241809318d70e97c7718b3fcdc91a7219f269
|
[
"MIT"
] | null | null | null |
src/randonet/pytorch/rnn.py
|
ahgamut/randonet
|
b55241809318d70e97c7718b3fcdc91a7219f269
|
[
"MIT"
] | null | null | null |
from randonet.generator.param import (
Param,
IntParam,
FloatParam,
BinaryParam,
ChoiceParam,
TupleParam,
)
from randonet.generator.unit import Unit, Factory as _Factory
from randonet.generator.conv import ConvFactory, ConvTransposeFactory
from collections import namedtuple
class RNN(_Factory):
    """Factory producing randomized constructor arguments for an RNN layer.

    Each layer argument is wrapped in a Param object; keyword arguments
    pin a parameter to a fixed value instead of the generated one.
    """

    def __init__(self, **kwargs):
        _Factory.__init__(self)
        # the namedtuple acts as the ordered argument template for the layer
        self.template_fn = namedtuple(
            "RNN",
            [
                "input_size",
                "hidden_size",
                "num_layers",
                "bias",
                "batch_first",
                "dropout",
                "bidirectional",
            ],
        )
        self.params = self.template_fn(
            input_size=IntParam(name="input_size", default=1),
            hidden_size=Param(name="hidden_size", default=None),
            num_layers=IntParam(name="num_layers", default=1),
            bias=BinaryParam(name="bias", default=True, true_prob=0.5),
            batch_first=BinaryParam(name="batch_first", default=False, true_prob=0.5),
            # FIX: dropout is a float probability in [0, 1); the original
            # IntParam with a float default (0.0) was a typo -- FloatParam
            # is imported at the top of this module but was never used.
            dropout=FloatParam(name="dropout", default=0.0),
            bidirectional=BinaryParam(
                name="bidirectional", default=False, true_prob=0.5
            ),
        )
        # caller-supplied overrides pin individual parameter values
        for k, v in kwargs.items():
            getattr(self.params, k).val = v
class LSTM(_Factory):
    """Factory producing randomized constructor arguments for an LSTM layer.

    Each layer argument is wrapped in a Param object; keyword arguments
    pin a parameter to a fixed value instead of the generated one.
    """

    def __init__(self, **kwargs):
        _Factory.__init__(self)
        # the namedtuple acts as the ordered argument template for the layer
        self.template_fn = namedtuple(
            "LSTM",
            [
                "input_size",
                "hidden_size",
                "num_layers",
                "bias",
                "batch_first",
                "dropout",
                "bidirectional",
            ],
        )
        self.params = self.template_fn(
            input_size=IntParam(name="input_size", default=1),
            hidden_size=Param(name="hidden_size", default=None),
            num_layers=IntParam(name="num_layers", default=1),
            bias=BinaryParam(name="bias", default=True, true_prob=0.5),
            batch_first=BinaryParam(name="batch_first", default=False, true_prob=0.5),
            # FIX: dropout is a float probability in [0, 1); the original
            # IntParam with a float default (0.0) was a typo -- FloatParam
            # is imported at the top of this module but was never used.
            dropout=FloatParam(name="dropout", default=0.0),
            bidirectional=BinaryParam(
                name="bidirectional", default=False, true_prob=0.5
            ),
        )
        # caller-supplied overrides pin individual parameter values
        for k, v in kwargs.items():
            getattr(self.params, k).val = v
class GRU(_Factory):
    """Factory producing randomized constructor arguments for a GRU layer.

    Each layer argument is wrapped in a Param object; keyword arguments
    pin a parameter to a fixed value instead of the generated one.
    """

    def __init__(self, **kwargs):
        _Factory.__init__(self)
        # the namedtuple acts as the ordered argument template for the layer
        self.template_fn = namedtuple(
            "GRU",
            [
                "input_size",
                "hidden_size",
                "num_layers",
                "bias",
                "batch_first",
                "dropout",
                "bidirectional",
            ],
        )
        self.params = self.template_fn(
            input_size=IntParam(name="input_size", default=1),
            hidden_size=Param(name="hidden_size", default=None),
            num_layers=IntParam(name="num_layers", default=1),
            bias=BinaryParam(name="bias", default=True, true_prob=0.5),
            batch_first=BinaryParam(name="batch_first", default=False, true_prob=0.5),
            # FIX: dropout is a float probability in [0, 1); the original
            # IntParam with a float default (0.0) was a typo -- FloatParam
            # is imported at the top of this module but was never used.
            dropout=FloatParam(name="dropout", default=0.0),
            bidirectional=BinaryParam(
                name="bidirectional", default=False, true_prob=0.5
            ),
        )
        # caller-supplied overrides pin individual parameter values
        for k, v in kwargs.items():
            getattr(self.params, k).val = v
class LSTMCell(_Factory):
    """Factory producing randomized constructor arguments for an LSTMCell."""

    def __init__(self, **kwargs):
        _Factory.__init__(self)
        # ordered argument template for the cell
        self.template_fn = namedtuple("LSTMCell", ["input_size", "hidden_size", "bias"])
        defaults = {
            "input_size": IntParam(name="input_size", default=1),
            "hidden_size": Param(name="hidden_size", default=None),
            "bias": BinaryParam(name="bias", default=True, true_prob=0.5),
        }
        self.params = self.template_fn(**defaults)
        # caller-supplied overrides pin individual parameter values
        for field, override in kwargs.items():
            getattr(self.params, field).val = override
class GRUCell(_Factory):
    """Factory producing randomized constructor arguments for a GRUCell."""

    def __init__(self, **kwargs):
        _Factory.__init__(self)
        # ordered argument template for the cell
        self.template_fn = namedtuple("GRUCell", ["input_size", "hidden_size", "bias"])
        defaults = {
            "input_size": IntParam(name="input_size", default=1),
            "hidden_size": Param(name="hidden_size", default=None),
            "bias": BinaryParam(name="bias", default=True, true_prob=0.5),
        }
        self.params = self.template_fn(**defaults)
        # caller-supplied overrides pin individual parameter values
        for field, override in kwargs.items():
            getattr(self.params, field).val = override
class RNNCellBase(_Factory):
    """Factory producing randomized constructor arguments for RNNCellBase."""

    def __init__(self, **kwargs):
        _Factory.__init__(self)
        # ordered argument template for the base cell
        self.template_fn = namedtuple(
            "RNNCellBase", ["input_size", "hidden_size", "bias", "num_chunks"]
        )
        # NOTE(review): unlike the other factories here, bias is a plain
        # Param rather than a BinaryParam -- presumably intentional for the
        # abstract base class, but worth confirming.
        defaults = {
            "input_size": IntParam(name="input_size", default=1),
            "hidden_size": Param(name="hidden_size", default=None),
            "bias": Param(name="bias", default=None),
            "num_chunks": Param(name="num_chunks", default=None),
        }
        self.params = self.template_fn(**defaults)
        # caller-supplied overrides pin individual parameter values
        for field, override in kwargs.items():
            getattr(self.params, field).val = override
class RNNCell(_Factory):
    """Factory producing randomized constructor arguments for an RNNCell."""

    def __init__(self, **kwargs):
        _Factory.__init__(self)
        # ordered argument template for the cell
        self.template_fn = namedtuple(
            "RNNCell", ["input_size", "hidden_size", "bias", "nonlinearity"]
        )
        defaults = {
            "input_size": IntParam(name="input_size", default=1),
            "hidden_size": Param(name="hidden_size", default=None),
            "bias": BinaryParam(name="bias", default=True, true_prob=0.5),
            # only one choice ("tanh"), drawn with probability 1
            "nonlinearity": ChoiceParam(
                name="nonlinearity", choices=("tanh",), cprobs=(1,), default="tanh"
            ),
        }
        self.params = self.template_fn(**defaults)
        # caller-supplied overrides pin individual parameter values
        for field, override in kwargs.items():
            getattr(self.params, field).val = override
class RNNBase(_Factory):
    """Factory producing randomized constructor arguments for RNNBase.

    Same shape as the RNN/LSTM/GRU factories, plus a leading ``mode``
    argument. Keyword arguments pin a parameter to a fixed value.
    """

    def __init__(self, **kwargs):
        _Factory.__init__(self)
        # the namedtuple acts as the ordered argument template for the layer
        self.template_fn = namedtuple(
            "RNNBase",
            [
                "mode",
                "input_size",
                "hidden_size",
                "num_layers",
                "bias",
                "batch_first",
                "dropout",
                "bidirectional",
            ],
        )
        self.params = self.template_fn(
            mode=Param(name="mode", default=None),
            input_size=IntParam(name="input_size", default=1),
            hidden_size=Param(name="hidden_size", default=None),
            num_layers=IntParam(name="num_layers", default=1),
            bias=BinaryParam(name="bias", default=True, true_prob=0.5),
            batch_first=BinaryParam(name="batch_first", default=False, true_prob=0.5),
            # FIX: dropout is a float probability in [0, 1); the original
            # IntParam with a float default (0.0) was a typo -- FloatParam
            # is imported at the top of this module but was never used.
            dropout=FloatParam(name="dropout", default=0.0),
            bidirectional=BinaryParam(
                name="bidirectional", default=False, true_prob=0.5
            ),
        )
        # caller-supplied overrides pin individual parameter values
        for k, v in kwargs.items():
            getattr(self.params, k).val = v
| 35.561856
| 88
| 0.551529
| 731
| 6,899
| 4.949384
| 0.090287
| 0.059701
| 0.061913
| 0.041459
| 0.856551
| 0.843836
| 0.843836
| 0.843836
| 0.843836
| 0.843836
| 0
| 0.01086
| 0.319322
| 6,899
| 193
| 89
| 35.746114
| 0.759583
| 0
| 0
| 0.711864
| 0
| 0
| 0.121467
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.045198
| false
| 0
| 0.022599
| 0
| 0.112994
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
744d71ef3fc7d12fe14127fb797f97e81d72faaf
| 14,719
|
py
|
Python
|
appengine/monorail/services/test/spam_svc_test.py
|
mcgreevy/chromium-infra
|
09064105713603f7bf75c772e8354800a1bfa256
|
[
"BSD-3-Clause"
] | 1
|
2018-01-02T05:47:07.000Z
|
2018-01-02T05:47:07.000Z
|
appengine/monorail/services/test/spam_svc_test.py
|
mcgreevy/chromium-infra
|
09064105713603f7bf75c772e8354800a1bfa256
|
[
"BSD-3-Clause"
] | null | null | null |
appengine/monorail/services/test/spam_svc_test.py
|
mcgreevy/chromium-infra
|
09064105713603f7bf75c772e8354800a1bfa256
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Tests for the spam service."""
import unittest
import mox
from google.appengine.ext import testbed
import settings
from framework import sql
from proto import user_pb2
from proto import tracker_pb2
from services import spam_svc
from testing import fake
class SpamServiceTest(unittest.TestCase):
  """Tests for SpamService flag handling, moderation queue and classifier.

  Python 2 code (long literals like 111L, mox mocks). Each test scripts
  exact SQL-table expectations with mox, replays them, exercises the
  service, then verifies every expected call happened.
  """

  def setUp(self):
    # GAE testbed supplies stub services; mox mocks the SQL table layer.
    self.testbed = testbed.Testbed()
    self.testbed.activate()

    self.mox = mox.Mox()
    self.mock_report_tbl = self.mox.CreateMock(sql.SQLTableManager)
    self.mock_verdict_tbl = self.mox.CreateMock(sql.SQLTableManager)
    self.mock_issue_tbl = self.mox.CreateMock(sql.SQLTableManager)
    self.cnxn = self.mox.CreateMock(sql.MonorailConnection)
    self.issue_service = fake.IssueService()
    self.spam_service = spam_svc.SpamService()
    # swap the real tables for mocks so each test can set expectations
    self.spam_service.report_tbl = self.mock_report_tbl
    self.spam_service.verdict_tbl = self.mock_verdict_tbl
    self.spam_service.issue_tbl = self.mock_issue_tbl

  def tearDown(self):
    self.testbed.deactivate()
    self.mox.UnsetStubs()
    self.mox.ResetAll()

  def testLookupFlaggers(self):
    # one issue-level flag (comment_id None) and one comment-level flag
    self.mock_report_tbl.Select(
        self.cnxn, cols=['user_id', 'comment_id'],
        issue_id=234).AndReturn([[111L, None], [222L, 1]])
    self.mox.ReplayAll()

    issue_reporters, comment_reporters = (
        self.spam_service.LookupIssueFlaggers(self.cnxn, 234))
    self.mox.VerifyAll()
    self.assertItemsEqual([111L], issue_reporters)
    self.assertEqual({1: [222L]}, comment_reporters)

  def testFlagIssues_overThresh(self):
    # reaching the flag threshold records a spam verdict and updates the issue
    issue = fake.MakeTestIssue(
        project_id=789, local_id=1, reporter_id=111L, owner_id=456,
        summary='sum', status='Live', issue_id=78901)

    self.mock_report_tbl.InsertRows(self.cnxn,
        ['issue_id', 'reported_user_id', 'user_id'],
        [(78901, 111L, 111L)], ignore=True)

    self.mock_report_tbl.Select(self.cnxn,
        cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
        issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh)])
    self.mock_verdict_tbl.Select(
        self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
        group_by=['issue_id'], issue_id=[78901]).AndReturn([])
    self.mock_verdict_tbl.InsertRows(
        self.cnxn, ['issue_id', 'is_spam', 'reason', 'project_id'],
        [(78901, True, 'threshold', 789)], ignore=True)

    self.mox.ReplayAll()
    self.spam_service.FlagIssues(
        self.cnxn, self.issue_service, [issue], 111L, True)
    self.mox.VerifyAll()
    self.assertIn(issue, self.issue_service.updated_issues)

  def testFlagIssues_underThresh(self):
    # below the threshold no verdict is written and the issue is untouched
    issue = fake.MakeTestIssue(
        project_id=789, local_id=1, reporter_id=111L, owner_id=456,
        summary='sum', status='Live', issue_id=78901)

    self.mock_report_tbl.InsertRows(self.cnxn,
        ['issue_id', 'reported_user_id', 'user_id'],
        [(78901, 111L, 111L)], ignore=True)

    self.mock_report_tbl.Select(self.cnxn,
        cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
        issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh - 1)])
    self.mock_verdict_tbl.Select(
        self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
        group_by=['issue_id'], issue_id=[78901]).AndReturn([])

    self.mox.ReplayAll()
    self.spam_service.FlagIssues(
        self.cnxn, self.issue_service, [issue], 111L, True)
    self.mox.VerifyAll()

    self.assertNotIn(issue, self.issue_service.updated_issues)

  def testUnflagIssue_overThresh(self):
    # removing one flag while still at/over threshold keeps the issue spammy
    issue = fake.MakeTestIssue(
        project_id=789, local_id=1, reporter_id=111L, owner_id=456,
        summary='sum', status='Live', issue_id=78901, is_spam=True)
    self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
        comment_id=None, user_id=111L)
    self.mock_report_tbl.Select(self.cnxn,
        cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
        issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh)])
    self.mock_verdict_tbl.Select(
        self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
        group_by=['issue_id'], issue_id=[78901]).AndReturn([])

    self.mox.ReplayAll()
    self.spam_service.FlagIssues(
        self.cnxn, self.issue_service, [issue], 111L, False)
    self.mox.VerifyAll()

    self.assertNotIn(issue, self.issue_service.updated_issues)
    self.assertEqual(True, issue.is_spam)

  def testUnflagIssue_underThresh(self):
    # dropping below threshold writes a ham verdict and clears is_spam
    issue = fake.MakeTestIssue(
        project_id=789, local_id=1, reporter_id=111L, owner_id=456,
        summary='sum', status='Live', issue_id=78901, is_spam=True)
    self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
        comment_id=None, user_id=111L)
    self.mock_report_tbl.Select(self.cnxn,
        cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
        issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh - 1)])
    self.mock_verdict_tbl.Select(
        self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
        group_by=['issue_id'], issue_id=[78901]).AndReturn([])
    self.mock_verdict_tbl.InsertRows(
        self.cnxn, ['issue_id', 'is_spam', 'reason', 'project_id'],
        [(78901, False, 'threshold', 789)], ignore=True)

    self.mox.ReplayAll()
    self.spam_service.FlagIssues(
        self.cnxn, self.issue_service, [issue], 111L, False)
    self.mox.VerifyAll()

    self.assertIn(issue, self.issue_service.updated_issues)
    self.assertEqual(False, issue.is_spam)

  def testUnflagIssue_underThreshNoManualOerride(self):
    # a prior manual verdict must not be overridden by threshold logic
    issue = fake.MakeTestIssue(
        project_id=789, local_id=1, reporter_id=111L, owner_id=456,
        summary='sum', status='Live', issue_id=78901, is_spam=True)
    self.mock_report_tbl.Delete(self.cnxn, issue_id=[issue.issue_id],
        comment_id=None, user_id=111L)
    self.mock_report_tbl.Select(self.cnxn,
        cols=['issue_id', 'COUNT(*)'], group_by=['issue_id'],
        issue_id=[78901]).AndReturn([(78901, settings.spam_flag_thresh - 1)])
    self.mock_verdict_tbl.Select(
        self.cnxn, cols=['issue_id', 'reason', 'MAX(created)'],
        group_by=['issue_id'],
        issue_id=[78901]).AndReturn([(78901, 'manual', '')])

    self.mox.ReplayAll()
    self.spam_service.FlagIssues(
        self.cnxn, self.issue_service, [issue], 111L, False)
    self.mox.VerifyAll()

    self.assertNotIn(issue, self.issue_service.updated_issues)
    self.assertEqual(True, issue.is_spam)

  def testGetIssueClassifierQueue_noVerdicts(self):
    # empty moderation queue: both the page query and the count return nothing
    self.mock_verdict_tbl.Select(self.cnxn,
        cols=['issue_id', 'is_spam', 'reason', 'classifier_confidence',
              'created'],
        where=[
            ('project_id = %s', [789]),
            ('classifier_confidence <= %s',
                [settings.classifier_moderation_thresh]),
            ('overruled = %s', [False]),
            ('issue_id IS NOT NULL', []),
        ],
        order_by=[
            ('classifier_confidence ASC', []),
            ('created ASC', [])
        ],
        group_by=['issue_id'],
        offset=0,
        limit=10,
    ).AndReturn([])

    self.mock_verdict_tbl.SelectValue(self.cnxn,
        col='COUNT(*)',
        where=[
            ('project_id = %s', [789]),
            ('classifier_confidence <= %s',
                [settings.classifier_moderation_thresh]),
            ('overruled = %s', [False]),
            ('issue_id IS NOT NULL', []),
        ]).AndReturn(0)

    self.mox.ReplayAll()
    res, count = self.spam_service.GetIssueClassifierQueue(
        self.cnxn, self.issue_service, 789)
    self.mox.VerifyAll()

    self.assertEqual([], res)
    self.assertEqual(0, count)

  def testGetIssueClassifierQueue_someVerdicts(self):
    # one page row plus a larger total count; verify field mapping
    self.mock_verdict_tbl.Select(self.cnxn,
        cols=['issue_id', 'is_spam', 'reason', 'classifier_confidence',
              'created'],
        where=[
            ('project_id = %s', [789]),
            ('classifier_confidence <= %s',
                [settings.classifier_moderation_thresh]),
            ('overruled = %s', [False]),
            ('issue_id IS NOT NULL', []),
        ],
        order_by=[
            ('classifier_confidence ASC', []),
            ('created ASC', [])
        ],
        group_by=['issue_id'],
        offset=0,
        limit=10,
    ).AndReturn([[78901, 0, "classifier", 0.9, "2015-12-10 11:06:24"]])

    self.mock_verdict_tbl.SelectValue(self.cnxn,
        col='COUNT(*)',
        where=[
            ('project_id = %s', [789]),
            ('classifier_confidence <= %s',
                [settings.classifier_moderation_thresh]),
            ('overruled = %s', [False]),
            ('issue_id IS NOT NULL', []),
        ]).AndReturn(10)

    self.mox.ReplayAll()
    res, count = self.spam_service.GetIssueClassifierQueue(
        self.cnxn, self.issue_service, 789)
    self.mox.VerifyAll()

    self.assertEqual(1, len(res))
    self.assertEqual(10, count)
    self.assertEqual(78901, res[0].issue_id)
    self.assertEqual(False, res[0].is_spam)
    self.assertEqual("classifier", res[0].reason)
    self.assertEqual(0.9, res[0].classifier_confidence)
    self.assertEqual("2015-12-10 11:06:24", res[0].verdict_time)

  def testIsExempt_RegularUser(self):
    # ordinary addresses (including lookalike domains) are not exempt
    author = user_pb2.MakeUser(111L, email='test@example.com')
    self.assertFalse(self.spam_service._IsExempt(author, False))
    author = user_pb2.MakeUser(111L, email='test@chromium.org.example.com')
    self.assertFalse(self.spam_service._IsExempt(author, False))

  def testIsExempt_ProjectMember(self):
    author = user_pb2.MakeUser(111L, email='test@example.com')
    self.assertTrue(self.spam_service._IsExempt(author, True))

  def testIsExempt_WhitelistedDomain(self):
    author = user_pb2.MakeUser(111L, email='test@google.com')
    self.assertTrue(self.spam_service._IsExempt(author, False))

  def testIsExempt_TrustedNotToSpam(self):
    author = user_pb2.MakeUser(111L, email='test@example.com')
    author.ignore_action_limits = True
    self.assertTrue(self.spam_service._IsExempt(author, False))

  def testClassifyIssue_spam(self):
    # non-exempt reporters (including lookalike domains) get the
    # classifier's verdict
    issue = fake.MakeTestIssue(
        project_id=789, local_id=1, reporter_id=111L, owner_id=456,
        summary='sum', status='Live', issue_id=78901, is_spam=True)
    self.spam_service._predict = lambda body: (
        {'outputLabel': 'spam'})

    # Prevent missing service inits to fail the test.
    self.spam_service.prediction_service = True

    comment_pb = tracker_pb2.IssueComment()
    comment_pb.content = "this is spam"
    reporter = user_pb2.MakeUser(111L, email='test@test.com')
    res = self.spam_service.ClassifyIssue(issue, comment_pb, reporter, False)
    self.assertEqual('spam', res['outputLabel'])

    reporter.email = 'test@chromium.org.spam.com'
    res = self.spam_service.ClassifyIssue(issue, comment_pb, reporter, False)
    self.assertEqual('spam', res['outputLabel'])

    reporter.email = 'test.google.com@test.com'
    res = self.spam_service.ClassifyIssue(issue, comment_pb, reporter, False)
    self.assertEqual('spam', res['outputLabel'])

  def testClassifyIssue_Whitelisted(self):
    # whitelisted domains bypass the classifier entirely
    issue = fake.MakeTestIssue(
        project_id=789, local_id=1, reporter_id=111L, owner_id=456,
        summary='sum', status='Live', issue_id=78901, is_spam=True)
    self.spam_service._predict = lambda *args: {
        'outputLabel': 'this should not be called'
    }

    # Prevent missing service inits to fail the test.
    self.spam_service.prediction_service = True

    comment_pb = tracker_pb2.IssueComment()
    comment_pb.content = "this is spam"
    reporter = user_pb2.MakeUser(111L, email='test@google.com')
    res = self.spam_service.ClassifyIssue(issue, comment_pb, reporter, False)
    self.assertEqual('ham', res['outputLabel'])

    reporter.email = 'test@chromium.org'
    res = self.spam_service.ClassifyIssue(issue, comment_pb, reporter, False)
    self.assertEqual('ham', res['outputLabel'])

  def testClassifyIssue_IgnoreActionLimitsAndSpam(self):
    # trusted (ignore_action_limits) reporters also bypass the classifier
    issue = fake.MakeTestIssue(
        project_id=789, local_id=1, reporter_id=111L, owner_id=456,
        summary='sum', status='Live', issue_id=78901, is_spam=True)
    self.spam_service._predict = lambda *args: {
        'outputLabel': 'this should not be called'
    }

    # Prevent missing service inits to fail the test.
    self.spam_service.prediction_service = True

    comment_pb = tracker_pb2.IssueComment()
    comment_pb.content = "this is spam"
    reporter = user_pb2.MakeUser(111L, email='test@example.com')
    reporter.ignore_action_limits = True
    res = self.spam_service.ClassifyIssue(issue, comment_pb, reporter, False)
    self.assertEqual('ham', res['outputLabel'])

  def testClassifyComment_spam(self):
    # same classifier behavior for comments as for issues
    self.spam_service._predict = lambda body: (
        {'outputLabel': 'spam'})

    # Prevent missing service inits to fail the test.
    self.spam_service.prediction_service = True

    commenter = user_pb2.MakeUser(111L, email='test@test.com')
    res = self.spam_service.ClassifyComment('this is spam', commenter, False)
    self.assertEqual('spam', res['outputLabel'])

    commenter.email = 'test@chromium.org.spam.com'
    res = self.spam_service.ClassifyComment('this is spam', commenter, False)
    self.assertEqual('spam', res['outputLabel'])

    commenter.email = 'test.google.com@test.com'
    res = self.spam_service.ClassifyComment('this is spam', commenter, False)
    self.assertEqual('spam', res['outputLabel'])

  def testClassifyComment_Whitelisted(self):
    # whitelisted commenters bypass the classifier
    self.spam_service._predict = lambda *args: {
        'outputLabel': 'this should not be called'
    }

    # Prevent missing service inits to fail the test.
    self.spam_service.prediction_service = True

    commenter = user_pb2.MakeUser(111L, email='test@google.com')
    res = self.spam_service.ClassifyComment('this is spam', commenter, False)
    self.assertEqual('ham', res['outputLabel'])

    commenter.email = 'test@chromium.org'
    res = self.spam_service.ClassifyComment('this is spam', commenter, False)
    self.assertEqual('ham', res['outputLabel'])

  def testClassifyComment_IgnoreActionLimitsAndSpam(self):
    # trusted commenters bypass the classifier
    self.spam_service._predict = lambda *args: {
        'outputLabel': 'this should not be called'
    }

    # Prevent missing service inits to fail the test.
    self.spam_service.prediction_service = True

    commenter = user_pb2.MakeUser(111L, email='test@example.com')
    commenter.ignore_action_limits = True
    res = self.spam_service.ClassifyComment('this is spam', commenter, False)
    self.assertEqual('ham', res['outputLabel'])
| 38.430809
| 77
| 0.672872
| 1,827
| 14,719
| 5.224959
| 0.110016
| 0.042531
| 0.064425
| 0.023151
| 0.820972
| 0.809659
| 0.806306
| 0.795307
| 0.753405
| 0.742196
| 0
| 0.034869
| 0.187513
| 14,719
| 382
| 78
| 38.531414
| 0.763358
| 0.034106
| 0
| 0.69967
| 0
| 0
| 0.130186
| 0.020957
| 0
| 0
| 0
| 0
| 0.118812
| 0
| null | null | 0
| 0.029703
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7471504461794ecaeb1a5979afa34183aa804c70
| 94
|
py
|
Python
|
keyboards/inline/__init__.py
|
NikolaySimakov/Shop-bot
|
c13d5a2b91d9524af156948ff0014ff5357c376c
|
[
"MIT"
] | 50
|
2020-09-27T13:27:02.000Z
|
2022-03-28T13:11:33.000Z
|
keyboards/inline/__init__.py
|
NikolaySimakov/Shop-bot
|
c13d5a2b91d9524af156948ff0014ff5357c376c
|
[
"MIT"
] | null | null | null |
keyboards/inline/__init__.py
|
NikolaySimakov/Shop-bot
|
c13d5a2b91d9524af156948ff0014ff5357c376c
|
[
"MIT"
] | 18
|
2021-02-06T16:54:50.000Z
|
2022-03-25T07:49:37.000Z
|
from . import products_from_catalog
from . import products_from_cart
from . import categories
| 23.5
| 35
| 0.840426
| 13
| 94
| 5.769231
| 0.461538
| 0.4
| 0.48
| 0.586667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 94
| 3
| 36
| 31.333333
| 0.914634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
779242e4906482c689dc7ffbc3cec9b7fc29814c
| 9,376
|
py
|
Python
|
tests/test_permissions.py
|
tordne/Flask-Authorize
|
c440e3328e7fb509ecb18acf77d07aa8a0fe2dde
|
[
"MIT"
] | null | null | null |
tests/test_permissions.py
|
tordne/Flask-Authorize
|
c440e3328e7fb509ecb18acf77d07aa8a0fe2dde
|
[
"MIT"
] | null | null | null |
tests/test_permissions.py
|
tordne/Flask-Authorize
|
c440e3328e7fb509ecb18acf77d07aa8a0fe2dde
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Testing for access control list authorization.
#
# ------------------------------------------------
# imports
# -------
from sqlalchemy import and_
from flask import g
from .fixtures import authorize, Article, ArticleFactory
# helpers
# -------
def query(name, check):
    """Return all Articles whose name contains *name* and that pass the
    given authorization *check* (e.g. 'read', 'update', 'delete')."""
    name_matches = Article.name.contains(name)
    is_authorized = Article.authorized(check)
    return Article.query.filter(and_(name_matches, is_authorized)).all()
# session
# -------
class TestOtherPermissions(object):
    """Permission checks for the "other" slot of an article's permission
    mask: g.user is neither the article's owner nor in its group.

    The octal-style masks read owner/group/other; the tests below show
    that in the "other" digit 1 grants delete, 2 grants read and 4
    grants update (confirmed by the assertions, not by external docs).
    """

    def test_other_delete(self, client, reader, editor):
        # other open delete permissions ('001': delete granted to other)
        g.user = None
        article = ArticleFactory.create(
            name='Other Delete Open Article',
            owner=editor,
            group=editor.groups[0]
        ).set_permissions('001')
        g.user = reader
        assert authorize.delete(article)
        assert query(article.name, 'delete')
        # other closed delete permissions ('770': other digit is 0)
        g.user = None
        article = ArticleFactory.create(
            name='Other Delete Closed Article',
            owner=editor,
            group=editor.groups[0]
        ).set_permissions('770')
        g.user = reader
        assert not authorize.delete(article)
        assert not query(article.name, 'delete')
        return

    def test_other_read(self, client, reader, editor):
        # other open read permissions ('002': read granted to other)
        g.user = None
        article = ArticleFactory.create(
            name='Other Read Open Article',
            owner=editor,
            group=editor.groups[0]
        ).set_permissions('002')
        g.user = reader
        assert authorize.read(article)
        assert query(article.name, 'read')
        # other closed read permissions ('660': other digit is 0)
        g.user = None
        article = ArticleFactory.create(
            name='Other Read Closed Article',
            owner=editor,
            group=editor.groups[0]
        ).set_permissions('660')
        g.user = reader
        assert not authorize.read(article)
        assert not query(article.name, 'read')
        return

    def test_other_update(self, reader, editor):
        # other open update permissions ('004': update granted to other)
        g.user = None
        article = ArticleFactory.create(
            name='Other Write Open Article',
            owner=editor,
            group=editor.groups[0]
        ).set_permissions('004')
        g.user = reader
        assert authorize.update(article)
        assert query(article.name, 'update')
        # other closed update permissions ('662': other digit grants
        # read only, not update)
        g.user = None
        article = ArticleFactory.create(
            name='Other Write Closed Article',
            owner=editor,
            group=editor.groups[0]
        ).set_permissions('662')
        g.user = reader
        assert not authorize.update(article)
        assert not query(article.name, 'update')
        return

    def test_other_custom(self, reader, editor):
        # other open custom permissions (named permission in the other list)
        g.user = None
        article = ArticleFactory.create(
            name='Other Custom Open Article',
            owner=editor,
            group=editor.groups[0]
        ).set_permissions(other=['custom'])
        g.user = reader
        assert authorize.custom(article)
        assert query(article.name, 'custom')
        # other closed custom permissions (no custom grant at all)
        g.user = None
        article = ArticleFactory.create(
            name='Other Custom Closed Article',
            owner=editor,
            group=editor.groups[0]
        )
        g.user = reader
        assert not authorize.custom(article)
        assert not query(article.name, 'custom')
        return
class TestOwnerPermissions(object):
    """Permission checks for the "owner" slot of an article's permission
    mask: g.user is the article's owner (the `reader` fixture here).

    In the owner digit 1 grants delete, 2 grants read and 4 grants
    update, mirroring the "other" tests above.
    """

    def test_owner_delete(self, client, reader, editor):
        g.user = reader
        # owner open delete permissions ('100': delete granted to owner)
        article = ArticleFactory.create(
            name='Owner Delete Open Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('100')
        assert authorize.delete(article)
        assert query(article.name, 'delete')
        # owner closed delete permissions ('070': owner digit is 0)
        article = ArticleFactory.create(
            name='Owner Delete Closed Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('070')
        assert not authorize.delete(article)
        assert not query(article.name, 'delete')
        return

    def test_owner_read(self, client, reader, editor):
        g.user = reader
        # owner open read permissions ('200': read granted to owner)
        article = ArticleFactory.create(
            name='Owner Read Open Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('200')
        assert authorize.read(article)
        assert query(article.name, 'read')
        # owner closed read permissions ('170': owner digit grants
        # delete only, not read)
        article = ArticleFactory.create(
            name='Owner Read Closed Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('170')
        assert not authorize.read(article)
        assert not query(article.name, 'read')
        return

    def test_owner_update(self, reader, editor):
        g.user = reader
        # owner open update permissions ('400': update granted to owner)
        article = ArticleFactory.create(
            name='Owner Write Open Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('400')
        assert authorize.update(article)
        assert query(article.name, 'update')
        # owner closed update permissions ('270': owner digit grants
        # read only, not update)
        article = ArticleFactory.create(
            name='Owner Write Closed Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('270')
        assert not authorize.update(article)
        assert not query(article.name, 'update')
        return

    def test_owner_custom(self, reader, editor):
        g.user = reader
        # owner open custom permissions (named permission in the owner list)
        article = ArticleFactory.create(
            name='Owner Custom Open Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions(owner=['custom'])
        assert authorize.custom(article)
        assert query(article.name, 'custom')
        # owner closed custom permissions (no custom grant at all)
        article = ArticleFactory.create(
            name='Owner Custom Closed Article',
            owner=reader,
            group=editor.groups[0]
        )
        assert not authorize.custom(article)
        assert not query(article.name, 'custom')
        return
class TestGroupPermissions(object):
    """Permission checks for the "group" slot of an article's permission
    mask: g.user (the `editor` fixture) is in the article's group but is
    not its owner.

    In the group digit 1 grants delete, 2 grants read and 4 grants
    update, mirroring the other test classes in this module.
    """

    def test_group_delete(self, client, reader, editor):
        g.user = editor
        # group open delete permissions ('010': delete granted to group)
        article = ArticleFactory.create(
            name='Group Delete Open Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('010')
        assert authorize.delete(article)
        assert query(article.name, 'delete')
        # group closed delete permissions ('700': group digit is 0)
        article = ArticleFactory.create(
            name='Group Delete Closed Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('700')
        assert not authorize.delete(article)
        assert not query(article.name, 'delete')
        return

    def test_group_read(self, client, reader, editor):
        g.user = editor
        # group open read permissions ('020': read granted to group)
        article = ArticleFactory.create(
            name='Group Read Open Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('020')
        assert authorize.read(article)
        assert query(article.name, 'read')
        # group closed read permissions ('710': group digit grants
        # delete only, not read)
        article = ArticleFactory.create(
            name='Group Read Closed Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('710')
        assert not authorize.read(article)
        assert not query(article.name, 'read')
        return

    def test_group_update(self, reader, editor):
        g.user = editor
        # group open update permissions ('040': update granted to group)
        article = ArticleFactory.create(
            name='Group Write Open Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('040')
        assert authorize.update(article)
        assert query(article.name, 'update')
        # group closed update permissions ('720': group digit grants
        # read only, not update)
        article = ArticleFactory.create(
            name='Group Write Closed Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions('720')
        assert not authorize.update(article)
        assert not query(article.name, 'update')
        return

    def test_group_custom(self, reader, editor):
        g.user = editor
        # group open custom permissions (named permission in group list).
        # Fix: renamed from 'Group Write Open Article' (copy-paste from
        # test_group_update) — query() matches by name *contains*, so the
        # shared name let articles created by the update test match the
        # queries made here.
        article = ArticleFactory.create(
            name='Group Custom Open Article',
            owner=reader,
            group=editor.groups[0]
        ).set_permissions(group=['custom'])
        assert authorize.custom(article)
        assert query(article.name, 'custom')
        # group closed custom permissions (no custom grant at all)
        article = ArticleFactory.create(
            name='Group Custom Closed Article',
            owner=reader,
            group=editor.groups[0]
        )
        assert not authorize.custom(article)
        assert not query(article.name, 'custom')
        return
| 29.859873
| 56
| 0.588417
| 964
| 9,376
| 5.674274
| 0.080913
| 0.095978
| 0.118464
| 0.136015
| 0.90841
| 0.889397
| 0.876965
| 0.873309
| 0.863437
| 0.857952
| 0
| 0.012261
| 0.31282
| 9,376
| 313
| 57
| 29.955272
| 0.836722
| 0.09439
| 0
| 0.704846
| 0
| 0
| 0.095565
| 0
| 0
| 0
| 0
| 0
| 0.211454
| 1
| 0.057269
| false
| 0
| 0.013216
| 0.004405
| 0.140969
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77f05920b3c1eb8ac1d38d8c9c76582bc01b49e2
| 43,236
|
py
|
Python
|
Resume-Parser-master-new/updateYamlViaScraping.py
|
eitrheim/Resume-Screening-and-Selection
|
4ee2dd0d6ba917bcf244c704ef5042fe7596e600
|
[
"MIT"
] | 14
|
2020-02-28T22:18:19.000Z
|
2022-03-27T17:35:21.000Z
|
Resume-Parser-master-new/updateYamlViaScraping.py
|
eitrheim/Resume-Screening-and-Selection
|
4ee2dd0d6ba917bcf244c704ef5042fe7596e600
|
[
"MIT"
] | 1
|
2020-03-30T12:47:08.000Z
|
2020-03-30T12:47:08.000Z
|
Resume-Parser-master-new/updateYamlViaScraping.py
|
eitrheim/Resume-Screening-and-Selection
|
4ee2dd0d6ba917bcf244c704ef5042fe7596e600
|
[
"MIT"
] | 7
|
2020-05-06T04:33:55.000Z
|
2022-03-23T08:43:51.000Z
|
import yaml
from bs4 import BeautifulSoup as bs
import requests
import re
import itertools
from operator import itemgetter
def update_yaml(main_section, sub_section, wiki_extension):
    """Scrape the <li> items of a Wikipedia page and append them, cleaned
    and lower-cased, to config[main_section][sub_section] in
    confs/config.yaml, then sort the list and drop duplicates.

    main_section / sub_section: keys into the YAML config mapping.
    wiki_extension: article path appended to https://en.wikipedia.org/wiki/.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            # Report the parse error; the lookup below will then fail loudly.
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/' + wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    # <li> elements carrying exactly one of these attributes are page
    # chrome (navigation, references), not list content.
    skip_attrs = (['class'], ['id'], ['style'])
    # The first <li> whose text matches one of these titles marks the
    # start of the trailing see-also/category section; stop scraping.
    stop_titles = {
        'Private equity firm',
        'Fitness wear',
        'Canadian Petroleum Companies',
        'Casual',
        'Acorn Computers',
        'Enterprise search',
        'Distance education institutions based in the United States',
        'Electronic design',
        'Yazoo and Mississippi Valley Railroad',
        'Airline codes',
        'Telegram & Gazette',
        'Film treatment',
        'Ocean Freeze Frozen Yogurt',
        'Bass effects',
        'Multinational company topics',
        'Great Books Program',
        'Higher education in the United States',
        'Polytechnics',
    }
    for li in soup.select('li'):
        if list(li.attrs.keys()) in skip_attrs:
            continue
        text = li.text
        if text in stop_titles or 'List of ' in text or 'Lists of ' in text:
            break
        # Drop parentheticals and footnote markers, then punctuation.
        x = text.split(' (')[0].split('[')[0]
        x = re.sub('[,.]', '', x).replace('-', '').replace('\'', '').lstrip()
        print(x)
        data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set().  The original
    # sorted with key=lambda x: x[0] (first character only), which left
    # equal entries non-adjacent so itertools.groupby missed duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_dash_comma_split(main_section, sub_section, wiki_extension):
    """Scrape the <li> items of a Wikipedia page, additionally truncating
    each entry at the first dash or comma, and append the cleaned,
    lower-cased results to config[main_section][sub_section] in
    confs/config.yaml, then sort and drop duplicates.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/' + wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    # <li> elements carrying exactly one of these attributes are page chrome.
    skip_attrs = (['class'], ['id'], ['style'])
    # First <li> matching one of these titles marks the trailing
    # see-also/category section; stop scraping there.
    stop_titles = {
        '3D printer companies',
        'Electronic design',
        'Official website',
        'Land Grant Colleges',
        'Catholicism portal',
        'Student loan debt',
        'Art school',
    }
    for li in soup.select('li'):
        if list(li.attrs.keys()) in skip_attrs:
            continue
        text = li.text
        if text in stop_titles or 'List of ' in text or 'Lists of ' in text:
            break
        # Keep only the part before any parenthetical, footnote marker,
        # dash or comma, then strip punctuation.
        x = text.split(' (')[0].split('[')[0].split(' –')[0].split(' -')[0].split(',')[0]
        # The original chained two identical .replace('–', ' ') calls;
        # one suffices (same character both times in the source).
        x = re.sub('[,.]', '', x).replace('-', '').replace('\'', '').replace('–', ' ').lstrip()
        print(x)
        data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table1(main_section, sub_section, wiki_extension, col):
    """Scrape column *col* of the first 'sortable wikitable' table on a
    Wikipedia page and append the cleaned, lower-cased cell text to
    config[main_section][sub_section] in confs/config.yaml.

    NOTE(review): raises AttributeError when no matching table exists —
    same behavior as the original.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/'+wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    right_table = soup.find('table', class_='sortable wikitable')
    for row in right_table.findAll('tr'):
        cells = row.findAll('td')
        if len(cells) > 1:  # skip header-only / spacer rows
            # Strip footnote markers, alternates and parentheticals,
            # then punctuation and stray whitespace.
            x = cells[col].text.split('[')[0].split(' /')[0].split('(')[0]
            x = re.sub('[,.]', '', x).replace('-', '').replace('\n', '').replace('\'', '').replace(' ^', '').lstrip()
            print(x)
            data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table2(main_section, sub_section, wiki_extension, col):
    """Scrape column *col* of the first 'wikitable sortable' table on a
    Wikipedia page and append the cleaned, lower-cased cell text to
    config[main_section][sub_section] in confs/config.yaml.

    NOTE(review): raises AttributeError when no matching table exists —
    same behavior as the original.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/'+wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    right_table = soup.find('table', class_='wikitable sortable')
    for row in right_table.findAll('tr'):
        cells = row.findAll('td')
        if len(cells) > 1:  # skip header-only / spacer rows
            x = cells[col].text.split('[')[0].split(' /')[0].split('(')[0]
            x = re.sub('[,.]', '', x).replace('-', '').replace('\n', '').replace('\'', '').replace(' ^', '').lstrip()
            print(x)
            data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table2lists(main_section, sub_section, wiki_extension, col):
    """Scrape column *col* of the first 'wikitable sortable' table on a
    Wikipedia page, split each cell on commas (cells hold lists of
    values), and append every cleaned, lower-cased entry to
    config[main_section][sub_section] in confs/config.yaml.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/'+wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    right_table = soup.find('table', class_='wikitable sortable')
    for row in right_table.findAll('tr'):
        cells = row.findAll('td')
        if len(cells) > 1:  # skip header-only / spacer rows
            # Each cell may hold a comma-separated list; add each entry.
            for x in cells[col].text.split(','):
                x = x.split('[')[0].split(' /')[0].split('(')[0]
                x = re.sub('[.]', '', x).replace('-', '').replace('\n', '').replace('\'', '').replace(' ^', '').lstrip()
                if x != "":  # comma-splitting can leave empty fragments
                    print(x)
                    data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table3(main_section, sub_section, wiki_extension, col):
    """Scrape column *col* of the first 'wikitable plainrowheaders
    sortable' table on a Wikipedia page and append the cleaned,
    lower-cased cell text to config[main_section][sub_section] in
    confs/config.yaml.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/'+wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    right_table = soup.find('table', class_='wikitable plainrowheaders sortable')
    for row in right_table.findAll('tr'):
        cells = row.findAll('td')
        if len(cells) > 1:  # skip header-only / spacer rows
            x = cells[col].text.split('[')[0].split(' /')[0].split('(')[0]
            x = re.sub('[,.]', '', x).replace('-', '').replace('\n', '').replace('\'', '').replace(' ^', '').lstrip()
            print(x)
            data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table4(main_section, sub_section, wiki_extension, col):
    """Scrape column *col* of the first plain 'wikitable' table on a
    Wikipedia page and append the cleaned, lower-cased cell text to
    config[main_section][sub_section] in confs/config.yaml.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/'+wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    right_table = soup.find('table', class_='wikitable')
    for row in right_table.findAll('tr'):
        cells = row.findAll('td')
        if len(cells) > 1:  # skip header-only / spacer rows
            x = cells[col].text.split('[')[0].split(' /')[0].split('(')[0]
            x = re.sub('[,.]', '', x).replace('-', '').replace('\n', '').replace('\'', '').replace(' ^', '').lstrip()
            print(x)
            data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table4all(main_section, sub_section, wiki_extension, col):
    """Scrape column *col* of EVERY 'wikitable' table on a Wikipedia page
    and append the cleaned, lower-cased cell text to
    config[main_section][sub_section] in confs/config.yaml.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/'+wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    for table in soup.findAll('table', class_='wikitable'):
        for row in table.findAll('tr'):
            cells = row.findAll('td')
            if len(cells) > 1:  # skip header-only / spacer rows
                x = cells[col].text.split('[')[0].split(' /')[0].split('(')[0]
                x = re.sub('[,.]', '', x).replace('-', '').replace('\n', '').replace('\'', '').replace(' ^', '').lstrip()
                print(x)
                data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table5(main_section, sub_section, wiki_extension, col):
    """Scrape column *col* of the FIRST table of any kind on a Wikipedia
    page and append the cleaned, lower-cased cell text to
    config[main_section][sub_section] in confs/config.yaml.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/'+wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    right_table = soup.find('table')
    for row in right_table.findAll('tr'):
        cells = row.findAll('td')
        if len(cells) > 1:  # skip header-only / spacer rows
            x = cells[col].text.split('[')[0].split(' /')[0].split('(')[0]
            x = re.sub('[,.]', '', x).replace('-', '').replace('\n', '').replace('\'', '').replace(' ^', '').lstrip()
            print(x)
            data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table6(main_section, sub_section, wiki_extension):
    """Scrape the <th> header cells of a page's sortable wikitable and
    append their cleaned, lower-cased text to
    config[main_section][sub_section] in confs/config.yaml.

    Tries class 'sortable wikitable' first and falls back to
    'wikitable sortable' when the first lookup fails.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/' + wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    # Column headings that are table metadata, not data entries.
    skip_headings = ['Location', 'Partners', 'Industries', 'Assets under management']
    try:
        right_table = soup.find('table', class_='sortable wikitable')
        for row in right_table.findAll('tr'):
            for col in row.findAll('th'):
                for tag in col.find_all(True):
                    if tag.text not in skip_headings:
                        x = tag.text.split(" (")[0]
                        x = re.sub('[,.]', '', x).replace('-', '').replace('\n', '').replace('\'', '')
                        print(x)
                        data_loaded[main_section][sub_section].append(x.lower())
    except AttributeError:
        # Narrowed from a bare except: find() returns None (so .findAll
        # raises AttributeError) when the page uses the other class order.
        right_table = soup.find('table', class_='wikitable sortable')
        for row in right_table.findAll('tr'):
            for col in row.findAll('th'):
                for tag in col.find_all(True):
                    if tag.text not in skip_headings:
                        x = tag.text.split(" (")[0]
                        x = re.sub('[,.-]', '', x).replace('\n', '').replace('\'', '')
                        print(x)
                        # Fix: this branch previously appended x without
                        # .lower(), unlike every other writer here.
                        data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def update_yaml_table7(main_section, sub_section, wiki_extension):
    """Scrape the <th> header cells of the FIRST table on a Wikipedia
    page and append their cleaned, lower-cased text to
    config[main_section][sub_section] in confs/config.yaml.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    print("Accessing Wikipedia")
    html = requests.get('https://en.wikipedia.org/wiki/' + wiki_extension).text
    soup = bs(html, 'html.parser')
    print("Getting List Items and Adding to YAML\n")
    # Column headings that are table metadata, not data entries.
    skip_headings = ['Location', 'Partners', 'Industries', 'Assets under management']
    right_table = soup.find('table')
    for row in right_table.findAll('tr'):
        for col in row.findAll('th'):
            for tag in col.find_all(True):
                if tag.text not in skip_headings:
                    x = tag.text.split(" (")[0]
                    x = re.sub('[,.]', '', x).replace('-', '').replace('\n', '').replace('\'', '')
                    print(x)
                    data_loaded[main_section][sub_section].append(x.lower())
    # Fix: sort on the full string and dedupe via set() — the old
    # key=lambda x: x[0] compared first characters only, so groupby
    # missed non-adjacent duplicates.
    data_loaded[main_section][sub_section] = sorted(set(data_loaded[main_section][sub_section]))
    print("\nSorted and Dropped Duplicates")
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print("Updated Yaml File Saved")
def sort_yaml(main_section, sub_section):
    """Sort config[main_section][sub_section], drop exact duplicates and
    write confs/config.yaml back out.

    Fix: sort on the whole value.  The original itemgetter(0, 1) key
    compared only the first two characters, so equal entries sharing a
    two-character prefix could stay non-adjacent and survive the
    groupby dedupe; it also raised IndexError on entries shorter than
    two characters, which silently skipped the sort entirely.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    # groupby (rather than set) keeps this working even if the entries
    # are unhashable sequences; it only requires them to be sortable.
    items = sorted(data_loaded[main_section][sub_section])
    data_loaded[main_section][sub_section] = list(
        k for k, _ in itertools.groupby(items))
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print(sub_section, "Sorted & Saved")
def sort_yaml_flat(main_section, sub_section):
    """Deduplicate and alphabetically sort the flat list stored at
    ``data_loaded[main_section][sub_section]`` in confs/config.yaml,
    then write the config back to disk.
    """
    with open('confs/config.yaml', 'r') as stream:
        try:
            data_loaded = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
    try:
        unique_entries = set(data_loaded[main_section][sub_section])
        data_loaded[main_section][sub_section] = sorted(unique_entries)
    except IndexError:
        pass
    with open('confs/config.yaml', 'w') as fp:
        yaml.dump(data_loaded, fp)
    print(sub_section, "Sorted & Saved")
############################ updating schools ############################
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_American_colleges_and_universities_abroad', 0)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_NAIH_Institutions')
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_NCAA_Division_II_wrestling_programs', 0)
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_Benedictine_colleges_and_universities', 0)
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_colleges_and_universities_in_the_United_States_by_endowment', 0)
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_schools_of_international_relations_in_the_United_States', 2)
# update_yaml_dash_comma_split('case_agnostic_education', 'community_college', 'List_of_junior_colleges_in_the_United_States')
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_universities_accredited_by_DEAC')
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_medical_schools_in_the_United_States', 1)
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_Lutheran_colleges_and_universities_in_the_United_States', 0)
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_library_science_schools', 1)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_library_science_schools')
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_liberal_arts_colleges_in_the_United_States')
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_law_schools_in_the_United_States', 1)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_land-grant_universities')
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_University_of_Puerto_Rico_campuses', 0)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_Posse_Scholars_school_partnerships')
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_pharmacy_schools_in_the_United_States', 2)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_online_colleges_in_the_United_States')
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_American_universities_with_Olympic_medals', 1)
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'List_of_nursing_schools_in_the_United_States')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'National_Space_Grant_College_and_Fellowship_Program')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'National_Sea_Grant_College_Program')
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_research_universities_in_the_United_States', 0)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_Catholic_universities_and_colleges_in_the_United_States')
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_state_and_territorial_universities_in_the_United_States')
# update_yaml_table4('case_agnostic_education', 'community_college', 'List_of_United_States_technological_universities', 0)
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_schools_accredited_by_the_Association_of_Theological_Schools_in_the_United_States_and_Canada', 0)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_tribal_colleges_and_universities')
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_schools_of_veterinary_medicine')
# update_yaml('case_agnostic_education', 'community_college', 'List_of_vocational_colleges_in_the_United_States')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'List_of_for-profit_universities_and_colleges')
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_largest_universities_and_university_networks_by_enrollment', 1)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_postgraduate-only_institutions')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'List_of_unaccredited_institutions_of_higher_education')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'List_of_international_schools')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'List_of_EQUIS_accredited_institutions')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'List_of_institutions_accredited_by_AMBA')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'List_of_schools_of_international_relations')
# update_yaml_dash_comma_split('case_agnostic_education', 'other_universities', 'List_of_liberal_arts_colleges')
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_lantern_slide_collections', 0)
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_engineering_schools', 1)
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_engineering_schools', 1)
# update_yaml('case_agnostic_education', 'other_universities', 'List_of_art_schools')
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_open_universities', 0)
# update_yaml('case_agnostic_education', 'other_universities', 'National_university')
# if it says community college move it to community/technical college
with open('confs/config.yaml', 'r') as stream:
    data_loaded = yaml.safe_load(stream)
# BUG FIX: the original removed items from 'other_universities' while
# iterating that same list directly, which silently skips the element
# following each removal; it also appended twice (and then crashed on the
# second remove) for a name containing both substrings. Iterating a copy
# and merging the two checks fixes both.
for item in list(data_loaded['case_agnostic_education']['other_universities']):
    if 'Community College' in item or 'Technical College' in item:
        data_loaded['case_agnostic_education']['community_college'].append(item)
        data_loaded['case_agnostic_education']['other_universities'].remove(item)
        print(item, "added to community_college")
with open('confs/config.yaml', 'w') as fp:
    yaml.dump(data_loaded, fp)
# deleting schools in 'other' if they are in another section
with open('confs/config.yaml', 'r') as stream:
    data_loaded = yaml.safe_load(stream)
# Any school already listed under a specific section is redundant in the
# catch-all 'other_universities' list. (Safe: we iterate one list while
# removing from a different one.)
others = data_loaded['case_agnostic_education']['other_universities']
for section, schools in data_loaded['case_agnostic_education'].items():
    if section == 'other_universities':
        continue
    for school in schools:
        if school in others:
            others.remove(school)
            print(school, "deleted from other_universities")
with open('confs/config.yaml', 'w') as fp:
    yaml.dump(data_loaded, fp)
############################ updating companies ############################
# update_yaml('case_agnostic_work', 'company_fin', 'List_of_asset_management_firms')
# update_yaml_table1('case_agnostic_work', 'company_fin', 'List_of_asset_management_firms', 1)
# update_yaml('case_agnostic_work', 'company_fin', 'List_of_investment_banks')
# update_yaml_table6('case_agnostic_work', 'company_fin', 'List_of_venture_capital_firms')
# update_yaml_table2('case_agnostic_work', 'company_fin', 'List_of_private_equity_firms', 0)
# update_yaml_table2('case_agnostic_work', 'company_fin', 'List_of_systemically_important_banks', 0)
# update_yaml_table2('case_agnostic_work', 'company_consumer', 'List_of_swimwear_brands', 0)
# update_yaml_table3('case_agnostic_work', 'company_consumer', 'List_of_sporting_goods_manufacturers', 0)
# update_yaml_table2('case_agnostic_work', 'company_consumer', 'List_of_lingerie_brands', 0)
# update_yaml('case_agnostic_work', 'company_consumer', 'List_of_fitness_wear_brands')
# update_yaml('case_agnostic_work', 'company_energychem', 'List_of_concentrating_solar_thermal_power_companies')
# update_yaml('case_agnostic_work', 'company_energychem', 'List_of_United_States_water_companies')
# update_yaml('case_agnostic_work', 'company_energychem', 'List_of_oilfield_service_companies')
# update_yaml('case_agnostic_work', 'company_industrial', 'List_of_wind_turbine_manufacturers')
# update_yaml_table4('case_agnostic_work', 'company_industrial', 'List_of_traction_motor_manufacturers', 0)
# update_yaml('case_agnostic_work', 'company_industrial', 'List_of_touch-solution_manufacturers')
# update_yaml_table2('case_agnostic_work', 'company_energychem', 'List_of_steel_producers', 13)
# update_yaml_table4('case_agnostic_work', 'company_tech', 'Semiconductor_equipment_sales_leaders_by_year', 1)
# update_yaml_table2('case_agnostic_work', 'company_services', 'List_of_multiple-system_operators', 0)
# update_yaml_table2('case_agnostic_work', 'company_consumer', 'List_of_supermarket_chains', 0)
# update_yaml('case_agnostic_work', 'company_consumer', 'List_of_pharmacies')
# update_yaml_table2('case_agnostic_work', 'company_consumer', 'List_of_chained-brand_hotels', 0)
# update_yaml_table2lists('case_agnostic_work', 'company_consumer', 'List_of_chained-brand_hotels', 6)
# update_yaml_table2lists('case_agnostic_work', 'company_consumer', 'List_of_chained-brand_hotels', 7)
# update_yaml_table2lists('case_agnostic_work', 'company_consumer', 'List_of_chained-brand_hotels', 8)
# update_yaml_table2lists('case_agnostic_work', 'company_consumer', 'List_of_chained-brand_hotels', 9)
# update_yaml_table2lists('case_agnostic_work', 'company_consumer', 'List_of_chained-brand_hotels', 10)
# update_yaml_table2lists('case_agnostic_work', 'company_consumer', 'List_of_chained-brand_hotels', 11)
# update_yaml_table2('case_agnostic_work', 'company_services', 'List_of_largest_law_firms_by_revenue', 1)
# update_yaml_table2('case_agnostic_work', 'company_services', 'List_of_largest_United_States-based_law_firms_by_head_count', 1)
# update_yaml('case_agnostic_work', 'company_services', 'List_of_marketing_research_firms')
# update_yaml('case_agnostic_work', 'company_consumer', 'List of executive search firms')
# update_yaml('case_agnostic_work', 'company_tech', 'List_of_computer_system_manufacturers')
# update_yaml('case_agnostic_work', 'company_tech', 'List_of_computer_hardware_manufacturers')
# update_yaml('case_agnostic_work', 'company_tech', 'List_of_advertising_technology_companies')
# update_yaml_table2('case_agnostic_work', 'company_tech', 'List_of_flash_memory_controller_manufacturers', 0)
# update_yaml_table4('case_agnostic_work', 'company_tech', 'List_of_EDA_companies', 0)
# update_yaml_table2('case_agnostic_work', 'company_tech', 'List_of_electric-vehicle-battery_manufacturers', 0)
# update_yaml_table2('case_agnostic_work', 'company_services', 'List_of_telephone_operating_companies', 1)
# update_yaml('case_agnostic_work', 'company_energychem', 'List_of_silicon_producers')
# update_yaml_table2('case_agnostic_work', 'company_tech', 'List_of_photovoltaics_companies', 0)
# update_yaml_table4all('case_agnostic_work', 'company_consumer', 'List_of_airlines_of_the_United_States', 0)
# update_yaml_table5('case_agnostic_work', 'company_consumer', 'List_of_casinos_in_the_United_States', 0)
# update_yaml_table2('case_agnostic_work', 'company_consumer', 'List_of_cruise_lines', 0)
# update_yaml('case_agnostic_work', 'company_industrial', 'List_of_Class_I_railroads')
# update_yaml('case_agnostic_work', 'company_consumer', 'List_of_charter_airlines')
# update_yaml_table2('case_agnostic_work', 'company_services', 'List_of_largest_container_shipping_companies', 0)
# update_yaml('case_agnostic_work', 'company_tech', 'List_of_system-on-a-chip_suppliers')
# update_yaml_table2('case_agnostic_work', 'company_tech', 'List_of_companies_involved_in_quantum_computing_or_communication', 0)
# update_yaml_table2('case_agnostic_work', 'company_tech', 'List_of_data_recovery_companies', 0)
# update_yaml_table4all('case_agnostic_work', 'company_services', 'List_of_IT_consulting_firms', 0)
# update_yaml_table4('case_agnostic_work', 'company_tech', 'List_of_largest_Internet_companies', 1)
# update_yaml_table4('case_agnostic_work', 'company_fin', 'List_of_hedge_funds', 1)
# update_yaml_table4('case_agnostic_work', 'company_services', 'List_of_newspapers_in_the_United_States', 1)
# update_yaml_table4('case_agnostic_work', 'company_services', 'List_of_newspapers_by_circulation', 0)
# update_yaml('case_agnostic_work', 'company_services', 'List_of_newspapers_serving_cities_over_100,000_in_the_United_States')
# update_yaml_table4all('case_agnostic_work', 'company_consumer', 'List_of_restaurant_chains_in_the_United_States', 0)
# update_yaml_table4all('case_agnostic_work', 'company_industrial', 'List_of_largest_manufacturing_companies_by_revenue', 1)
# update_yaml('case_agnostic_work', 'company_fin', 'List_of_United_States_insurance_companies')
# update_yaml('case_agnostic_work', 'company_fin', 'List_of_international_banking_institutions')
# update_yaml_table4all('case_agnostic_work', 'company_tech', 'Semiconductor_equipment_sales_leaders_by_year', 2)
# update_yaml_table4('case_agnostic_work', 'company_energychem', 'List_of_largest_aluminum_producers_by_output', 1)
# update_yaml_table4('case_agnostic_work', 'company_tech', 'List_of_glossy_display_branding_manufacturers', 0)
# update_yaml_table4('case_agnostic_work', 'company_consumer', 'List_of_bean-to-bar_chocolate_manufacturers', 0)
# update_yaml('case_agnostic_work', 'company_consumer', 'List_of_frozen_yogurt_companies')
# update_yaml('case_agnostic_work', 'company_services', 'List_of_websites_about_food_and_drink')
# update_yaml('case_agnostic_work', 'company_tech', 'List_of_bass_amplifier_and_loudspeaker_manufacturers')
# update_yaml_table4('case_agnostic_work', 'company_tech', 'List_of_digital_camera_brands', 2)
# update_yaml_table4all('case_agnostic_work', 'company_other', 'List_of_SRI_International_spin-offs', 0)
# update_yaml_table4('case_agnostic_work', 'company_other', 'List_of_largest_European_manufacturing_companies_by_revenue', 0)
# update_yaml('case_agnostic_work', 'company_other', 'List_of_multinational_corporations')
# update_yaml_table4('case_agnostic_work', 'company_other', 'List_of_companies_of_the_European_Union', 1)
# update_yaml_table4('case_agnostic_work', 'company_energychem', 'List_of_copper_production_by_company', 1)
# update_yaml_table4all('case_agnostic_work', 'company_other', 'List_of_largest_corporate_profits_and_losses', 1)
# update_yaml('case_agnostic_work', 'company_other', 'List_of_private_security_companies')
# update_yaml_table4all('case_agnostic_work', 'company_tech', 'List_of_private_spaceflight_companies', 0)
# update_yaml('case_agnostic_work', 'company_other', 'List_of_multi-level_marketing_companies')
# update_yaml('case_agnostic_work', 'company_services', 'List_of_television_production_companies')
# update_yaml_table4all('case_agnostic_work', 'company_services', 'List_of_United_States_over-the-air_television_networks', 0)
# update_yaml_table4all('case_agnostic_work', 'company_services', 'List_of_United_States_pay_television_channels', 0)
# update_yaml_dash_comma_split('case_agnostic_work', 'company_other', 'List_of_modeling_agencies')
# update_yaml_dash_comma_split('case_agnostic_work', 'company_tech', 'List_of_3D_printer_manufacturers')
# update_yaml_dash_comma_split('case_agnostic_work', 'company_tech', 'List_of_semiconductor_IP_core_vendors')
# update_yaml_dash_comma_split('case_agnostic_work', 'company_energychem', 'List_of_United_States_electric_companies')
with open('confs/config.yaml', 'r') as stream:
    data_loaded = yaml.safe_load(stream)
# deleting companies in consumer if they are in foodbev
consumer = data_loaded['case_agnostic_work']['company_consumer']
for company in data_loaded['case_agnostic_work']['company_foodbev']:
    if company in consumer:
        consumer.remove(company)
        print(company, "deleted from company_consumer")
# deleting companies in 'other' if they are in another section
# (safe: we iterate each sector list while removing from 'company_other')
other = data_loaded['case_agnostic_work']['company_other']
for sector, companies in data_loaded['case_agnostic_work'].items():
    if sector != 'company_other':
        for company in companies:
            if company in other:
                other.remove(company)
                print(company, "deleted from company_other")
with open('confs/config.yaml', 'w') as fp:
    yaml.dump(data_loaded, fp)
##################################################
# figure out how to scrape these
# https://en.wikipedia.org/wiki/List_of_United_States_natural_gas_companies
# https://en.wikipedia.org/wiki/List_of_oil_exploration_and_production_companies#North_America
# https://en.wikipedia.org/wiki/List_of_modern_armament_manufacturers
# https://en.wikipedia.org/wiki/List_of_automobile_manufacturers#U
# https://en.wikipedia.org/wiki/state_drive_manufacturers
# https://en.wikipedia.org/wiki/List_of_video_game_developers
# https://en.wikipedia.org/wiki/List_of_video_game_publishers
# https://en.wikipedia.org/wiki/List_of_animation_studios
# https://en.wikipedia.org/wiki/List_of_convenience_stores
# https://en.wikipedia.org/wiki/List_of_retailers%27_cooperatives
# https://en.wikipedia.org/wiki/List_of_department_stores_by_country
# https://en.wikipedia.org/wiki/List_of_game_manufacturers
# https://en.wikipedia.org/wiki/List_of_supermarket_chains_in_North_America
# https://en.wikipedia.org/wiki/List_of_bookstore_chains
# https://en.wikipedia.org/wiki/List_of_book_sales_clubs
# https://en.wikipedia.org/wiki/List_of_superstores
# https://en.wikipedia.org/wiki/List_of_cleaning_companies
# https://en.wikipedia.org/wiki/List_of_press_release_agencies
# https://en.wikipedia.org/wiki/List_of_CAx_companies
# https://en.wikipedia.org/wiki/List_of_electronics_brands
# https://en.wikipedia.org/wiki/List_of_bus_operating_companies
# https://en.wikipedia.org/wiki/List_of_big_data_companies
# https://en.wikipedia.org/wiki/Tech_companies_in_the_New_York_metropolitan_area
# https://en.wikipedia.org/wiki/List_of_banks_(alphabetical)
# https://en.wikipedia.org/wiki/List_of_mobile_network_operators
# https://en.wikipedia.org/wiki/List_of_communication_satellite_companies
# https://en.wikipedia.org/wiki/List_of_largest_biomedical_companies_by_revenue
# https://en.wikipedia.org/wiki/List_of_film_distributors_by_country#United_States
# https://en.wikipedia.org/wiki/List_of_solid-state_drive_manufacturers
# https://en.wikipedia.org/wiki/List_of_soft_drink_producers
# https://en.wikipedia.org/wiki/List_of_PLC_manufacturers
# https://en.wikipedia.org/wiki/List_of_major_arms_industry_corporations_by_country
# https://en.wikipedia.org/wiki/List_of_public_corporations_by_market_capitalization
# https://en.wikipedia.org/wiki/List_of_television_manufacturers
# https://en.wikipedia.org/wiki/List_of_flat_panel_display_manufacturers
# https://en.wikipedia.org/wiki/List_of_loudspeaker_manufacturers
# https://en.wikipedia.org/wiki/List_of_unicorn_startup_companies
# https://en.wikipedia.org/wiki/List_of_government-owned_companies
# https://en.wikipedia.org/wiki/List_of_holding_companies
# https://en.wikipedia.org/wiki/List_of_franchises
# https://en.wikipedia.org/wiki/List_of_largest_employers
# https://en.wikipedia.org/wiki/List_of_conglomerates
# https://en.wikipedia.org/wiki/List_of_company_registers#United_States
# https://en.wikipedia.org/wiki/List_of_film_production_companies
# sorting sections
# sort_yaml_flat('case_agnostic_work', 'company_foodbev')
# sort_yaml_flat('case_agnostic_work', 'company_fin')
# sort_yaml_flat('case_agnostic_work', 'company_tech')
# sort_yaml_flat('case_agnostic_work', 'company_services')
# sort_yaml_flat('case_agnostic_work', 'company_health')
# sort_yaml_flat('case_agnostic_work', 'company_energychem')
# sort_yaml_flat('case_agnostic_work', 'company_other')
# sort_yaml_flat('case_agnostic_work', 'company_consumer')
# sort_yaml_flat('case_agnostic_hobbies', 'hobbies')
# sort_yaml('case_agnostic_whole_resume', 'honor_societies')
# sort_yaml('case_agnostic_education', 'community_college')
# sort_yaml('case_agnostic_education', 'other_universities')
# sort_yaml_flat('case_agnostic_work', 'company_foodbev')
sort_yaml('case_agnostic_skill', 'technical_skills')
############################ cleaning up softskills section ############################
with open('confs/config.yaml', 'r') as stream:
    data_loaded = yaml.safe_load(stream)
# Lower-case every soft skill, drop duplicates, and store them sorted.
lowered = sorted({skill.lower() for skill in data_loaded['case_agnostic_whole_resume']['soft_skills']})
data_loaded['case_agnostic_whole_resume']['soft_skills'] = lowered
with open('confs/config.yaml', 'w') as fp:
    yaml.dump(data_loaded, fp)
print("Updated Yaml File Saved")
# already run:
# update_yaml('case_agnostic_work', 'company_foodbev', 'List_of_food_companies')
# update_yaml_table1('case_agnostic_work', 'company_health', 'List_of_largest_biotechnology_and_pharmaceutical_companies', 1)
# update_yaml_table4('case_agnostic_work', 'company_fin', 'List_of_largest_banks', 1)
# update_yaml_table2('case_agnostic_work', 'company_energychem', 'List_of_largest_oil_and_gas_companies_by_revenue', 1)
# update_yaml('case_agnostic_work', 'company_services', 'List_of_management_consulting_firms')
# update_yaml_table2('case_agnostic_work', 'company_energychem', 'List_of_largest_chemical_producers', 1)
# update_yaml('case_agnostic_work', 'company_other', 'List_of_companies_in_the_Chicago_metropolitan_area')
# update_yaml_table2('case_agnostic_work', 'company_other', 'List_of_largest_companies_by_revenue', 0)
# update_yaml('case_agnostic_work', 'company_other', 'List_of_Six_Sigma_companies')
# update_yaml_table4('case_agnostic_work', 'company_other', 'List_of_S%26P_500_companies', 1)
# update_yaml('case_agnostic_work', 'company_health', 'List_of_pharmaceutical_companies')
# list_of_links = []
# print("Accessing Wikipedia")
# html = requests.get('https://en.wikipedia.org/wiki/List_of_college_athletic_programs_by_U.S._state').text
# soup = bs(html, 'html.parser')
# for i, li in enumerate(soup.select('li')):
# if list(li.attrs.keys()) == ['class']:
# pass
# elif list(li.attrs.keys()) == ['id']:
# pass
# elif list(li.attrs.keys()) == ['style']:
# pass
# elif li.text == 'List of Division I athletic directors':
# break
# else:
# list_of_links.append(li.text)
# print(li.text)
#
# for link in list_of_links:
# update_yaml_table4all('case_agnostic_education', 'other_universities', link, 1)
# update_yaml_table7('case_agnostic_education', 'other_universities', 'List_of_NAIA_institutions')
# update_yaml_table7('case_agnostic_education', 'other_universities', 'List_of_NCAA_Division_I_institutions')
# update_yaml_table7('case_agnostic_education', 'other_universities', 'List_of_NCAA_Division_II_institutions')
# update_yaml_table7('case_agnostic_education', 'other_universities', 'List_of_NCAA_Division_III_institutions')
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_USCAA_institutions', 0)
# update_yaml('case_agnostic_education', 'community_college', 'List_of_community_colleges')
# update_yaml_table4all('case_agnostic_education', 'other_universities', 'List_of_NCCAA_institutions', 0)
# update_yaml_table4('case_agnostic_education', 'other_universities', 'List_of_historically_black_colleges_and_universities', 0)
# update_yaml('case_agnostic_hobbies', 'hobbies', 'List_of_hobbies')
| 53.64268
| 176
| 0.709941
| 5,665
| 43,236
| 5.0406
| 0.093557
| 0.040763
| 0.062196
| 0.088601
| 0.83495
| 0.827001
| 0.811872
| 0.762984
| 0.71252
| 0.656733
| 0
| 0.006776
| 0.153506
| 43,236
| 805
| 177
| 53.709317
| 0.773354
| 0.470719
| 0
| 0.826446
| 0
| 0.012397
| 0.196343
| 0.01155
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02686
| false
| 0.049587
| 0.012397
| 0
| 0.039256
| 0.161157
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77fc264b84e19afc24c6ad18d3401a8ccb521f4e
| 13,587
|
py
|
Python
|
app/model/schema/token.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 2
|
2021-08-19T12:35:25.000Z
|
2022-02-16T04:13:38.000Z
|
app/model/schema/token.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 46
|
2021-09-02T03:22:05.000Z
|
2022-03-31T09:20:00.000Z
|
app/model/schema/token.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 1
|
2021-11-17T23:18:27.000Z
|
2021-11-17T23:18:27.000Z
|
"""
Copyright BOOSTRY Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
from typing import (
List,
Optional
)
import math
from pydantic import (
BaseModel,
Field,
validator
)
from web3 import Web3
from .types import (
MMDD_constr,
YYYYMMDD_constr
)
############################
# REQUEST
############################
class IbetStraightBondCreate(BaseModel):
    """ibet Straight Bond schema (Create)"""
    # Required attributes; numeric bounds cap supply at 100M and
    # yen-denominated values at 5B.
    name: str = Field(max_length=100)
    total_supply: int = Field(..., ge=0, le=100_000_000)
    face_value: int = Field(..., ge=0, le=5_000_000_000)
    purpose: str = Field(max_length=2000)
    # Optional attributes (may be omitted on creation).
    symbol: Optional[str] = Field(max_length=100)
    redemption_date: Optional[YYYYMMDD_constr]
    redemption_value: Optional[int] = Field(None, ge=0, le=5_000_000_000)
    return_date: Optional[YYYYMMDD_constr]
    return_amount: Optional[str] = Field(max_length=2000)
    interest_rate: Optional[float] = Field(None, ge=0.0000, le=100.0000)
    interest_payment_date: Optional[List[MMDD_constr]]
    transferable: Optional[bool]
    is_redeemed: Optional[bool]
    status: Optional[bool]
    is_offering: Optional[bool]
    tradable_exchange_contract_address: Optional[str]
    personal_info_contract_address: Optional[str]
    image_url: Optional[List[str]]
    contact_information: Optional[str] = Field(max_length=2000)
    privacy_policy: Optional[str] = Field(max_length=5000)
    transfer_approval_required: Optional[bool]
    is_manual_transfer_approval: Optional[bool]

    @validator("interest_rate")
    def interest_rate_4_decimal_places(cls, v):
        """Reject interest_rate values with more than 4 decimal places."""
        if v is not None:
            # Scale by 10**4 and compare float vs int truncation: any digits
            # beyond the 4th decimal place make the two differ.
            float_data = float(v * 10 ** 4)
            int_data = int(v * 10 ** 4)
            if not math.isclose(int_data, float_data):
                # NOTE(review): message wording differs from the Update
                # schema's "must be rounded to 4 decimal places" — confirm
                # whether the inconsistency is intentional.
                raise ValueError("interest_rate must be less than or equal to four decimal places")
        return v

    @validator("interest_payment_date")
    def interest_payment_date_list_length_less_than_13(cls, v):
        """Allow at most 12 payment dates (presumably one per month)."""
        if v is not None and len(v) >= 13:
            raise ValueError("list length of interest_payment_date must be less than 13")
        return v

    @validator("tradable_exchange_contract_address")
    def tradable_exchange_contract_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress) when supplied."""
        if v is not None and not Web3.isAddress(v):
            raise ValueError("tradable_exchange_contract_address is not a valid address")
        return v

    @validator("personal_info_contract_address")
    def personal_info_contract_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress) when supplied."""
        if v is not None and not Web3.isAddress(v):
            raise ValueError("personal_info_contract_address is not a valid address")
        return v
class IbetStraightBondUpdate(BaseModel):
    """ibet Straight Bond schema (Update)"""
    # Every field is Optional — presumably partial-update semantics where
    # only supplied attributes change; confirm against the API handler.
    face_value: Optional[int] = Field(None, ge=0, le=5_000_000_000)
    interest_rate: Optional[float] = Field(None, ge=0.0000, le=100.0000)
    interest_payment_date: Optional[List[MMDD_constr]]
    redemption_value: Optional[int] = Field(None, ge=0, le=5_000_000_000)
    transferable: Optional[bool]
    status: Optional[bool]
    is_offering: Optional[bool]
    is_redeemed: Optional[bool]
    tradable_exchange_contract_address: Optional[str]
    personal_info_contract_address: Optional[str]
    contact_information: Optional[str] = Field(max_length=2000)
    privacy_policy: Optional[str] = Field(max_length=5000)
    transfer_approval_required: Optional[bool]
    is_manual_transfer_approval: Optional[bool]
    memo: Optional[str] = Field(max_length=2000)

    @validator("interest_rate")
    def interest_rate_4_decimal_places(cls, v):
        """Reject interest_rate values with more than 4 decimal places."""
        if v is not None:
            # Scale by 10**4 and compare float vs int truncation: any digits
            # beyond the 4th decimal place make the two differ.
            float_data = float(v * 10 ** 4)
            int_data = int(v * 10 ** 4)
            if not math.isclose(int_data, float_data):
                raise ValueError("interest_rate must be rounded to 4 decimal places")
        return v

    @validator("interest_payment_date")
    def interest_payment_date_list_length_less_than_13(cls, v):
        """Allow at most 12 payment dates (presumably one per month)."""
        if v is not None and len(v) >= 13:
            raise ValueError("list length of interest_payment_date must be less than 13")
        return v

    @validator("tradable_exchange_contract_address")
    def tradable_exchange_contract_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress) when supplied."""
        if v is not None and not Web3.isAddress(v):
            raise ValueError("tradable_exchange_contract_address is not a valid address")
        return v

    @validator("personal_info_contract_address")
    def personal_info_contract_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress) when supplied."""
        if v is not None and not Web3.isAddress(v):
            raise ValueError("personal_info_contract_address is not a valid address")
        return v
class IbetStraightBondAdd(BaseModel):
    """ibet Straight Bond schema (Additional Issue)"""
    account_address: str
    # Issue amount: at least 1, capped at the 100M supply limit.
    amount: int = Field(..., ge=1, le=100_000_000)

    @validator("account_address")
    def account_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress)."""
        if not Web3.isAddress(v):
            raise ValueError("account_address is not a valid address")
        return v
class IbetStraightBondTransfer(BaseModel):
    """ibet Straight Bond schema (Transfer)"""
    token_address: str
    from_address: str
    to_address: str
    # Transfer amount: at least 1, capped at the 100M supply limit.
    amount: int = Field(..., ge=1, le=100_000_000)

    @validator("token_address")
    def token_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress)."""
        if not Web3.isAddress(v):
            raise ValueError("token_address is not a valid address")
        return v

    @validator("from_address")
    def from_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress)."""
        if not Web3.isAddress(v):
            raise ValueError("from_address is not a valid address")
        return v

    @validator("to_address")
    def to_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress)."""
        if not Web3.isAddress(v):
            raise ValueError("to_address is not a valid address")
        return v
class IbetShareCreate(BaseModel):
    """ibet Share schema (Create)"""
    # Required attributes; numeric bounds cap supply at 100M and
    # yen-denominated values at 5B.
    name: str = Field(max_length=100)
    issue_price: int = Field(..., ge=0, le=5_000_000_000)
    principal_value: int = Field(..., ge=0, le=5_000_000_000)
    total_supply: int = Field(..., ge=0, le=100_000_000)
    # Optional attributes (may be omitted on creation).
    symbol: Optional[str] = Field(max_length=100)
    dividends: Optional[float] = Field(None, ge=0.00, le=5_000_000_000.00)
    dividend_record_date: Optional[YYYYMMDD_constr]
    dividend_payment_date: Optional[YYYYMMDD_constr]
    cancellation_date: Optional[YYYYMMDD_constr]
    transferable: Optional[bool]
    status: Optional[bool]
    is_offering: Optional[bool]
    tradable_exchange_contract_address: Optional[str]
    personal_info_contract_address: Optional[str]
    contact_information: Optional[str] = Field(max_length=2000)
    privacy_policy: Optional[str] = Field(max_length=5000)
    transfer_approval_required: Optional[bool]
    is_manual_transfer_approval: Optional[bool]
    is_canceled: Optional[bool]

    @validator("dividends")
    def dividends_2_decimal_places(cls, v):
        """Reject dividends values with more than 2 decimal places."""
        if v is not None:
            # Scale by 10**2 and compare float vs int truncation: any digits
            # beyond the 2nd decimal place make the two differ.
            float_data = float(v * 10 ** 2)
            int_data = int(v * 10 ** 2)
            if not math.isclose(int_data, float_data):
                raise ValueError("dividends must be rounded to 2 decimal places")
        return v

    @validator("tradable_exchange_contract_address")
    def tradable_exchange_contract_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress) when supplied."""
        if v is not None and not Web3.isAddress(v):
            raise ValueError("tradable_exchange_contract_address is not a valid address")
        return v

    @validator("personal_info_contract_address")
    def personal_info_contract_address_is_valid_address(cls, v):
        """Require a valid address (per Web3.isAddress) when supplied."""
        if v is not None and not Web3.isAddress(v):
            raise ValueError("personal_info_contract_address is not a valid address")
        return v
class IbetShareUpdate(BaseModel):
    """ibet Share schema (Update)

    All fields are optional: only supplied fields are updated.  Note that
    dividend_record_date / dividend_payment_date are declared BEFORE
    dividends on purpose — the dividend_information_all_required validator
    reads them out of pydantic's ``values`` dict, which only contains
    previously-validated (i.e. previously-declared) fields.
    """

    cancellation_date: Optional[YYYYMMDD_constr]
    dividend_record_date: Optional[YYYYMMDD_constr]
    dividend_payment_date: Optional[YYYYMMDD_constr]
    dividends: Optional[float] = Field(None, ge=0.00, le=5_000_000_000.00)
    tradable_exchange_contract_address: Optional[str]
    personal_info_contract_address: Optional[str]
    transferable: Optional[bool]
    status: Optional[bool]
    is_offering: Optional[bool]
    contact_information: Optional[str] = Field(max_length=2000)
    privacy_policy: Optional[str] = Field(max_length=5000)
    transfer_approval_required: Optional[bool]
    is_manual_transfer_approval: Optional[bool]
    principal_value: Optional[int] = Field(None, ge=0, le=5_000_000_000)
    is_canceled: Optional[bool]
    memo: Optional[str] = Field(max_length=2000)

    @validator("dividends")
    def dividends_2_decimal_places(cls, v):
        """Reject dividends values with more than 2 decimal places."""
        if v is not None:
            # Scale by 100 and compare the float and truncated-int images:
            # they differ exactly when digits exist past the 2nd decimal.
            float_data = float(v * 10 ** 2)
            int_data = int(v * 10 ** 2)
            if not math.isclose(int_data, float_data):
                raise ValueError("dividends must be rounded to 2 decimal places")
        return v

    @validator("dividends")
    def dividend_information_all_required(cls, v, values, **kwargs):
        """When dividends is set, both dividend dates must be set too."""
        if v is not None:
            if values.get("dividend_record_date") is None or values.get("dividend_payment_date") is None:
                raise ValueError("all items are required to update the dividend information")
        return v

    @validator("tradable_exchange_contract_address")
    def tradable_exchange_contract_address_is_valid_address(cls, v):
        """Require a well-formed address when the field is supplied."""
        if v is not None and not Web3.isAddress(v):
            raise ValueError("tradable_exchange_contract_address is not a valid address")
        return v

    @validator("personal_info_contract_address")
    def personal_info_contract_address_is_valid_address(cls, v):
        """Require a well-formed address when the field is supplied."""
        if v is not None and not Web3.isAddress(v):
            raise ValueError("personal_info_contract_address is not a valid address")
        return v
class IbetShareTransfer(BaseModel):
    """ibet Share schema (Transfer)

    All three address fields are required and must pass Web3.isAddress.
    """

    token_address: str
    from_address: str
    to_address: str
    amount: int = Field(..., ge=1, le=100_000_000)

    @validator("token_address")
    def token_address_is_valid_address(cls, v):
        """token_address must be a well-formed address."""
        if not Web3.isAddress(v):
            raise ValueError("token_address is not a valid address")
        return v

    @validator("from_address")
    def from_address_is_valid_address(cls, v):
        """from_address must be a well-formed address."""
        if not Web3.isAddress(v):
            raise ValueError("from_address is not a valid address")
        return v

    @validator("to_address")
    def to_address_is_valid_address(cls, v):
        """to_address must be a well-formed address."""
        if not Web3.isAddress(v):
            raise ValueError("to_address is not a valid address")
        return v
class IbetShareAdd(BaseModel):
    """ibet Share schema (Additional Issue)"""

    account_address: str
    # Additional amount to issue (1 .. 100,000,000).
    amount: int = Field(..., ge=1, le=100_000_000)

    @validator("account_address")
    def account_address_is_valid_address(cls, v):
        """account_address must be a well-formed address."""
        if not Web3.isAddress(v):
            raise ValueError("account_address is not a valid address")
        return v
class IbetSecurityTokenApproveTransfer(BaseModel):
    """ibet SecurityToken schema (ApproveTransfer)"""

    # Identifier of the pending transfer application to approve.
    application_id: int
    data: str
class IbetSecurityTokenCancelTransfer(BaseModel):
    """ibet SecurityToken schema (CancelTransfer)"""

    # Identifier of the pending transfer application to cancel.
    application_id: int
    data: str
class IbetSecurityTokenEscrowApproveTransfer(BaseModel):
    """ibet SecurityTokenEscrow schema (ApproveTransfer)"""

    # Identifier of the escrow whose transfer is approved.
    escrow_id: int
    data: str
############################
# RESPONSE
############################
class TokenAddressResponse(BaseModel):
    """token address"""

    token_address: str
    token_status: int
class IbetStraightBondResponse(BaseModel):
    """ibet Straight Bond schema (Response)

    Read model mirroring the bond token's attributes; all fields are
    required in the response.
    """

    issuer_address: str
    token_address: str
    name: str
    symbol: str
    total_supply: int
    face_value: int
    redemption_date: str
    redemption_value: int
    return_date: str
    # NOTE(review): typed str while redemption_value is int — confirm this
    # asymmetry is intentional before changing.
    return_amount: str
    purpose: str
    interest_rate: float
    interest_payment_date: List[str]
    transferable: bool
    is_redeemed: bool
    status: bool
    is_offering: bool
    tradable_exchange_contract_address: str
    personal_info_contract_address: str
    contact_information: str
    privacy_policy: str
    issue_datetime: str
    token_status: int
    transfer_approval_required: bool
    is_manual_transfer_approval: bool
    memo: str
class IbetShareResponse(BaseModel):
    """ibet Share schema (Response)

    Read model mirroring the share token's attributes; all fields are
    required in the response.
    """

    issuer_address: str
    token_address: str
    name: str
    symbol: str
    issue_price: int
    principal_value: int
    total_supply: int
    dividends: float
    dividend_record_date: str
    dividend_payment_date: str
    cancellation_date: str
    transferable: bool
    transfer_approval_required: bool
    is_manual_transfer_approval: bool
    status: bool
    is_offering: bool
    tradable_exchange_contract_address: str
    personal_info_contract_address: str
    contact_information: str
    privacy_policy: str
    issue_datetime: str
    token_status: int
    is_canceled: bool
    memo: str
| 34.572519
| 105
| 0.697137
| 1,784
| 13,587
| 5.067825
| 0.11435
| 0.059728
| 0.0146
| 0.061719
| 0.778343
| 0.751797
| 0.738856
| 0.731335
| 0.723371
| 0.708882
| 0
| 0.029598
| 0.209244
| 13,587
| 392
| 106
| 34.660714
| 0.811895
| 0.080445
| 0
| 0.795987
| 0
| 0
| 0.129155
| 0.05015
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.016722
| 0
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
248692685a3cab5df3e84725100cf965ee2f0368
| 92
|
py
|
Python
|
get-gip.py
|
dev100kg/get-gip
|
d034ad80d675d0837718da483a34ca188685dd22
|
[
"Unlicense"
] | null | null | null |
get-gip.py
|
dev100kg/get-gip
|
d034ad80d675d0837718da483a34ca188685dd22
|
[
"Unlicense"
] | null | null | null |
get-gip.py
|
dev100kg/get-gip
|
d034ad80d675d0837718da483a34ca188685dd22
|
[
"Unlicense"
] | null | null | null |
import requests
def getGlobalIp(timeout=10.0):
    """Return this host's global (public) IP address as text.

    Queries the http://inet-ip.info/ip echo service and returns the raw
    response body (returned verbatim — it may carry trailing whitespace,
    matching the original behavior).

    Args:
        timeout: Seconds to wait for the HTTP response.  The original call
            passed no timeout, so a stalled server could block the caller
            forever; requests never times out by default.

    Returns:
        str: the response body from the service.
    """
    # NOTE(review): endpoint is plain HTTP, so the answer is unauthenticated
    # and spoofable on-path — acceptable for a convenience lookup only.
    return requests.get("http://inet-ip.info/ip", timeout=timeout).text
| 15.333333
| 54
| 0.706522
| 13
| 92
| 5
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 92
| 5
| 55
| 18.4
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0.23913
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
24cbea9cbc9c7d31faef9a241e2fd30fccee62a2
| 181
|
py
|
Python
|
productos/views.py
|
Yaco-Lee/SaraswatiApp
|
8c8bc03987c7f921611864ba58945f6ec4f33b6d
|
[
"MIT"
] | null | null | null |
productos/views.py
|
Yaco-Lee/SaraswatiApp
|
8c8bc03987c7f921611864ba58945f6ec4f33b6d
|
[
"MIT"
] | 3
|
2021-03-30T14:09:41.000Z
|
2021-06-04T23:42:04.000Z
|
productos/views.py
|
Yaco-Lee/SaraswatiApp
|
8c8bc03987c7f921611864ba58945f6ec4f33b6d
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
from django.shortcuts import render
def home_view(request=None):
    """Render the application landing page.

    Django always invokes a view as ``view(request, ...)``; the original
    zero-argument signature would raise TypeError as soon as the URL was
    routed.  ``request`` defaults to None so any existing direct callers
    that passed no argument keep working.

    Args:
        request: The incoming HttpRequest (unused by this static page).

    Returns:
        HttpResponse containing the static landing-page markup.
    """
    return HttpResponse("<h1> Saraswati App </h1>")
| 20.111111
| 51
| 0.773481
| 24
| 181
| 5.791667
| 0.583333
| 0.215827
| 0.273381
| 0.359712
| 0.446043
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012903
| 0.143646
| 181
| 8
| 52
| 22.625
| 0.883871
| 0
| 0
| 0.4
| 0
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.6
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
24d58c6467d7053c14ac14013152e0f502892a7a
| 13,390
|
py
|
Python
|
go/routers/group/tests/test_views.py
|
lynnUg/vumi-go
|
852f906c46d5d26940bd6699f11488b73bbc3742
|
[
"BSD-3-Clause"
] | null | null | null |
go/routers/group/tests/test_views.py
|
lynnUg/vumi-go
|
852f906c46d5d26940bd6699f11488b73bbc3742
|
[
"BSD-3-Clause"
] | null | null | null |
go/routers/group/tests/test_views.py
|
lynnUg/vumi-go
|
852f906c46d5d26940bd6699f11488b73bbc3742
|
[
"BSD-3-Clause"
] | null | null | null |
from go.base.tests.helpers import GoDjangoTestCase
from go.routers.tests.view_helpers import RouterViewsHelper
from go.vumitools.api import VumiApiCommand
class GroupViewTests(GoDjangoTestCase):
    """View tests for the 'group' router type.

    Uses RouterViewsHelper to create routers and contact groups, and a
    Django test client to exercise the new/show/start/stop/edit views.
    """

    def setUp(self):
        # Helper bound to the 'group' router type; it also supplies a test
        # user and an authenticated Django test client.
        self.router_helper = self.add_helper(RouterViewsHelper(u'group'))
        self.user_helper = self.router_helper.vumi_helper.get_or_create_user()
        self.client = self.router_helper.get_client()

    def test_new_router(self):
        """POSTing the 'new' form stores a router and redirects to its edit view."""
        router_store = self.user_helper.user_api.router_store
        self.assertEqual([], router_store.list_routers())
        response = self.client.post(self.router_helper.get_new_view_url(), {
            'name': u"myrouter",
            'router_type': u'group',
        })
        [router_key] = router_store.list_routers()
        rtr_helper = self.router_helper.get_router_helper_by_key(router_key)
        self.assertRedirects(response, rtr_helper.get_view_url('edit'))

    def test_show_stopped(self):
        """A stopped router's 'show' page links 'start' but not 'stop'."""
        rtr_helper = self.router_helper.create_router_helper(name=u"myrouter")
        response = self.client.get(rtr_helper.get_view_url('show'))
        router = response.context[0].get('router')
        self.assertEqual(router.name, u"myrouter")
        self.assertContains(response, rtr_helper.get_view_url('start'))
        self.assertNotContains(response, rtr_helper.get_view_url('stop'))

    def test_show_running(self):
        """A started router's 'show' page links 'stop' but not 'start'."""
        rtr_helper = self.router_helper.create_router_helper(
            name=u"myrouter", started=True)
        response = self.client.get(rtr_helper.get_view_url('show'))
        router = response.context[0].get('router')
        self.assertEqual(router.name, u"myrouter")
        self.assertNotContains(response, rtr_helper.get_view_url('start'))
        self.assertContains(response, rtr_helper.get_view_url('stop'))

    def test_start(self):
        """POSTing 'start' marks the router starting and sends a start API command."""
        rtr_helper = self.router_helper.create_router_helper(started=False)
        response = self.client.post(rtr_helper.get_view_url('start'))
        self.assertRedirects(response, rtr_helper.get_view_url('show'))
        router = rtr_helper.get_router()
        self.assertTrue(router.starting())
        [start_cmd] = self.router_helper.get_api_commands_sent()
        self.assertEqual(
            start_cmd, VumiApiCommand.command(
                '%s_router' % (router.router_type,), 'start',
                user_account_key=router.user_account.key,
                router_key=router.key))

    def test_stop(self):
        """POSTing 'stop' marks the router stopping and sends a stop API command."""
        rtr_helper = self.router_helper.create_router_helper(started=True)
        response = self.client.post(rtr_helper.get_view_url('stop'))
        self.assertRedirects(response, rtr_helper.get_view_url('show'))
        router = rtr_helper.get_router()
        self.assertTrue(router.stopping())
        [start_cmd] = self.router_helper.get_api_commands_sent()
        self.assertEqual(
            start_cmd, VumiApiCommand.command(
                '%s_router' % (router.router_type,), 'stop',
                user_account_key=router.user_account.key,
                router_key=router.key))

    def test_get_edit_empty_config(self):
        """The edit view renders (HTTP 200) for a router with no config."""
        rtr_helper = self.router_helper.create_router_helper(started=True)
        response = self.client.get(rtr_helper.get_view_url('edit'))
        self.assertEqual(response.status_code, 200)

    def test_edit_shows_only_static_groups(self):
        """The edit form lists static groups only; smart groups are excluded."""
        static_group = self.router_helper.create_group(u'staticgroup')
        smart_group = self.router_helper.create_smart_group(u'smartgroup', u'')
        rtr_helper = self.router_helper.create_router_helper(started=True)
        response = self.client.get(rtr_helper.get_view_url('edit'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, static_group.key)
        self.assertContains(response, static_group.name)
        self.assertNotContains(response, smart_group.key)
        self.assertNotContains(response, smart_group.name)
        self.assertContains(
            response, "Smart groups are not currently supported")

    def test_get_edit_small_config(self):
        """Existing rules (group + endpoint) are rendered on the edit form."""
        group = self.router_helper.create_group(u'mygroup')
        rtr_helper = self.router_helper.create_router_helper(
            started=True, config={'rules': [
                {
                    'group': group.key,
                    'endpoint': 'target_endpoint',
                },
            ]})
        response = self.client.get(rtr_helper.get_view_url('edit'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, group.name)
        self.assertContains(response, 'target_endpoint')

    def test_edit_router_config(self):
        """Submitting two formset rows saves both rules and their outbound endpoints."""
        group1 = self.router_helper.create_group(u'mygroup 1')
        group2 = self.router_helper.create_group(u'mygroup 2')
        rtr_helper = self.router_helper.create_router_helper(started=True)
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {})
        # Standard Django formset management-form keys plus two rule rows.
        response = self.client.post(rtr_helper.get_view_url('edit'), {
            'rules-TOTAL_FORMS': ['2'],
            'rules-INITIAL_FORMS': ['0'],
            'rules-MAX_NUM_FORMS': [''],
            'rules-0-group': [group1.key],
            'rules-0-endpoint': ['foo'],
            'rules-0-DELETE': [''],
            'rules-1-group': [group2.key],
            'rules-1-endpoint': ['bar'],
            'rules-1-DELETE': [''],
        })
        self.assertRedirects(response, rtr_helper.get_view_url('show'))
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {u'rules': [
            {'group': group1.key, 'endpoint': 'foo'},
            {'group': group2.key, 'endpoint': 'bar'},
        ]})
        self.assertEqual(set(router.extra_inbound_endpoints), set())
        self.assertEqual(
            set(router.extra_outbound_endpoints), set(['foo', 'bar']))

    def test_edit_router_group_config_with_delete(self):
        """A row submitted with DELETE checked is dropped from the saved config."""
        group1 = self.router_helper.create_group(u'mygroup 1')
        group2 = self.router_helper.create_group(u'mygroup 2')
        rtr_helper = self.router_helper.create_router_helper(started=True)
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {})
        response = self.client.post(rtr_helper.get_view_url('edit'), {
            'rules-TOTAL_FORMS': ['2'],
            'rules-INITIAL_FORMS': ['0'],
            'rules-MAX_NUM_FORMS': [''],
            'rules-0-group': [group1.key],
            'rules-0-endpoint': ['foo'],
            'rules-0-DELETE': ['on'],
            'rules-1-group': [group2.key],
            'rules-1-endpoint': ['bar'],
            'rules-1-DELETE': [''],
        })
        self.assertRedirects(response, rtr_helper.get_view_url('show'))
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {u'rules': [
            {'group': group2.key, 'endpoint': 'bar'},
        ]})
        self.assertEqual(set(router.extra_inbound_endpoints), set())
        self.assertEqual(
            set(router.extra_outbound_endpoints), set(['bar']))

    def test_edit_router_group_config_with_delete_missing_group(self):
        """A DELETE-checked row referencing an unknown group is discarded safely."""
        group = self.router_helper.create_group(u'mygroup')
        rtr_helper = self.router_helper.create_router_helper(started=True)
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {})
        response = self.client.post(rtr_helper.get_view_url('edit'), {
            'rules-TOTAL_FORMS': ['2'],
            'rules-INITIAL_FORMS': ['0'],
            'rules-MAX_NUM_FORMS': [''],
            'rules-0-group': ['badgroup'],
            'rules-0-endpoint': ['foo'],
            'rules-0-DELETE': ['on'],
            'rules-1-group': [group.key],
            'rules-1-endpoint': ['bar'],
            'rules-1-DELETE': [''],
        })
        self.assertRedirects(response, rtr_helper.get_view_url('show'))
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {u'rules': [
            {'group': group.key, 'endpoint': 'bar'},
        ]})
        self.assertEqual(set(router.extra_inbound_endpoints), set())
        self.assertEqual(
            set(router.extra_outbound_endpoints), set(['bar']))

    def test_edit_router_group_config_with_unmodified_extra_form(self):
        """An untouched blank extra formset row is ignored on save."""
        group = self.router_helper.create_group(u'mygroup')
        rtr_helper = self.router_helper.create_router_helper(
            started=True, extra_outbound_endpoints=[u'foo'],
            config={u'rules': [{'group': group.key, 'endpoint': 'foo'}]})
        router = rtr_helper.get_router()
        response = self.client.post(rtr_helper.get_view_url('edit'), {
            'rules-TOTAL_FORMS': ['2'],
            'rules-INITIAL_FORMS': ['1'],
            'rules-MAX_NUM_FORMS': [''],
            'rules-0-group': [group.key],
            'rules-0-endpoint': ['foo'],
            'rules-0-DELETE': [''],
            'rules-1-group': [''],
            'rules-1-endpoint': [''],
            'rules-1-DELETE': [''],
        })
        self.assertRedirects(response, rtr_helper.get_view_url('show'))
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {u'rules': [
            {'group': group.key, 'endpoint': 'foo'},
        ]})
        self.assertEqual(set(router.extra_inbound_endpoints), set())
        self.assertEqual(
            set(router.extra_outbound_endpoints), set(['foo']))

    def test_edit_router_group_config_extra_form_empty_group(self):
        """An extra row with an endpoint but no group fails validation;
        the form re-renders with an error and the config is unchanged."""
        group = self.router_helper.create_group(u'mygroup')
        rtr_helper = self.router_helper.create_router_helper(
            started=True, extra_outbound_endpoints=[u'foo'],
            config={u'rules': [{'group': group.key, 'endpoint': 'foo'}]})
        router = rtr_helper.get_router()
        response = self.client.post(rtr_helper.get_view_url('edit'), {
            'rules-TOTAL_FORMS': ['2'],
            'rules-INITIAL_FORMS': ['1'],
            'rules-MAX_NUM_FORMS': [''],
            'rules-0-group': [group.key],
            'rules-0-endpoint': ['foo'],
            'rules-0-DELETE': [''],
            'rules-1-group': [''],
            'rules-1-endpoint': ['bar'],
            'rules-1-DELETE': [''],
        })
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.context['edit_forms'][0].errors,
            [{}, {'group': [u'This field is required.']}])
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {u'rules': [
            {'group': group.key, 'endpoint': 'foo'},
        ]})
        self.assertEqual(set(router.extra_inbound_endpoints), set())
        self.assertEqual(
            set(router.extra_outbound_endpoints), set(['foo']))

    def test_edit_router_group_config_extra_form_empty_endpoint(self):
        """An extra row with a group but no endpoint fails validation;
        the form re-renders with an error and the config is unchanged."""
        group = self.router_helper.create_group(u'mygroup')
        other_group = self.router_helper.create_group(u'othergroup')
        rtr_helper = self.router_helper.create_router_helper(
            started=True, extra_outbound_endpoints=[u'foo'],
            config={u'rules': [{'group': group.key, 'endpoint': 'foo'}]})
        router = rtr_helper.get_router()
        response = self.client.post(rtr_helper.get_view_url('edit'), {
            'rules-TOTAL_FORMS': ['2'],
            'rules-INITIAL_FORMS': ['1'],
            'rules-MAX_NUM_FORMS': [''],
            'rules-0-group': [group.key],
            'rules-0-endpoint': ['foo'],
            'rules-0-DELETE': [''],
            'rules-1-group': [other_group.key],
            'rules-1-endpoint': [''],
            'rules-1-DELETE': [''],
        })
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.context['edit_forms'][0].errors,
            [{}, {'endpoint': [u'This field is required.']}])
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {u'rules': [
            {'group': group.key, 'endpoint': 'foo'},
        ]})
        self.assertEqual(set(router.extra_inbound_endpoints), set())
        self.assertEqual(
            set(router.extra_outbound_endpoints), set(['foo']))

    def test_edit_router_group_config_extra_form_new_entry(self):
        """A fully filled extra row appends a new rule and its outbound endpoint."""
        group = self.router_helper.create_group(u'mygroup')
        other_group = self.router_helper.create_group(u'othergroup')
        rtr_helper = self.router_helper.create_router_helper(
            started=True, extra_outbound_endpoints=[u'foo'],
            config={u'rules': [{'group': group.key, 'endpoint': 'foo'}]})
        router = rtr_helper.get_router()
        response = self.client.post(rtr_helper.get_view_url('edit'), {
            'rules-TOTAL_FORMS': ['2'],
            'rules-INITIAL_FORMS': ['1'],
            'rules-MAX_NUM_FORMS': [''],
            'rules-0-group': [group.key],
            'rules-0-endpoint': ['foo'],
            'rules-0-DELETE': [''],
            'rules-1-group': [other_group.key],
            'rules-1-endpoint': ['bar'],
            'rules-1-DELETE': [''],
        })
        self.assertRedirects(response, rtr_helper.get_view_url('show'))
        router = rtr_helper.get_router()
        self.assertEqual(router.config, {u'rules': [
            {'group': group.key, 'endpoint': 'foo'},
            {'group': other_group.key, 'endpoint': 'bar'},
        ]})
        self.assertEqual(set(router.extra_inbound_endpoints), set())
        self.assertEqual(
            set(router.extra_outbound_endpoints), set(['foo', 'bar']))
| 45.856164
| 79
| 0.611352
| 1,542
| 13,390
| 5.046044
| 0.076524
| 0.06593
| 0.064773
| 0.079167
| 0.878036
| 0.843465
| 0.842437
| 0.837553
| 0.814034
| 0.790387
| 0
| 0.008846
| 0.240179
| 13,390
| 291
| 80
| 46.013746
| 0.755947
| 0
| 0
| 0.755556
| 0
| 0
| 0.138013
| 0
| 0
| 0
| 0
| 0
| 0.211111
| 1
| 0.059259
| false
| 0
| 0.011111
| 0
| 0.074074
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7000591eb8a4f6d8ca525e77c98fa7ef119dd9f3
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_darius/na_darius_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_darius/na_darius_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_darius/na_darius_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Generated-style boilerplate: one empty Ratings subclass per champion,
# named NA_Darius_Sup_<Champion>.  The classes add no behavior beyond the
# Ratings base — the distinct class names are the data.
# NOTE(review): the names are presumably looked up elsewhere by string;
# confirm before consolidating or generating these dynamically.
class NA_Darius_Sup_Aatrox(Ratings):
    pass
class NA_Darius_Sup_Ahri(Ratings):
    pass
class NA_Darius_Sup_Akali(Ratings):
    pass
class NA_Darius_Sup_Alistar(Ratings):
    pass
class NA_Darius_Sup_Amumu(Ratings):
    pass
class NA_Darius_Sup_Anivia(Ratings):
    pass
class NA_Darius_Sup_Annie(Ratings):
    pass
class NA_Darius_Sup_Ashe(Ratings):
    pass
class NA_Darius_Sup_AurelionSol(Ratings):
    pass
class NA_Darius_Sup_Azir(Ratings):
    pass
class NA_Darius_Sup_Bard(Ratings):
    pass
class NA_Darius_Sup_Blitzcrank(Ratings):
    pass
class NA_Darius_Sup_Brand(Ratings):
    pass
class NA_Darius_Sup_Braum(Ratings):
    pass
class NA_Darius_Sup_Caitlyn(Ratings):
    pass
class NA_Darius_Sup_Camille(Ratings):
    pass
class NA_Darius_Sup_Cassiopeia(Ratings):
    pass
class NA_Darius_Sup_Chogath(Ratings):
    pass
class NA_Darius_Sup_Corki(Ratings):
    pass
class NA_Darius_Sup_Darius(Ratings):
    pass
class NA_Darius_Sup_Diana(Ratings):
    pass
class NA_Darius_Sup_Draven(Ratings):
    pass
class NA_Darius_Sup_DrMundo(Ratings):
    pass
class NA_Darius_Sup_Ekko(Ratings):
    pass
class NA_Darius_Sup_Elise(Ratings):
    pass
class NA_Darius_Sup_Evelynn(Ratings):
    pass
class NA_Darius_Sup_Ezreal(Ratings):
    pass
class NA_Darius_Sup_Fiddlesticks(Ratings):
    pass
class NA_Darius_Sup_Fiora(Ratings):
    pass
class NA_Darius_Sup_Fizz(Ratings):
    pass
class NA_Darius_Sup_Galio(Ratings):
    pass
class NA_Darius_Sup_Gangplank(Ratings):
    pass
class NA_Darius_Sup_Garen(Ratings):
    pass
class NA_Darius_Sup_Gnar(Ratings):
    pass
class NA_Darius_Sup_Gragas(Ratings):
    pass
class NA_Darius_Sup_Graves(Ratings):
    pass
class NA_Darius_Sup_Hecarim(Ratings):
    pass
class NA_Darius_Sup_Heimerdinger(Ratings):
    pass
class NA_Darius_Sup_Illaoi(Ratings):
    pass
class NA_Darius_Sup_Irelia(Ratings):
    pass
class NA_Darius_Sup_Ivern(Ratings):
    pass
class NA_Darius_Sup_Janna(Ratings):
    pass
class NA_Darius_Sup_JarvanIV(Ratings):
    pass
class NA_Darius_Sup_Jax(Ratings):
    pass
class NA_Darius_Sup_Jayce(Ratings):
    pass
class NA_Darius_Sup_Jhin(Ratings):
    pass
class NA_Darius_Sup_Jinx(Ratings):
    pass
class NA_Darius_Sup_Kalista(Ratings):
    pass
class NA_Darius_Sup_Karma(Ratings):
    pass
class NA_Darius_Sup_Karthus(Ratings):
    pass
class NA_Darius_Sup_Kassadin(Ratings):
    pass
class NA_Darius_Sup_Katarina(Ratings):
    pass
class NA_Darius_Sup_Kayle(Ratings):
    pass
class NA_Darius_Sup_Kayn(Ratings):
    pass
class NA_Darius_Sup_Kennen(Ratings):
    pass
class NA_Darius_Sup_Khazix(Ratings):
    pass
class NA_Darius_Sup_Kindred(Ratings):
    pass
class NA_Darius_Sup_Kled(Ratings):
    pass
class NA_Darius_Sup_KogMaw(Ratings):
    pass
class NA_Darius_Sup_Leblanc(Ratings):
    pass
class NA_Darius_Sup_LeeSin(Ratings):
    pass
class NA_Darius_Sup_Leona(Ratings):
    pass
class NA_Darius_Sup_Lissandra(Ratings):
    pass
class NA_Darius_Sup_Lucian(Ratings):
    pass
class NA_Darius_Sup_Lulu(Ratings):
    pass
class NA_Darius_Sup_Lux(Ratings):
    pass
class NA_Darius_Sup_Malphite(Ratings):
    pass
class NA_Darius_Sup_Malzahar(Ratings):
    pass
class NA_Darius_Sup_Maokai(Ratings):
    pass
class NA_Darius_Sup_MasterYi(Ratings):
    pass
class NA_Darius_Sup_MissFortune(Ratings):
    pass
class NA_Darius_Sup_MonkeyKing(Ratings):
    pass
class NA_Darius_Sup_Mordekaiser(Ratings):
    pass
class NA_Darius_Sup_Morgana(Ratings):
    pass
class NA_Darius_Sup_Nami(Ratings):
    pass
class NA_Darius_Sup_Nasus(Ratings):
    pass
class NA_Darius_Sup_Nautilus(Ratings):
    pass
class NA_Darius_Sup_Nidalee(Ratings):
    pass
class NA_Darius_Sup_Nocturne(Ratings):
    pass
class NA_Darius_Sup_Nunu(Ratings):
    pass
class NA_Darius_Sup_Olaf(Ratings):
    pass
class NA_Darius_Sup_Orianna(Ratings):
    pass
class NA_Darius_Sup_Ornn(Ratings):
    pass
class NA_Darius_Sup_Pantheon(Ratings):
    pass
class NA_Darius_Sup_Poppy(Ratings):
    pass
class NA_Darius_Sup_Quinn(Ratings):
    pass
class NA_Darius_Sup_Rakan(Ratings):
    pass
class NA_Darius_Sup_Rammus(Ratings):
    pass
class NA_Darius_Sup_RekSai(Ratings):
    pass
class NA_Darius_Sup_Renekton(Ratings):
    pass
class NA_Darius_Sup_Rengar(Ratings):
    pass
class NA_Darius_Sup_Riven(Ratings):
    pass
class NA_Darius_Sup_Rumble(Ratings):
    pass
class NA_Darius_Sup_Ryze(Ratings):
    pass
class NA_Darius_Sup_Sejuani(Ratings):
    pass
class NA_Darius_Sup_Shaco(Ratings):
    pass
class NA_Darius_Sup_Shen(Ratings):
    pass
class NA_Darius_Sup_Shyvana(Ratings):
    pass
class NA_Darius_Sup_Singed(Ratings):
    pass
class NA_Darius_Sup_Sion(Ratings):
    pass
class NA_Darius_Sup_Sivir(Ratings):
    pass
class NA_Darius_Sup_Skarner(Ratings):
    pass
class NA_Darius_Sup_Sona(Ratings):
    pass
class NA_Darius_Sup_Soraka(Ratings):
    pass
class NA_Darius_Sup_Swain(Ratings):
    pass
class NA_Darius_Sup_Syndra(Ratings):
    pass
class NA_Darius_Sup_TahmKench(Ratings):
    pass
class NA_Darius_Sup_Taliyah(Ratings):
    pass
class NA_Darius_Sup_Talon(Ratings):
    pass
class NA_Darius_Sup_Taric(Ratings):
    pass
class NA_Darius_Sup_Teemo(Ratings):
    pass
class NA_Darius_Sup_Thresh(Ratings):
    pass
class NA_Darius_Sup_Tristana(Ratings):
    pass
class NA_Darius_Sup_Trundle(Ratings):
    pass
class NA_Darius_Sup_Tryndamere(Ratings):
    pass
class NA_Darius_Sup_TwistedFate(Ratings):
    pass
class NA_Darius_Sup_Twitch(Ratings):
    pass
class NA_Darius_Sup_Udyr(Ratings):
    pass
class NA_Darius_Sup_Urgot(Ratings):
    pass
class NA_Darius_Sup_Varus(Ratings):
    pass
class NA_Darius_Sup_Vayne(Ratings):
    pass
class NA_Darius_Sup_Veigar(Ratings):
    pass
class NA_Darius_Sup_Velkoz(Ratings):
    pass
class NA_Darius_Sup_Vi(Ratings):
    pass
class NA_Darius_Sup_Viktor(Ratings):
    pass
class NA_Darius_Sup_Vladimir(Ratings):
    pass
class NA_Darius_Sup_Volibear(Ratings):
    pass
class NA_Darius_Sup_Warwick(Ratings):
    pass
class NA_Darius_Sup_Xayah(Ratings):
    pass
class NA_Darius_Sup_Xerath(Ratings):
    pass
class NA_Darius_Sup_XinZhao(Ratings):
    pass
class NA_Darius_Sup_Yasuo(Ratings):
    pass
class NA_Darius_Sup_Yorick(Ratings):
    pass
class NA_Darius_Sup_Zac(Ratings):
    pass
class NA_Darius_Sup_Zed(Ratings):
    pass
class NA_Darius_Sup_Ziggs(Ratings):
    pass
class NA_Darius_Sup_Zilean(Ratings):
    pass
class NA_Darius_Sup_Zyra(Ratings):
    pass
| 15.695444
| 46
| 0.766692
| 972
| 6,545
| 4.736626
| 0.151235
| 0.209818
| 0.389661
| 0.479583
| 0.803432
| 0.803432
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169748
| 6,545
| 416
| 47
| 15.733173
| 0.847258
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
7006309453752b0fe1db0708c5bbba3447541024
| 2,468
|
py
|
Python
|
amy/workshops/migrations/0221_auto_20201025_1113.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 53
|
2015-01-10T17:39:19.000Z
|
2019-06-12T17:36:34.000Z
|
amy/workshops/migrations/0221_auto_20201025_1113.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 1,176
|
2015-01-02T06:32:47.000Z
|
2019-06-18T11:57:47.000Z
|
amy/workshops/migrations/0221_auto_20201025_1113.py
|
code-review-doctor/amy
|
268c1a199510457891459f3ddd73fcce7fe2b974
|
[
"MIT"
] | 44
|
2015-01-03T15:08:56.000Z
|
2019-06-09T05:33:08.000Z
|
# Generated by Django 2.2.13 on 2020-10-25 11:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.13 (2020-10-25, see file header).  Only
    # display metadata changes: the verbose_name/help_text HTML on consent
    # BooleanFields is updated so the policy links carry
    # target="_blank" rel="noreferrer".  Defaults stay as shown
    # (may_contact defaults True; the agreement flags default False).
    # Do not hand-edit the AlterField bodies — they must match the model state.

    dependencies = [
        ('workshops', '0220_event_public_status'),
    ]

    operations = [
        migrations.AlterField(
            model_name='person',
            name='data_privacy_agreement',
            field=models.BooleanField(default=False, verbose_name='I have read and agree to <a href="https://docs.carpentries.org/topic_folders/policies/privacy.html" target="_blank" rel="noreferrer">the data privacy policy</a> of The Carpentries.'),
        ),
        migrations.AlterField(
            model_name='person',
            name='may_contact',
            field=models.BooleanField(default=True, help_text='Allow to contact from The Carpentries according to the <a href="https://docs.carpentries.org/topic_folders/policies/privacy.html" target="_blank" rel="noreferrer">Privacy Policy</a>.'),
        ),
        migrations.AlterField(
            model_name='trainingrequest',
            name='code_of_conduct_agreement',
            field=models.BooleanField(default=False, verbose_name='I agree to abide by The Carpentries\' <a href="https://docs.carpentries.org/topic_folders/policies/code-of-conduct.html" target="_blank" rel="noreferrer">Code of Conduct</a>.'),
        ),
        migrations.AlterField(
            model_name='trainingrequest',
            name='data_privacy_agreement',
            field=models.BooleanField(default=False, verbose_name='I have read and agree to <a href="https://docs.carpentries.org/topic_folders/policies/privacy.html" target="_blank" rel="noreferrer">the data privacy policy</a> of The Carpentries.'),
        ),
        migrations.AlterField(
            model_name='workshoprequest',
            name='code_of_conduct_agreement',
            field=models.BooleanField(default=False, verbose_name='I agree to abide by The Carpentries\' <a href="https://docs.carpentries.org/topic_folders/policies/code-of-conduct.html" target="_blank" rel="noreferrer">Code of Conduct</a>.'),
        ),
        migrations.AlterField(
            model_name='workshoprequest',
            name='data_privacy_agreement',
            field=models.BooleanField(default=False, verbose_name='I have read and agree to <a href="https://docs.carpentries.org/topic_folders/policies/privacy.html" target="_blank" rel="noreferrer">the data privacy policy</a> of The Carpentries.'),
        ),
    ]
| 56.090909
| 250
| 0.678282
| 296
| 2,468
| 5.52027
| 0.239865
| 0.073439
| 0.091799
| 0.106487
| 0.837821
| 0.837821
| 0.791922
| 0.755202
| 0.755202
| 0.755202
| 0
| 0.010081
| 0.19611
| 2,468
| 43
| 251
| 57.395349
| 0.813508
| 0.018639
| 0
| 0.756757
| 1
| 0.108108
| 0.496694
| 0.067769
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
704c4b7cda27d063fbb314bb1d09928956954ad0
| 80,686
|
py
|
Python
|
src/OYTX_Recog/Hierarchical_Attn_With_Senti_Map_on_deap.py
|
Ruiver/CTCNet
|
539e55ec9fed06028379d35dfd5cd4074755ffd8
|
[
"Apache-2.0"
] | 6
|
2020-09-17T06:30:41.000Z
|
2021-11-07T14:19:23.000Z
|
src/OYTX_Recog/Hierarchical_Attn_With_Senti_Map_on_deap.py
|
Ruiver/CTCNet
|
539e55ec9fed06028379d35dfd5cd4074755ffd8
|
[
"Apache-2.0"
] | null | null | null |
src/OYTX_Recog/Hierarchical_Attn_With_Senti_Map_on_deap.py
|
Ruiver/CTCNet
|
539e55ec9fed06028379d35dfd5cd4074755ffd8
|
[
"Apache-2.0"
] | 3
|
2020-09-21T13:00:47.000Z
|
2021-10-30T07:32:05.000Z
|
"""
author: ouyangtianxiong
date: 2020/3/03
des: implements attention-based emotion recognition on deap dataset
Based on code from https://github.com/KaihuaTang/VQA2.0-Recent-Approachs-2018.pytorch
"""
import sys
sys.path.append('../')
__author__ = 'ouyangtianxiong.bupt.edu.cn'
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torch.nn.utils import clip_grad_norm_
from torch.optim import Adam,SGD,RMSprop
from torch.nn import CrossEntropyLoss
import numpy as np
from Common_utils.model_evaluation import plot_acc_loss_curve
from Common_utils.model_training import GradualWarmupScheduler, LabelSmoothSoftmax
from Common_utils.basic_module import FCNet
import os
from data_set.deap_feature import DEAP, DEAP_DATASET, DEAP128
#from Hierarchical_Attn import MultiBlocks, OneSideInterModalityUpdate, InterModalityUpdate,SingleBlock, Classifier
import pandas as pd
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score, precision_score, recall_score, f1_score
from sklearn.model_selection import KFold
from Common_utils.basic_utils import deap_normalization
os.environ['CUDA_VISIBLE_DEVICES'] = '2'
# device = torch.device('cpu')
device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
class Classifier(nn.Sequential):
    """Classification head fusing two modality feature maps.

    Pools each modality over its region axis, fuses the pooled vectors
    with a bilinear layer, and projects to class logits via lin2.

    Fix vs. original: the original computed ``out = self.lin1(v_mean * q_mean)``
    and then immediately overwrote ``out`` with the bilinear result, so the
    lin1 forward pass was dead work on every call.  The call is removed;
    ``lin1`` itself is kept in __init__ so existing checkpoints still load.
    """

    def __init__(self, in_features, mid_features, out_features, drop=0.0):
        super(Classifier, self).__init__()
        # lin1 is retained only for state_dict/checkpoint compatibility;
        # the active fusion path is bilinear -> lin2.
        self.lin1 = FCNet(in_features, mid_features, activate='relu', drop=drop)
        self.lin2 = FCNet(mid_features, out_features, drop=drop)
        self.bilinear = nn.Bilinear(in1_features=in_features, in2_features=in_features, out_features=mid_features)

    def forward(self, v, q):
        """
        :param v: [batch, r1, features]
        :param q: [batch, r2, features]
        :return: class logits [batch, out_features]
        """
        # NOTE(review): the docstring implies the region axis is dim 1
        # (which .sum(1) reduces), yet the divisor is shape[2] (the feature
        # size).  Preserved as-is to keep behavior identical — confirm
        # whether shape[1] was intended.
        num_obj = v.shape[2]
        max_len = q.shape[2]
        v_mean = v.sum(1) / num_obj
        q_mean = q.sum(1) / max_len
        out = self.bilinear(v_mean, q_mean)
        out = self.lin2(out)
        return out
class InterModalityUpdate(nn.Module):
    """Inter-Modality Attention Flow.

    Each modality attends over the other (v attends to q and q attends to v)
    with multi-head scaled dot-product attention; attended context is
    concatenated with the residual input and projected back to output_size.
    """

    def __init__(self, v_size, q_size, output_size, num_head, drop=0.0):
        super(InterModalityUpdate, self).__init__()
        self.v_size = v_size
        self.q_size = q_size
        self.output_size = output_size
        self.num_head = num_head
        # joint key/query/value projections (3 * output_size, split in forward)
        self.v_lin = FCNet(v_size, output_size * 3, drop=drop, activate='relu')
        self.q_lin = FCNet(q_size, output_size * 3, drop=drop, activate='relu')
        # fuse attended context with the residual input
        self.v_output = FCNet(output_size + v_size, output_size, drop=drop, activate='relu')
        self.q_output = FCNet(output_size + q_size, output_size, drop=drop, activate='relu')

    def forward(self, v, q):
        """
        :param v: eeg feature [batch, regions, feature_size]
        :param q: eye feature [batch, regions, feature_size]
        :return: (updated_v, updated_q), each [batch, regions, output_size]
        """
        scale = (self.output_size // self.num_head) ** 0.5
        v_proj = self.v_lin(v)
        q_proj = self.q_lin(q)
        # split joint projection into key / query / value
        v_key, v_query, v_val = torch.split(v_proj, v_proj.size(2) // 3, dim=2)
        q_key, q_query, q_val = torch.split(q_proj, q_proj.size(2) // 3, dim=2)

        def per_head(t):
            # slice the channel axis into num_head equal chunks
            return torch.split(t, t.size(2) // self.num_head, dim=2)

        v_keys, v_queries, v_vals = per_head(v_key), per_head(v_query), per_head(v_val)
        q_keys, q_queries, q_vals = per_head(q_key), per_head(q_query), per_head(q_val)
        v_ctx, q_ctx = [], []
        for h in range(self.num_head):
            # [batch, num_obj, max_len] cross-modal attention, scaled by sqrt(d_head)
            q2v = F.softmax(v_queries[h] @ q_keys[h].transpose(1, 2) / scale, dim=2)
            v2q = F.softmax(q_queries[h] @ v_keys[h].transpose(1, 2) / scale, dim=2)
            v_ctx.append((q2v.unsqueeze(3) * q_vals[h].unsqueeze(1)).sum(2))
            q_ctx.append((v2q.unsqueeze(3) * v_vals[h].unsqueeze(1)).sum(2))
        # concat residual input with all heads' context, then project
        updated_v = self.v_output(torch.cat([v] + v_ctx, dim=2))
        updated_q = self.q_output(torch.cat([q] + q_ctx, dim=2))
        return updated_v, updated_q
class OneSideInterModalityUpdate(nn.Module):
    """One-side Inter-Modality Attention Flow.

    Instead of the parallel V->Q & Q->V update, only the target modality is
    updated by attending over the source modality (per the paper: first
    V->Q, then Q->V in a second instance of this module).
    """

    def __init__(self, src_size, tgt_size, output_size, num_head, drop=0.0):
        super(OneSideInterModalityUpdate, self).__init__()
        self.src_size = src_size
        self.tgt_size = tgt_size
        self.output_size = output_size
        self.num_head = num_head
        # source provides key+value (2 * output_size); target provides the query
        self.src_lin = FCNet(src_size, output_size * 2, drop=drop, activate='relu')
        self.tgt_lin = FCNet(tgt_size, output_size, drop=drop, activate='relu')
        self.tgt_output = FCNet(output_size + tgt_size, output_size, drop=drop, activate='relu')

    def forward(self, src, tgt):
        """
        :param src: source modality feature [batch, regions, feature_size]
        :param tgt: target modality feature [batch, regions, feature_size]
        :return: updated target feature [batch, regions, output_size]
        """
        scale = (self.output_size // self.num_head) ** 0.5
        src_proj = self.src_lin(src)
        tgt_query = self.tgt_lin(tgt)
        src_key, src_val = torch.split(src_proj, src_proj.size(2) // 2, dim=2)
        # per-head channel slices
        keys = torch.split(src_key, src_key.size(2) // self.num_head, dim=2)
        vals = torch.split(src_val, src_val.size(2) // self.num_head, dim=2)
        queries = torch.split(tgt_query, tgt_query.size(2) // self.num_head, dim=2)
        ctx = []
        for h in range(self.num_head):
            # [batch, num_tgt, num_src] scaled dot-product attention over src
            attn = F.softmax(queries[h] @ keys[h].transpose(1, 2) / scale, dim=2)
            ctx.append((attn.unsqueeze(3) * vals[h].unsqueeze(1)).sum(2))
        # residual concat + projection
        return self.tgt_output(torch.cat([tgt] + ctx, dim=2))
class DyIntraModalityUpdate(nn.Module):
    """Dynamic Intra-Modality Attention Flow.

    Self-attention within each modality, where the K/Q/V of one modality are
    scaled by a (1 + sigmoid) gate conditioned on the other modality's mean
    feature, making the intra-modality update cross-modally "dynamic".
    """

    def __init__(self, v_size, q_size, output_size, num_head, drop=0.0):
        super(DyIntraModalityUpdate, self).__init__()
        self.v_size = v_size
        self.q_size = q_size
        self.output_size = output_size
        self.num_head = num_head
        # gates conditioned on the opposite modality's pooled feature
        self.v4q_gate_lin = FCNet(v_size, output_size, drop=drop)
        self.q4v_gate_lin = FCNet(q_size, output_size, drop=drop)
        # joint K/Q/V projections
        self.v_lin = FCNet(v_size, output_size * 3, drop=drop, activate='relu')
        self.q_lin = FCNet(q_size, output_size * 3, drop=drop, activate='relu')
        self.v_output = FCNet(output_size, output_size, drop=drop, activate='relu')
        self.q_output = FCNet(output_size, output_size, drop=drop, activate='relu')
        self.relu = nn.ReLU()
        self.tanh = nn.Tanh()
        self.sigmoid = nn.Sigmoid()

    def forward(self, v, q):
        """
        :param v: [batch_size, num_obj, feature_size]
        :param q: [batch_size, max_len, feature_size]
        :return: (updated_v, updated_q)
        """
        scale = (self.output_size // self.num_head) ** 0.5
        # conditioning gates built from the other modality's mean feature
        v_mean = v.sum(1) / v.shape[1]
        q_mean = q.sum(1) / q.shape[1]
        v4q_gate = self.sigmoid(self.v4q_gate_lin(v_mean)).unsqueeze(1)  # [batch, 1, feat]
        q4v_gate = self.sigmoid(self.q4v_gate_lin(q_mean)).unsqueeze(1)  # [batch, 1, feat]
        v_proj = self.v_lin(v)
        q_proj = self.q_lin(q)
        v_key, v_query, v_val = torch.split(v_proj, v_proj.size(2) // 3, dim=2)
        q_key, q_query, q_val = torch.split(q_proj, q_proj.size(2) // 3, dim=2)
        # apply the conditioned (1 + gate) scaling
        v_key, v_query, v_val = [(1 + q4v_gate) * t for t in (v_key, v_query, v_val)]
        q_key, q_query, q_val = [(1 + v4q_gate) * t for t in (q_key, q_query, q_val)]

        def per_head(t):
            return torch.split(t, t.size(2) // self.num_head, dim=2)

        v_keys, v_queries, v_vals = per_head(v_key), per_head(v_query), per_head(v_val)
        q_keys, q_queries, q_vals = per_head(q_key), per_head(q_query), per_head(q_val)
        v_ctx, q_ctx = [], []
        for h in range(self.num_head):
            # intra-modality scaled dot-product attention
            v2v = F.softmax(v_queries[h] @ v_keys[h].transpose(1, 2) / scale, dim=2)
            q2q = F.softmax(q_queries[h] @ q_keys[h].transpose(1, 2) / scale, dim=2)
            v_ctx.append((v2v.unsqueeze(3) * v_vals[h].unsqueeze(1)).sum(2))
            q_ctx.append((q2q.unsqueeze(3) * q_vals[h].unsqueeze(1)).sum(2))
        # residual add + projection
        updated_v = self.v_output(v + torch.cat(v_ctx, dim=2))
        updated_q = self.q_output(q + torch.cat(q_ctx, dim=2))
        return updated_v, updated_q
class SingleBlock(nn.Module):
    """Weight-shared stack of inter- and intra-modality attention.

    The same three sub-blocks (V->Q, Q->V, dynamic intra) are applied
    num_blocks times, so every iteration shares parameters. Outputs of every
    stage are summed (dense residual aggregation).
    """

    def __init__(self, num_blocks, v_size, q_size, output_size, num_inter_head, num_intra_head, drop=0.0):
        super(SingleBlock, self).__init__()
        self.v_size = v_size
        self.q_size = q_size
        self.output_size = output_size
        self.num_inter_head = num_inter_head
        self.num_intra_head = num_intra_head
        self.num_block = num_blocks
        self.v_lin = FCNet(v_size, output_size, drop=drop, activate='relu')
        self.q_lin = FCNet(q_size, output_size, drop=drop, activate='relu')
        self.v2q_interBlock = OneSideInterModalityUpdate(output_size, output_size, output_size, num_inter_head, drop)
        self.q2v_interBlock = OneSideInterModalityUpdate(output_size, output_size, output_size, num_inter_head, drop)
        self.intraBlock = DyIntraModalityUpdate(output_size, output_size, output_size, num_intra_head, drop)

    def forward(self, v, q):
        """
        :param v: eeg feature [batch_size, regions, feature_size]
        :param q: eye feature [batch_size, regions, feature_size]
        :return: (sum of all v stages, sum of all q stages)
        """
        v = self.v_lin(v)
        q = self.q_lin(q)
        # histories carry residual states; outs collect stage outputs to sum
        v_hist, q_hist = [v], [q]
        v_outs, q_outs = [v], [q]
        for _ in range(self.num_block):
            q_inter = self.v2q_interBlock(v_hist[-1], q_hist[-1])
            q_hist.append(q_inter)
            v_inter = self.q2v_interBlock(q_hist[-1], v_hist[-1])
            v_hist.append(v_inter)
            # intra update on the sum of the last two residual states
            v_intra, q_intra = self.intraBlock(v_hist[-1] + v_hist[-2], q_hist[-1] + q_hist[-2])
            v_hist.append(v_intra)
            q_hist.append(q_intra)
            v_outs += [v_inter, v_intra]
            q_outs += [q_inter, q_intra]
            # dense residual state for the next iteration
            v_hist.append(v_hist[-1] + v_hist[-2] + v_hist[-3])
            q_hist.append(q_hist[-1] + q_hist[-2] + q_hist[-3])
        return sum(v_outs), sum(q_outs)
class MultiBlocks(nn.Module):
    """Stack of (inter, intra) attention layers with per-layer parameters.

    Each of the num_blocks layers owns its own InterModalityUpdate and
    DyIntraModalityUpdate; stage outputs are summed via dense residual
    aggregation.
    """

    def __init__(self, num_blocks, v_size, q_size, output_size, num_inter_head, num_intra_head, drop=0.0):
        super(MultiBlocks, self).__init__()
        self.v_size = v_size
        self.q_size = q_size
        self.output_size = output_size
        self.num_inter_head = num_inter_head
        self.num_intra_head = num_intra_head
        self.num_blocks = num_blocks
        self.v_lin = FCNet(v_size, output_size, drop=drop, activate='relu')
        self.q_lin = FCNet(q_size, output_size, drop=drop, activate='relu')
        # layer i occupies slots [2*i] (inter) and [2*i + 1] (intra)
        blocks = []
        for _ in range(self.num_blocks):
            blocks.append(InterModalityUpdate(output_size, output_size, output_size, num_inter_head, drop))
            blocks.append(DyIntraModalityUpdate(output_size, output_size, output_size, num_intra_head, drop))
        self.multi_blocks = nn.ModuleList(blocks)

    def forward(self, v, q):
        """
        :param v: eeg feature [batch, regions, feature_size]
        :param q: eye feature [batch, regions, feature_size]
        :return: (sum of all v stages, sum of all q stages)
        """
        v = self.v_lin(v)
        q = self.q_lin(q)
        v_hist, q_hist = [v], [q]
        v_outs, q_outs = [v], [q]
        for i in range(self.num_blocks):
            inter = self.multi_blocks[2 * i]
            intra = self.multi_blocks[2 * i + 1]
            v_inter, q_inter = inter(v_hist[-1], q_hist[-1])
            q_hist.append(q_inter)
            v_hist.append(v_inter)
            # intra update on the sum of the last two residual states
            v_intra, q_intra = intra(v_hist[-1] + v_hist[-2], q_hist[-1] + q_hist[-2])
            v_hist.append(v_intra)
            q_hist.append(q_intra)
            v_outs += [v_inter, v_intra]
            q_outs += [q_inter, q_intra]
            # dense residual state for the next layer
            v_hist.append(v_hist[-1] + v_hist[-2] + v_hist[-3])
            q_hist.append(q_hist[-1] + q_hist[-2] + q_hist[-3])
        return sum(v_outs), sum(q_outs)
class EEGFeatureExtractor(nn.Module):
    """Region-level EEG feature extractor.

    Groups the 32 EEG electrodes into 14 anatomical regions, runs one
    bidirectional LSTM per region over that region's electrode sequence, and
    batch-normalizes the stacked last-step outputs.
    """

    def __init__(self, eeg_size, output_size):
        """
        :param eeg_size: per-electrode feature size (LSTM input size)
        :param output_size: per-region output size (2 * LSTM hidden size)
        """
        super(EEGFeatureExtractor, self).__init__()
        self.eeg_size = eeg_size
        self.output_size = output_size
        self.regions = 14  # number of electrode regions
        # electrode indices (into the 32 EEG channels) belonging to each region
        self.regions_indexs = [torch.LongTensor(e) for e in
                               [[0, 1, 16, 17], [2, 18, 19], [3, 4], [20], [7, 8],
                                [21, 25, 26], [5, 22], [6, 23, 24],
                                [9, 27], [11], [29], [10, 15, 28],
                                [12, 30], [13, 14, 31]]]
        # one BiLSTM per region; attribute name (including the typo) is kept
        # so existing checkpoints still load
        self.reginalFeatureExtractors = nn.ModuleList(
            nn.LSTM(input_size=eeg_size, hidden_size=output_size // 2,
                    batch_first=True, bias=True, bidirectional=True)
            for _ in range(self.regions))
        self.bn = nn.BatchNorm1d(num_features=self.regions)

    def forward(self, x):
        """
        :param x: [batch, n_electrode, eeg_size]
        :return: [batch, regions, output_size]
        """
        batch = x.shape[0]
        region_feats = []
        for idx, lstm in zip(self.regions_indexs, self.reginalFeatureExtractors):
            # FIX: move the index to the input's device instead of relying on the
            # module-global `device` (works on CPU/GPU and in isolation)
            region_x = x.index_select(dim=1, index=idx.to(x.device))
            hidden, _ = lstm(region_x)
            # last "time step" (= last electrode) of the BiLSTM: [batch, output_size];
            # the original .squeeze() was dropped — it could collapse a batch of 1
            region_feats.append(hidden[:, -1, :])
        feats = torch.cat(region_feats, dim=-1).reshape(batch, self.regions, self.output_size)
        return self.bn(feats)
class PeripheralFeatureExtractor(nn.Module):
    """Per-channel BiLSTM feature extractor for the 8 peripheral signals.

    Each of the 8 peripheral channels is treated as its own "region" with a
    dedicated bidirectional LSTM; last-step outputs are stacked and
    batch-normalized.
    """

    def __init__(self, peripheral_size, output_size):
        """
        :param peripheral_size: per-channel feature size (LSTM input size)
        :param output_size: per-channel output size (2 * LSTM hidden size)
        """
        super(PeripheralFeatureExtractor, self).__init__()
        self.peripheral_size = peripheral_size
        self.output_size = output_size
        self.regions = 8  # one region per peripheral channel
        self.regions_indexs = [torch.LongTensor([i]) for i in range(self.regions)]
        # the original appended 8 identical LSTM constructions by hand; build them in a loop
        self.eyeFeatureExtractor = nn.ModuleList(
            nn.LSTM(input_size=peripheral_size, hidden_size=output_size // 2,
                    batch_first=True, bias=True, bidirectional=True)
            for _ in range(self.regions))
        self.bn = nn.BatchNorm1d(num_features=self.regions)

    def forward(self, x):
        """
        :param x: peripheral feature [batch, 8, peripheral_size]
        :return: [batch, regions, output_size]
        """
        batch = x.shape[0]
        region_feats = []
        for idx, lstm in zip(self.regions_indexs, self.eyeFeatureExtractor):
            # FIX: index follows the input's device instead of the module-global `device`
            channel = x.index_select(dim=1, index=idx.to(x.device))
            hidden, _ = lstm(channel)
            # last step of the BiLSTM: [batch, output_size]
            region_feats.append(hidden[:, -1, :])
        feats = torch.cat(region_feats, dim=-1).reshape(batch, self.regions, self.output_size)
        return self.bn(feats)
class PeripheralFeatureExtractor2(nn.Module):
    """FC-based peripheral feature extractor over 6 channel groups.

    Groups the 8 peripheral channels into 6 regions ([0,1], [2,3] and four
    singletons), flattens each group and maps it to output_size with a small
    FC layer, then batch-normalizes the stacked region features.
    """

    def __init__(self, peripheral_size, output_size):
        """
        :param peripheral_size: per-channel feature size (5 for DEAP features)
        :param output_size: per-region output feature size
        """
        super(PeripheralFeatureExtractor2, self).__init__()
        self.peripheral_size = peripheral_size
        self.output_size = output_size
        self.regions = 6
        # channel groups; the paired groups flatten to 2*5=10 inputs, singletons to 5
        self.regions_indexs = [torch.LongTensor(e) for e in
                               [[0, 1],
                                [2, 3],
                                [4],
                                [5],
                                [6],
                                [7]]]
        eye_extractor = []
        eye_extractor.append(FCNet(in_size=10, out_size=output_size, activate='relu'))
        eye_extractor.append(FCNet(in_size=10, out_size=output_size, activate='relu'))
        eye_extractor.append(FCNet(in_size=5, out_size=output_size, activate='relu'))
        eye_extractor.append(FCNet(in_size=5, out_size=output_size, activate='relu'))
        eye_extractor.append(FCNet(in_size=5, out_size=output_size, activate='relu'))
        eye_extractor.append(FCNet(in_size=5, out_size=output_size, activate='relu'))
        self.eyeFeatureExtractor = nn.ModuleList(eye_extractor)
        self.bn = nn.BatchNorm1d(num_features=self.regions)

    def forward(self, x):
        """
        :param x: peripheral feature [batch, 8, peripheral_size]
        :return: [batch, regions, output_size]
        """
        batch = x.shape[0]
        region_outs = []
        for idx, fc in zip(self.regions_indexs, self.eyeFeatureExtractor):
            # FIX: index follows the input's device instead of the module-global `device`
            region = x.index_select(dim=1, index=idx.to(x.device)).reshape(batch, -1)
            region_outs.append(fc(region))
        feats = torch.cat(region_outs, dim=-1).reshape(batch, self.regions, self.output_size)
        return self.bn(feats)
class Senti_Map_Classifier(nn.Sequential):
    """Sentiment-map classifier.

    For each emotion class, k 1x1-conv "detectors" per modality produce
    region-relevance maps; relevance-weighted, mean-pooled modality features
    are fused per class with a bilinear layer and scored by a per-class FC
    head, yielding one logit per emotion class.
    """

    def __init__(self, in_features, mid_features, out_features, drop=0.0):
        """
        :param in_features: per-region feature size of both modalities
        :param mid_features: hidden size of the per-class classifier heads
        :param out_features: number of emotion classes
        :param drop: dropout probability
        """
        super(Senti_Map_Classifier, self).__init__()
        self.k = 10  # number of detectors per emotion class
        self.emotion_class = out_features
        # per-class, per-modality banks of k 1x1 Conv1d detectors
        eeg_detectors = []
        eye_detectors = []
        for i in range(out_features):
            eeg_detectors.append(
                nn.Conv1d(in_channels=in_features, out_channels=self.k, kernel_size=1, stride=1, padding=0, bias=True))
            eye_detectors.append(
                nn.Conv1d(in_channels=in_features, out_channels=self.k, kernel_size=1, stride=1, padding=0, bias=True))
        self.eeg_detectors = nn.ModuleList(eeg_detectors)
        self.eye_detectors = nn.ModuleList(eye_detectors)
        # lin1/lin2 are unused by forward but kept so existing checkpoints load
        self.lin1 = FCNet(in_features * self.emotion_class, mid_features, activate='relu', drop=drop)
        self.lin2 = FCNet(mid_features, out_features, drop=drop)
        # per-class bilinear fusion of the two modality summaries
        self.bilinears = nn.ModuleList([nn.Bilinear(in1_features=in_features, in2_features=in_features, out_features=in_features) for _ in range(self.emotion_class)])
        emotion_classifer = []
        for i in range(self.emotion_class):
            emotion_classifer.append(nn.Sequential(
                nn.Dropout(p=drop),
                FCNet(in_features, mid_features, activate='relu', drop=drop),
                FCNet(mid_features, 1, drop=drop)
            ))
        self.emotion_classifer = nn.ModuleList(emotion_classifer)

    def _senti_relevance(self, detectors, feats):
        """Shared detector pass: [batch, r, d] -> [batch, emotion_class, r, k]."""
        b, r, d = feats.shape
        feats = feats.permute(0, 2, 1)  # Conv1d expects [batch, channels, length]
        per_class = []
        for i in range(self.emotion_class):
            # [batch, k, r], softmax-normalized over regions
            per_class.append(torch.softmax(detectors[i](feats), dim=-1).unsqueeze(dim=1))
        activate = torch.cat(per_class, dim=1)
        assert activate.shape == torch.Size([b, self.emotion_class, self.k, r]), \
            "sentiment map wrong!! {}".format(activate.shape)
        return activate.permute(0, 1, 3, 2)

    def eeg_senti_relevance_detect(self, v):
        """EEG relevance maps: [batch, r, d] -> [batch, emotion_class, r, k]."""
        return self._senti_relevance(self.eeg_detectors, v)

    def eye_senti_relevance_detect(self, v):
        """Eye/peripheral relevance maps: [batch, r, d] -> [batch, emotion_class, r, k].

        BUG FIX: the original applied self.eeg_detectors here, so the eye
        detector bank was never used (its parameters received no gradient).
        """
        return self._senti_relevance(self.eye_detectors, v)

    def forward(self, v, q):
        """
        :param v: eeg feature [batch, r1, features]
        :param q: eye feature [batch, r2, features]
        :return: [batch, emotion_class] logits
        """
        # [batch, emotion_class, r, k] relevance maps per modality
        eeg_relevance = self.eeg_senti_relevance_detect(v)
        eye_relevance = self.eye_senti_relevance_detect(q)
        # average the k detectors -> [batch, emotion_class, r, 1] region attention
        attn_eeg = eeg_relevance.sum(dim=3, keepdim=True) / self.k
        attn_eye = eye_relevance.sum(dim=3, keepdim=True) / self.k
        # [batch, emotion_class, r, 1] * [batch, 1, r, features]
        # -> class-specific relevance-weighted feature maps
        map_eeg = attn_eeg * v.unsqueeze(dim=1)
        map_eye = attn_eye * q.unsqueeze(dim=1)
        logits = []
        for i in range(self.emotion_class):
            # mean-pool over regions, then fuse + score for this class
            emotion_specific_eeg = map_eeg[:, i, :, :].squeeze().mean(1).squeeze()
            emotion_specific_eye = map_eye[:, i, :, :].squeeze().mean(1).squeeze()
            fused = self.bilinears[i](emotion_specific_eeg, emotion_specific_eye)
            logits.append(self.emotion_classifer[i](fused))
        return torch.cat(logits, dim=-1)
class Hierarchical_ATTN_With_Senti_Map(nn.Module):
    """EEG + peripheral fusion network with the sentiment-map classifier.

    Pipeline: per-modality region feature extraction -> L2 feature
    normalization -> stacked inter-/intra-modality attention (MultiBlocks)
    -> Senti_Map_Classifier producing class logits.
    """

    def __init__(self, class_num=4):
        """
        :param class_num: number of emotion classes to predict
        """
        super(Hierarchical_ATTN_With_Senti_Map, self).__init__()
        self.eeg_features = 64  # 256
        self.peripheral_features = 64  # 256
        self.hidden_feature = 128  # 256
        self.num_inter_head = 4
        self.num_intra_head = 4
        self.num_block = 1
        assert self.hidden_feature % self.num_inter_head == 0, 'hidden features size can not be divided by header nums, please check!!'
        # BUG FIX: this assert previously re-checked num_inter_head,
        # leaving num_intra_head unvalidated
        assert self.hidden_feature % self.num_intra_head == 0, 'hidden features size can not be divided by header nums, please check!!'
        # basic per-modality feature extractors
        self.eegFeatureExtractor = EEGFeatureExtractor(eeg_size=5, output_size=self.eeg_features)
        self.eyeFeatureExtractor = PeripheralFeatureExtractor2(peripheral_size=5, output_size=self.peripheral_features)
        # inter- & intra-modality attention flow for cross-modal fusion
        self.interIntraBlocks = MultiBlocks(
            num_blocks=self.num_block,
            v_size=self.peripheral_features,
            q_size=self.eeg_features,
            output_size=self.hidden_feature,
            num_inter_head=self.num_inter_head,
            num_intra_head=self.num_intra_head,
            drop=0.5
        )
        # emotion classifier
        self.classifier = Senti_Map_Classifier(
            in_features=self.hidden_feature,
            mid_features=512, out_features=class_num,
            drop=0.5)

    def forward(self, v, q):
        """
        :param v: eeg feature [batch, 32, 5]
        :param q: peripheral feature [batch, 8, 5]
        :return: predicted logits [batch, class_num]
        """
        # per-modality region features
        v = self.eegFeatureExtractor(v)
        q = self.eyeFeatureExtractor(q)
        # L2-normalize each region feature vector (epsilon avoids division by zero)
        v = v / (v.norm(p=2, dim=2, keepdim=True) + 1e-12).expand_as(v)  # [batch, num_obj, feature]
        q = q / (q.norm(p=2, dim=2, keepdim=True) + 1e-12).expand_as(q)
        # inter- & intra-modality attention flow
        v, q = self.interIntraBlocks(v, q)
        # predict logits
        answer = self.classifier(v, q)
        return answer
def generate_k_data(data,n_split=10,shuffle=True):
    """Stub for a k-fold data-split generator.

    NOTE(review): unfinished — currently only shuffles `data` in place (when
    ``shuffle`` is True) and computes ``total_count``; the per-fold loop body
    was never implemented. TODO: yield per-fold (train, test) partitions.
    """
    if shuffle:
        np.random.shuffle(data)
    total_count = data.shape[0]
    for k in range(n_split):
        pass
def subject_dependent(individual=1, class_target=4):
    """Run one subject-dependent DEAP experiment end to end.

    Loads the subject's train/validate/test splits, trains
    Hierarchical_ATTN_With_Senti_Map with label smoothing, warmup + cosine LR
    scheduling and gradient clipping, checkpoints the best validation
    accuracy, then evaluates that checkpoint on the test split and writes the
    learning curves, a CSV summary and a classification report under
    ./results.

    :param individual: DEAP subject id (used for data loading and save paths)
    :param class_target: index into class_list (0-3 binary targets, 4 = 4-class)
    """
    class_list = [
        "Valence",
        "Arousal",
        "Dominance",
        "Liking",
        "Valence-Arousal"
    ]
    class_nums = [2, 2, 2, 2, 4]
    test_loss_list = []  # test loss recorded for this run
    test_acc_list = []   # test accuracy recorded for this run
    # prepare data
    nor_method = 1
    label_smooth = 0.1
    shuffle = True
    # reading the data in the whole dataset
    deap = DEAP(individual=individual, normalization=nor_method)
    train_X, train_Y = deap.get_train_data()
    validate_X, validate_Y = deap.get_validate_data()
    test_X, test_Y = deap.get_test_data()
    # Hyper-parameters
    epochs = 100
    batch_size = 512
    learning_rate = 5e-6
    criterion = LabelSmoothSoftmax(lb_smooth=label_smooth)
    # criterion_attn = CrossEntropyLoss()
    print("starting subject-dependent training experiments on individual %d class %s" % (
        individual, class_list[class_target]))
    print("train_X shape", train_X.shape)
    print("train_Y shape", train_Y.shape)
    print("validate_X shape", validate_X.shape)
    print("validate_Y shape", validate_Y.shape)
    print("test_X shape", test_X.shape)
    print("test_Y shape", test_Y.shape)
    # keep only the target label column
    train_Y, test_Y, validate_Y = train_Y[:, class_target].squeeze(), test_Y[:, class_target].squeeze(), validate_Y[:, class_target].squeeze()
    train_loader = DataLoader(dataset=DEAP_DATASET(train_X, train_Y), batch_size=batch_size, shuffle=shuffle,
                              num_workers=0)
    validate_loader = DataLoader(dataset=DEAP_DATASET(validate_X, validate_Y), batch_size=batch_size, shuffle=shuffle,
                                 num_workers=0)
    test_loader = DataLoader(dataset=DEAP_DATASET(test_X, test_Y), batch_size=batch_size, shuffle=shuffle,
                             num_workers=0)
    # experiment tag used in output file names / curve titles
    exp_des = "%d_dependent_%s_%s_%d_%d_%s" % (
        individual, 'shuffle' if shuffle else "without_shuffle", 'deap', epochs, batch_size,
        class_list[class_target])
    print("model construction...")
    net = Hierarchical_ATTN_With_Senti_Map(class_num=class_nums[class_target])
    # if fine_tuning we continue train the pretrained model
    net = net.to(device)
    save_model_path = '../../saved_models/%s/deap/subject_%d/%s/' % (
        net.__class__.__name__, individual, class_list[class_target])
    if not os.path.exists(save_model_path):
        os.makedirs(save_model_path)
    optimization = Adam(net.parameters(), lr=learning_rate, weight_decay=0.001)
    # save model training state
    running_loss_list = []
    running_acc_list = []
    validate_loss_list = []
    validate_acc_list = []
    best_acc = -1
    print("start training...")
    # warmup during the first 10% of epochs, then cosine annealing
    scheduler_cosine = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer=optimization, T_max=epochs)
    scheduler_warmup = GradualWarmupScheduler(optimizer=optimization, multiplier=10,
                                              total_epoch=np.ceil(0.1 * epochs),
                                              after_scheduler=scheduler_cosine)
    for epoch in range(epochs):
        net.train()
        running_loss = 0.0
        correct = 0.0
        total = 0.0
        for i, (feature, target) in enumerate(train_loader):
            feature = feature.reshape(-1, 40, 5)
            optimization.zero_grad()
            # print("train labels: {}".format(target))
            # print("NaN statistics", torch.sum(torch.isnan(feature), dim=0))
            # channels 0-31 are EEG, 32-39 are peripheral signals
            eeg = feature[:, :32, :]
            peripheral = feature[:, 32:, :]
            eeg = eeg.reshape(-1, 32, 5)
            peripheral = peripheral.reshape(-1, 8, 5)
            eeg = eeg.to(device)
            peripheral = peripheral.to(device)
            target = target.type(torch.LongTensor).to(device)
            out = net(eeg, peripheral)
            cross_entropy_loss = criterion(out, target)
            # eeg_attn_loss = criterion_attn(eeg_attn, target)
            # eye_attn_loss = criterion_attn(eye_attn, target)
            cross_entropy_loss.backward()
            # clip gradients to stabilize training
            clip_grad_norm_(net.parameters(), max_norm=10)
            optimization.step()
            running_loss += cross_entropy_loss.item()
            _, prediction = torch.max(out.data, dim=-1)
            total += target.size(0)
            correct += prediction.eq(target.data).cpu().sum().item()
        cur_loss = running_loss / total
        cur_acc = correct / total
        # print(cur_acc, correct, total)
        if isinstance(cur_acc, torch.Tensor):
            cur_acc = cur_acc.item()
        if isinstance(cur_loss, torch.Tensor):
            cur_loss = cur_loss.item()
        print('Training Loss: %.10f | Training Acc: %.3f%% (%d/%d)' % (
            cur_loss, 100 * cur_acc, correct, total))
        running_loss_list.append(cur_loss)
        running_acc_list.append(cur_acc)
        scheduler_warmup.step()
        # validate every 5 epochs and checkpoint on improved accuracy
        if epoch % 5 == 0:
            net.eval()
            print("start evaluating...")
            validate_loss = 0.0
            validate_correct = 0.0
            validate_total = 0.0
            for i, (feature, target) in enumerate(validate_loader):
                feature = feature.reshape(-1, 40, 5)
                eeg = feature[:, :32, :]
                peripheral = feature[:, 32:, :]
                eeg = eeg.reshape(-1, 32, 5)
                peripheral = peripheral.reshape(-1, 8, 5)
                eeg = eeg.to(device)
                peripheral = peripheral.to(device)
                target = target.type(torch.LongTensor).to(device)
                with torch.no_grad():
                    out = net(eeg, peripheral)
                    loss = criterion(out, target)
                    validate_loss += loss.item()
                    _, prediction = torch.max(out.data, dim=-1)
                    validate_total += target.size(0)
                    validate_correct += prediction.eq(target.data).cpu().sum().item()
            validate_acc = validate_correct / validate_total
            validate_loss = validate_loss / validate_total
            if isinstance(validate_acc, torch.Tensor):
                validate_acc = validate_acc.item()
            if isinstance(validate_loss, torch.Tensor):
                validate_loss = validate_loss.item()
            print('Validate Loss: %.10f | Validate-Acc: %.3f%% (%d/%d)' % (
                validate_loss, 100 * validate_acc, validate_correct, validate_total))
            validate_acc_list.append(validate_acc)
            validate_loss_list.append(validate_loss)
            if validate_acc > best_acc:
                best_acc = validate_acc
                print("better model founded in validating sets, start saving new model")
                model_name = '%s' % (net.__class__.__name__)
                state = {
                    'net': net.state_dict(),
                    'epoch': epoch,
                    'best_acc': best_acc,
                    'current_loss': validate_loss
                }
                torch.save(state, os.path.join(save_model_path, model_name))
    # evaluate the best validation checkpoint on the test set
    checkpoint = torch.load(os.path.join(save_model_path, net.__class__.__name__))
    net.load_state_dict(checkpoint['net'])
    print("start evaluating...")
    testing_loss = 0.0
    test_correct = 0.0
    test_total = 0.0
    y_pre = []
    y_true = []
    for i, (feature, target) in enumerate(test_loader):
        feature = feature.reshape(-1, 40, 5)
        eeg = feature[:, :32, :]
        peripheral = feature[:, 32:, :]
        eeg = eeg.reshape(-1, 32, 5)
        peripheral = peripheral.reshape(-1, 8, 5)
        eeg = eeg.to(device)
        peripheral = peripheral.to(device)
        target = target.type(torch.LongTensor).to(device)
        y_true.extend(target.cpu().numpy().tolist())
        with torch.no_grad():
            out = net(eeg, peripheral)
            loss = criterion(out, target)
            testing_loss += loss.item()
            _, prediction = torch.max(out.data, dim=-1)
            y_pre.extend(prediction.cpu().numpy().tolist())
            test_total += target.size(0)
            test_correct += prediction.eq(target.data).cpu().sum().item()
    test_acc = test_correct / test_total
    test_loss = testing_loss / test_total
    if isinstance(test_acc, torch.Tensor):
        test_acc = test_acc.item()
    if isinstance(test_loss, torch.Tensor):
        test_loss = test_loss.item()
    print('Test Loss: %.10f | Test Acc: %.3f%% (%d/%d)' % (
        test_loss, 100 * test_acc, test_correct, test_total))
    test_acc_list.append(test_acc)
    test_loss_list.append(test_loss)
    # persist curves, CSV summary and classification report
    plot_acc_loss_curve({'train_loss': running_loss_list,
                         'train_acc': running_acc_list,
                         'test_loss': validate_loss_list,
                         'test_acc': validate_acc_list}, net.__class__.__name__, exp_des)
    pd.DataFrame.from_dict({
        'test_loss': test_loss_list,
        'test_acc': test_acc_list
    }).to_csv('./results/deap_individual_%d_%s.csv' % (individual, class_list[class_target]), mode='w', index=False,
              header=True, encoding='utf-8')
    y_true = np.array(y_true)
    y_pre = np.array(y_pre)
    with open('./results/{}_classification_reports.txt'.format(class_list[class_target]), 'a+') as f:
        f.write("***********Predict results of individual {}***********\n".format(individual))
        f.write("classification reports:\n{}\nconfusion matrix:\n{}\noytx_accuracy_score:{}\noytx_precision_score:{}\noytx_recall_score:{}\noytx_f1_score:{}\n".format(classification_report(y_true, y_pre), confusion_matrix(y_true, y_pre), accuracy_score(y_true, y_pre), precision_score(y_true, y_pre, average='macro'), recall_score(y_true, y_pre, average='macro'), f1_score(y_true, y_pre, average='macro')))
        f.write("******************************************************\n")
def subject_independent(all_X, all_Y, individual=1, class_target=4):
    """Leave-one-subject-out (cross-subject) training and evaluation on DEAP.

    Trains on every subject except ``individual``; 30% of the pooled training
    samples form a validation split and the held-out subject's data are the
    test set.  The best-by-validation-accuracy checkpoint is reloaded for the
    final test pass, curves are plotted and per-run metrics written to CSV.

    Args:
        all_X: list of per-subject feature arrays, each reshapeable to (-1, 40, 128).
        all_Y: list of per-subject label arrays, each reshapeable to (-1, 5).
        individual: index of the held-out subject.
        class_target: which label column to predict (index into ``class_list``).
    """
    # cross-subject, leave-one-out
    class_list = [
        "Valence",
        "Arousal",
        "Dominance",
        "Liking",
        "Valence-Arousal"
    ]
    class_nums = [2,2,2,2,4]  # number of classes per target
    test_loss_list = []  # records the test loss of each run
    test_acc_list = []  # records the test accuracy of each run
    # prepare data
    nor_method = 1
    label_smooth = 0.1
    shuffle = True
    kfold = 5
    # reading the data in the whole dataset
    # Hyper-parameters
    epochs = 120
    batch_size = 512
    learning_rate = 1e-3
    criterion = LabelSmoothSoftmax(lb_smooth=label_smooth)
    criterion_attn = CrossEntropyLoss()
    print("starting subject-independent training experiments on individual %d class %s" % (
        individual, class_list[class_target]))
    # Pool every subject except the held-out one for training.
    train_X = np.vstack([item for i, item in enumerate(all_X) if i != individual]).reshape(-1,40,128)
    train_Y = np.vstack([item for i, item in enumerate(all_Y) if i != individual]).reshape(-1,5)
    test_X = all_X[individual].reshape(-1,40,128)
    test_Y = all_Y[individual].reshape(-1,5)
    sample_index = list(range(train_X.shape[0]))
    if shuffle:
        np.random.seed(seed=0)  # fixed seed so the split is reproducible
        np.random.shuffle(sample_index)
    # First 30% of the shuffled pool becomes the validation split.
    # NOTE(review): the removal of these samples from the training set is
    # commented out below, so validation samples are also seen during
    # training — confirm whether that is intended.
    val_X, val_Y = train_X[sample_index[:int(len(sample_index)*0.3)]], train_Y[sample_index[:int(len(sample_index)*0.3)]]
    #train_X, train_Y = train_X[sample_index[int(len(sample_index)*0.3):]], train_Y[sample_index[int(len(sample_index)*0.3):]]
    print("train_X shape", train_X.shape)
    print("train_Y shape", train_Y.shape)
    print("val_X shape", val_X.shape)
    print("val_Y shape", val_Y.shape)
    print("test_X shape", test_X.shape)
    print("test_Y shape", test_Y.shape)
    # Keep only the requested label column.
    train_Y, test_Y, val_Y = train_Y[:, class_target].squeeze(), test_Y[:, class_target].squeeze(), val_Y[:, class_target].squeeze()
    train_loader = DataLoader(dataset=DEAP_DATASET(train_X, train_Y), batch_size=batch_size, shuffle=shuffle,
                              num_workers=0)
    val_loader = DataLoader(dataset=DEAP_DATASET(val_X, val_Y), batch_size=batch_size, shuffle=shuffle,
                            num_workers=0)
    test_loader = DataLoader(dataset=DEAP_DATASET(test_X, test_Y), batch_size=batch_size, shuffle=shuffle,
                             num_workers=0)
    # Experiment tag used for plot/file names.
    exp_des = "%d_dependent_in_%s_%s_%d_%d_%s" % (
        individual, 'shuffle' if shuffle else "without_shuffle", 'deap', epochs, batch_size,
        class_list[class_target])
    print("model construction...")
    net = Hierarchical_ATTN_With_Senti_Map(class_num=class_nums[class_target])
    # if fine_tuning we continue train the pretrained model
    net = net.to(device)
    save_model_path = '../../saved_models/%s/deap_subjuect_independent/subject_%d/%s/' % (
        net.__class__.__name__, individual, class_list[class_target])
    if not os.path.exists(save_model_path):
        os.makedirs(save_model_path)
    optimization = Adam(net.parameters(), lr=learning_rate, weight_decay=0.001)
    # save model training state
    running_loss_list = []
    running_acc_list = []
    testing_loss_list = []
    testing_acc_list = []
    best_acc = -1  # best validation accuracy seen so far
    print("start training...")
    # Warmup (first 10% of epochs) into a cosine-annealing schedule.
    scheduler_cosine = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer=optimization, T_max=epochs)
    scheduler_warmup = GradualWarmupScheduler(optimizer=optimization, multiplier=10,
                                              total_epoch=np.ceil(0.1 * epochs),
                                              after_scheduler=scheduler_cosine)
    for epoch in range(epochs):
        net.train()
        running_loss = 0.0
        correct = 0.0
        total = 0.0
        for i, (feature, target) in enumerate(train_loader):
            optimization.zero_grad()
            # print("dirty-data count", torch.sum(torch.isnan(feature), dim=0))
            # Split the 40 channels into 32 EEG + 8 peripheral channels.
            eeg = feature[:, :32]
            peripheral = feature[:, 32:]
            eeg = eeg.reshape(-1, 32, 128)
            eeg = eeg.to(device)
            peripheral = peripheral.reshape(-1, 8, 128)
            peripheral = peripheral.to(device)
            target = target.type(torch.LongTensor).to(device)
            out, eeg_attn, eye_attn = net(eeg, peripheral)
            # print("batch output",out[0])
            # Total loss = main classification loss + the two attention-head losses.
            cross_entropy_loss = criterion(out, target)
            eeg_attn_loss = criterion_attn(eeg_attn, target)
            eye_attn_loss = criterion_attn(eye_attn, target)
            loss = cross_entropy_loss + eeg_attn_loss + eye_attn_loss
            loss.backward()
            # NOTE(review): optimizer.param_groups is a list of dicts, so
            # unpacking each group as (name, params) and reading
            # params.requires_grad / params.grad looks wrong — probably
            # net.named_parameters() was intended; confirm before relying on
            # this debug output.
            for name, params in optimization.param_groups:
                print('打印梯度')
                print('-->name:', name, '-->grad_requirs:', params.requires_grad, \
                      ' -->grad_value:', params.grad)
            clip_grad_norm_(net.parameters(), max_norm=10)
            optimization.step()
            running_loss += loss.item()
            # print("batch loss", loss.item())
            _, prediction = torch.max(out.data, dim=-1)
            total += target.size(0)
            correct += prediction.eq(target.data).cpu().sum().item()
        cur_loss = running_loss / len(train_loader)  # mean loss per batch
        cur_acc = correct / total
        # print(cur_acc, correct, total)
        if isinstance(cur_acc, torch.Tensor):
            cur_acc = cur_acc.item()
        if isinstance(cur_loss, torch.Tensor):
            cur_loss = cur_loss.item()
        print('Loss: %.10f | Acc: %.3f%% (%d/%d)' % (
            cur_loss, 100 * cur_acc, correct, total))
        running_loss_list.append(cur_loss)
        running_acc_list.append(cur_acc)
        scheduler_warmup.step()
        # Validate every epoch.
        if epoch % 1 == 0:
            net.eval()
            print("start evaluating...")
            test_loss = 0.0
            test_correct = 0.0
            test_total = 0.0
            for i, (feature, target) in enumerate(val_loader):
                eeg = feature[:, :32]
                peripheral = feature[:, 32:]
                eeg = eeg.reshape(-1, 32, 128)
                eeg = eeg.to(device)
                peripheral = peripheral.reshape(-1, 8, 128)
                peripheral = peripheral.to(device)
                target = target.type(torch.LongTensor).to(device)
                with torch.no_grad():
                    out, eeg_attn, eye_attn = net(eeg, peripheral)
                    loss = criterion(out, target) + criterion_attn(eeg_attn, target) + criterion_attn(eye_attn,
                                                                                                      target)
                test_loss += loss.item()
                _, prediction = torch.max(out.data, dim=-1)
                # print(prediction)
                test_total += target.size(0)
                test_correct += prediction.eq(target.data).cpu().sum().item()
            test_acc = test_correct / test_total
            # NOTE(review): the loop above runs over val_loader but the mean is
            # taken over len(test_loader) — confirm whether len(val_loader)
            # was intended.
            test_loss = test_loss / len(test_loader)
            if isinstance(test_acc, torch.Tensor):
                test_acc = test_acc.item()
            if isinstance(test_loss, torch.Tensor):
                # NOTE(review): assigns val_loss, leaving test_loss unchanged —
                # looks like a typo for `test_loss = test_loss.item()`.
                val_loss = test_loss.item()
            print('Testset Loss: %.10f | Test-Acc: %.3f%% (%d/%d)' % (
                test_loss, 100 * test_acc, test_correct, test_total))
            testing_acc_list.append(test_acc)
            testing_loss_list.append(test_loss)
            # Checkpoint whenever validation accuracy improves.
            if test_acc > best_acc:
                best_acc = test_acc
                print("better model founded in testsets, start saving new model")
                model_name = '%s' % (net.__class__.__name__)
                state = {
                    'net': net.state_dict(),
                    'epoch': epoch,
                    'best_acc': best_acc,
                    'current_loss': test_loss
                }
                torch.save(state, os.path.join(save_model_path, model_name))
    # Reload the best checkpoint and evaluate on the held-out subject.
    # NOTE(review): net.eval() is not called here (unlike the k-fold variants)
    # — dropout/batch-norm would still be in training mode; confirm.
    checkpoint = torch.load(os.path.join(save_model_path, net.__class__.__name__))
    net.load_state_dict(checkpoint['net'])
    print("start evaluating...")
    testing_loss = 0.0
    test_correct = 0.0
    test_total = 0.0
    for i, (feature, target) in enumerate(test_loader):
        eeg = feature[:, :32]
        peripheral = feature[:, 32:]
        eeg = eeg.reshape(-1, 32, 128)
        eeg = eeg.to(device)
        peripheral = peripheral.reshape(-1, 8, 128)
        peripheral = peripheral.to(device)
        target = target.type(torch.LongTensor).to(device)
        with torch.no_grad():
            out, eeg_attn, eye_attn = net(eeg, peripheral)
            loss = criterion(out, target) + criterion_attn(eeg_attn, target) + criterion_attn(eye_attn, target)
        testing_loss += loss.item()
        _, prediction = torch.max(out.data, dim=-1)
        # print(prediction)
        test_total += target.size(0)
        test_correct += prediction.eq(target.data).cpu().sum().item()
    test_acc = test_correct / test_total
    test_loss = testing_loss / len(test_loader)
    if isinstance(test_acc, torch.Tensor):
        test_acc = test_acc.item()
    if isinstance(test_loss, torch.Tensor):
        test_loss = test_loss.item()
    print('Testset Loss: %.10f | Acc: %.3f%% (%d/%d)' % (
        test_loss, 100 * test_acc, test_correct, test_total))
    test_acc_list.append(test_acc)
    test_loss_list.append(test_loss)
    plot_acc_loss_curve({'train_loss': running_loss_list,
                         'train_acc': running_acc_list,
                         'test_loss': testing_loss_list,
                         'test_acc': testing_acc_list}, net.__class__.__name__, exp_des)
    pd.DataFrame.from_dict({
        'test_loss': test_loss_list,
        'test_acc': test_acc_list
    }).to_csv('./subject_independent_results/deap_individual_%d_%s.csv' % (individual, class_list[class_target]), mode='w', index=False,
              header=True, encoding='utf-8')
def subject_dependent_k_fold(individual=1, class_target=4, k_fold = 5):
    """Subject-dependent k-fold cross-validation on DEAP (5-band features).

    For each fold: normalize, train ``Hierarchical_ATTN_With_Senti_Map`` on
    the training split, checkpoint the best model by validation accuracy,
    then evaluate that checkpoint, log classification reports, and finally
    write per-fold metrics plus mean/std rows to CSV.

    Args:
        individual: subject index whose recordings are used.
        class_target: which label column to predict (index into ``class_list``).
        k_fold: number of cross-validation folds.
    """
    # k-fold cv
    class_list = [
        "Valence",
        "Arousal",
        "Dominance",
        "Liking",
        "Valence-Arousal"
    ]
    class_nums = [2, 2, 2, 2, 4]  # number of classes per target
    test_loss_list = []  # per-fold test loss
    test_acc_list = []  # per-fold test accuracy (running count based)
    test_precision_list = []  # per-fold macro precision
    test_recall_list = []  # per-fold macro recall
    test_f1_list = []  # per-fold macro F1
    test_accuray_list = []  # per-fold sklearn accuracy_score
    # prepare data
    nor_method = 0
    label_smooth = 0.3
    shuffle = True
    # reading the data in the whole dataset
    deap = DEAP(individual=individual)
    k_fold_data = deap.get_kfold_X_Y2(k_fold)
    for fold, (train_X, train_Y, test_X, test_Y) in enumerate(k_fold_data):
        print("start {} th cross-validation".format(fold))
        # NOTE(review): nor_method=1 is hard-coded here, ignoring the
        # nor_method variable defined above — confirm which is intended.
        train_X, train_Y, test_X, test_Y = deap_normalization(train_X, train_Y, test_X, test_Y, nor_method=1, merge=2,
                                                              column=0)
        # Hyper-parameters
        epochs = 80
        batch_size = 512
        learning_rate = 1e-4
        criterion = LabelSmoothSoftmax(lb_smooth=label_smooth)
        # criterion_attn = CrossEntropyLoss()
        print("starting subject-dependent %d-th CV training experiments on individual %d class %s" % (fold,
            individual, class_list[class_target]))
        print("train_X shape", train_X.shape)
        print("train_Y shape", train_Y.shape)
        print("test_X shape", test_X.shape)
        print("test_Y shape", test_Y.shape)
        # Keep only the requested label column.
        train_Y, test_Y = train_Y[:, class_target].squeeze(), test_Y[:, class_target].squeeze()
        print("{}-th CV\t train X shape {}\n".format(fold, train_X.shape))
        print("{}-th CV\t train Y shape {}\n".format(fold, train_Y.shape))
        print("{}-th CV\t test X shape {}\n".format(fold, test_X.shape))
        print("{}-th CV\t test Y shape {}\n".format(fold, test_Y.shape))
        # Class balance diagnostics.
        print("train Y == 0\t{}".format(sum(train_Y == 0)))
        print("train Y == 1\t{}".format(sum(train_Y == 1)))
        print("train Y == 2\t{}".format(sum(train_Y == 2)))
        print("train Y == 3\t{}".format(sum(train_Y == 3)))
        print("test Y == 0\t{}".format(sum(test_Y == 0)))
        print("test Y == 1\t{}".format(sum(test_Y == 1)))
        print("test Y == 2\t{}".format(sum(test_Y == 2)))
        print("test Y == 3\t{}".format(sum(test_Y == 3)))
        train_loader = DataLoader(dataset=DEAP_DATASET(train_X, train_Y), batch_size=batch_size, shuffle=shuffle,
                                  num_workers=0)
        test_loader = DataLoader(dataset=DEAP_DATASET(test_X, test_Y), batch_size=batch_size, shuffle=shuffle,
                                 num_workers=0)
        # Experiment tag used for plot/file names.
        exp_des = "%d_dependent_%dth_cv_%s_%s_%d_%d_%s" % (
            individual, fold,'shuffle' if shuffle else "without_shuffle", 'deap', epochs, batch_size,
            class_list[class_target])
        print("model construction...")
        net = Hierarchical_ATTN_With_Senti_Map(class_num=class_nums[class_target])
        # if fine_tuning we continue train the pretrained model
        net = net.to(device)
        save_model_path = '../../saved_models/%s/deap/subject_%d/%s/fold%d' % (
            net.__class__.__name__, individual, class_list[class_target], fold)
        if not os.path.exists(save_model_path):
            os.makedirs(save_model_path)
        optimization = RMSprop(net.parameters(), lr=learning_rate, weight_decay=0.01)
        running_loss_list = []
        running_acc_list = []
        validate_loss_list = []
        validate_acc_list = []
        best_acc = -1  # best validation accuracy seen so far in this fold
        print("start training...")
        # Warmup (first 10% of epochs) into a cosine-annealing schedule.
        scheduler_cosine = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer=optimization, T_max=epochs)
        scheduler_warmup = GradualWarmupScheduler(optimizer=optimization, multiplier=10,
                                                  total_epoch=np.ceil(0.1 * epochs),
                                                  after_scheduler=scheduler_cosine)
        for epoch in range(epochs):
            net.train()
            running_loss = 0.0
            correct = 0.0
            total = 0.0
            for i, (feature, target) in enumerate(train_loader):
                # 40 channels x 5 band-power features per sample.
                feature = feature.reshape(-1, 40, 5)
                optimization.zero_grad()
                # print("train label:{}".format(target))
                # print("dirty-data count", torch.sum(torch.isnan(feature), dim=0))
                # Split into 32 EEG + 8 peripheral channels.
                eeg = feature[:, :32, :]
                peripheral = feature[:, 32:, :]
                eeg = eeg.reshape(-1, 32, 5)
                peripheral = peripheral.reshape(-1, 8, 5)
                eeg = eeg.to(device)
                peripheral = peripheral.to(device)
                target = target.type(torch.LongTensor).to(device)
                #print(eeg.shape, peripheral.shape)
                out = net(eeg, peripheral)
                # print("train out", out.data[:5])
                # print("train", eeg_attn.shape, eeg_attn.data[:5])
                # print("train", eye_attn.shape, eye_attn.data[:5])
                # print("batch output",out[0])
                cross_entropy_loss = criterion(out, target)
                # eeg_attn_loss = criterion_attn(eeg_attn, target)
                # eye_attn_loss = criterion_attn(eye_attn, target)
                # loss = cross_entropy_loss
                # print("cross-entropy loss", cross_entropy_loss.data)
                # print("eeg attention loss", eeg_attn_loss.data)
                # print("eye attention loss", eeg_attn_loss.data)
                cross_entropy_loss.backward()
                clip_grad_norm_(net.parameters(), max_norm=10)
                # for name, parms in net.named_parameters():
                #     print('print gradients')
                #     print('-->name:', name, '-->grad_requirs:', parms.requires_grad, \
                #           ' -->grad_value:', parms.grad)
                optimization.step()
                running_loss += cross_entropy_loss.item()
                # print("batch loss", loss.item())
                _, prediction = torch.max(out.data, dim=-1)
                # print('train preds', prediction[:5])
                total += target.size(0)
                correct += prediction.eq(target.data).cpu().sum().item()
            cur_loss = running_loss / total  # mean loss per sample
            cur_acc = correct / total
            # print(cur_acc, correct, total)
            if isinstance(cur_acc, torch.Tensor):
                cur_acc = cur_acc.item()
            if isinstance(cur_loss, torch.Tensor):
                cur_loss = cur_loss.item()
            print('Training Loss: %.10f | Training Acc: %.3f%% (%d/%d)' % (
                cur_loss, 100 * cur_acc, correct, total))
            running_loss_list.append(cur_loss)
            running_acc_list.append(cur_acc)
            scheduler_warmup.step()
            # Validate every 5 epochs.
            # NOTE(review): validation runs on test_loader, i.e. model
            # selection is done on the test fold itself — the reported test
            # metrics are therefore not independent of model selection.
            if epoch % 5 == 0:
                net.eval()
                print("start evaluating...")
                validate_loss = 0.0
                validate_correct = 0.0
                validate_total = 0.0
                for i, (feature, target) in enumerate(test_loader):
                    feature = feature.reshape(-1, 40, 5)
                    # print("label:{}".format(target))
                    eeg = feature[:, :32, :]
                    peripheral = feature[:, 32:, :]
                    eeg = eeg.reshape(-1, 32, 5)
                    peripheral = peripheral.reshape(-1, 8, 5)
                    eeg = eeg.to(device)
                    peripheral = peripheral.to(device)
                    target = target.type(torch.LongTensor).to(device)
                    with torch.no_grad():
                        out = net(eeg, peripheral)
                        # print("val out", out.data[:5])
                        # print("val", eeg_attn.data[:5])
                        # print("val", eye_attn.data[:5])
                        loss = criterion(out, target)
                    validate_loss += loss.item()
                    _, prediction = torch.max(out.data, dim=-1)
                    # print('val preds', prediction[:10])
                    validate_total += target.size(0)
                    validate_correct += prediction.eq(target.data).cpu().sum().item()
                    # print("val equal:{}".format(prediction.eq(target.data)))
                validate_acc = validate_correct / validate_total
                validate_loss = validate_loss / validate_total
                if isinstance(validate_acc, torch.Tensor):
                    validate_acc = validate_acc.item()
                if isinstance(validate_loss, torch.Tensor):
                    validate_loss = validate_loss.item()
                print('Validate Loss: %.10f | Validate-Acc: %.3f%% (%d/%d)' % (
                    validate_loss, 100 * validate_acc, validate_correct, validate_total))
                validate_acc_list.append(validate_acc)
                validate_loss_list.append(validate_loss)
                # Checkpoint whenever validation accuracy improves.
                if validate_acc > best_acc:
                    best_acc = validate_acc
                    print("better model founded in validating sets, start saving new model")
                    model_name = '%s' % (net.__class__.__name__)
                    state = {
                        'net': net.state_dict(),
                        'epoch': epoch,
                        'best_acc': best_acc,
                        'current_loss': validate_loss
                    }
                    torch.save(state, os.path.join(save_model_path, model_name))
        # Reload the best checkpoint and evaluate on this fold's test split.
        checkpoint = torch.load(os.path.join(save_model_path, net.__class__.__name__))
        net.load_state_dict(checkpoint['net'])
        net.eval()
        print("start evaluating...")
        testing_loss = 0.0
        test_correct = 0.0
        test_total = 0.0
        y_pre = []
        y_true = []
        for i, (feature, target) in enumerate(test_loader):
            feature = feature.reshape(-1, 40, 5)
            eeg = feature[:, :32, :]
            peripheral = feature[:, 32:, :]
            eeg = eeg.reshape(-1, 32, 5)
            peripheral = peripheral.reshape(-1, 8, 5)
            eeg = eeg.to(device)
            peripheral = peripheral.to(device)
            target = target.type(torch.LongTensor).to(device)
            y_true.extend(target.cpu().numpy().tolist())
            with torch.no_grad():
                out = net(eeg, peripheral)
                loss = criterion(out, target)
            testing_loss += loss.item()
            _, prediction = torch.max(out.data, dim=-1)
            y_pre.extend(prediction.cpu().numpy().tolist())
            # print(prediction)
            test_total += target.size(0)
            test_correct += prediction.eq(target.data).cpu().sum().item()
        test_acc = test_correct / test_total
        test_loss = testing_loss / test_total  # mean loss per sample
        if isinstance(test_acc, torch.Tensor):
            test_acc = test_acc.item()
        if isinstance(test_loss, torch.Tensor):
            test_loss = test_loss.item()
        print('Test Loss: %.10f | Test Acc: %.3f%% (%d/%d)' % (
            test_loss, 100 * test_acc, test_correct, test_total))
        plot_acc_loss_curve({'train_loss': running_loss_list,
                             'train_acc': running_acc_list,
                             'test_loss': validate_loss_list,
                             'test_acc': validate_acc_list}, net.__class__.__name__, exp_des)
        y_true = np.array(y_true)
        y_pre = np.array(y_pre)
        test_acc_list.append(test_acc)
        test_loss_list.append(test_loss)
        test_precision_list.append(precision_score(y_true, y_pre, average='macro'))
        test_recall_list.append(recall_score(y_true, y_pre, average='macro'))
        test_f1_list.append(f1_score(y_true, y_pre, average='macro'))
        test_accuray_list.append(accuracy_score(y_true, y_pre))
        # Append this fold's full classification report to a shared text log.
        with open('./cv_results/{}_classification_reports.txt'.format(class_list[class_target]), 'a+') as f:
            f.write("*********** {}-th CV Predict results of individual {}***********\n".format(fold, individual))
            f.write(
                "classification reports:\n{}\nconfusion matrix:\n{}\noytx_accuracy_score:{}\noytx_precision_score:{}\noytx_recall_score:{}\noytx_f1_score:{}\n".format(
                    classification_report(y_true, y_pre), confusion_matrix(y_true, y_pre),
                    accuracy_score(y_true, y_pre), precision_score(y_true, y_pre, average='macro'),
                    recall_score(y_true, y_pre, average='macro'), f1_score(y_true, y_pre, average='macro')))
            f.write("******************************************************\n")
    # Aggregate per-fold metrics and append mean/std rows before saving.
    df = pd.DataFrame.from_dict({
        'test_loss': test_loss_list,
        'test_acc': test_acc_list,
        "test_accuracy":test_accuray_list,
        "test_precision":test_precision_list,
        "test_recall":test_recall_list,
        "test_f1":test_f1_list
    })
    df_mean = df.mean()
    df_std = df.std()
    # NOTE(review): DataFrame.append was removed in pandas 2.0 — this needs
    # pandas < 2.0 or a rewrite using pd.concat.
    df = df.append(df_mean, ignore_index=True)
    df = df.append(df_std, ignore_index=True)
    df.to_csv('./cv_results/deap_individual_%d_%s.csv' % (individual, class_list[class_target]), mode='w', index=False,
              header=True, encoding='utf-8')
def subject_dependent_k_fold_128(individual=1, class_target=4, k_fold=5):
    """Subject-dependent k-fold CV on DEAP using 128-sample raw segments.

    Same pipeline as ``subject_dependent_k_fold`` but with 40x128 features
    (DEAP128 loader), 150 epochs, label smoothing 0.1, and extra per-batch
    classification reports printed during validation.

    Args:
        individual: subject index whose recordings are used.
        class_target: which label column to predict (index into ``class_list``).
        k_fold: number of cross-validation folds.
    """
    # k-fold cv
    class_list = [
        "Valence",
        "Arousal",
        "Dominance",
        "Liking",
        "Valence-Arousal"
    ]
    class_nums = [2, 2, 2, 2, 4]  # number of classes per target
    test_loss_list = []  # per-fold test loss
    test_acc_list = []  # per-fold test accuracy (running count based)
    test_precision_list = []  # per-fold macro precision
    test_recall_list = []  # per-fold macro recall
    test_f1_list = []  # per-fold macro F1
    test_accuray_list = []  # per-fold sklearn accuracy_score
    # prepare data
    nor_method = 0
    label_smooth = 0.1
    shuffle = True
    # reading the data in the whole dataset
    deap = DEAP128(individual=individual)
    k_fold_data = deap.get_kfold_X_Y2(k_fold)
    for fold, (train_X, train_Y, test_X, test_Y) in enumerate(k_fold_data):
        print("start {} th cross-validation".format(fold))
        train_X, train_Y, test_X, test_Y = deap_normalization(train_X, train_Y, test_X, test_Y, nor_method=0, merge=1,
                                                              column=0)
        # Hyper-parameters
        epochs = 150
        batch_size = 512
        learning_rate = 1e-4
        criterion = LabelSmoothSoftmax(lb_smooth=label_smooth)
        # criterion_attn = CrossEntropyLoss()
        print("starting subject-dependent %d-th CV training experiments on individual %d class %s" % (fold,
                                                                                                      individual,
                                                                                                      class_list[
                                                                                                          class_target]))
        print("train_X shape", train_X.shape)
        print("train_Y shape", train_Y.shape)
        print("test_X shape", test_X.shape)
        print("test_Y shape", test_Y.shape)
        # Keep only the requested label column.
        train_Y, test_Y = train_Y[:, class_target].squeeze(), test_Y[:, class_target].squeeze()
        print("{}-th CV\t train X shape {}\n".format(fold, train_X.shape))
        print("{}-th CV\t train Y shape {}\n".format(fold, train_Y.shape))
        print("{}-th CV\t test X shape {}\n".format(fold, test_X.shape))
        print("{}-th CV\t test Y shape {}\n".format(fold, test_Y.shape))
        # Class balance diagnostics.
        print("train Y == 0\t{}".format(sum(train_Y == 0)))
        print("train Y == 1\t{}".format(sum(train_Y == 1)))
        print("train Y == 2\t{}".format(sum(train_Y == 2)))
        print("train Y == 3\t{}".format(sum(train_Y == 3)))
        print("test Y == 0\t{}".format(sum(test_Y == 0)))
        print("test Y == 1\t{}".format(sum(test_Y == 1)))
        print("test Y == 2\t{}".format(sum(test_Y == 2)))
        print("test Y == 3\t{}".format(sum(test_Y == 3)))
        train_loader = DataLoader(dataset=DEAP_DATASET(train_X, train_Y), batch_size=batch_size, shuffle=shuffle,
                                  num_workers=0)
        test_loader = DataLoader(dataset=DEAP_DATASET(test_X, test_Y), batch_size=batch_size, shuffle=shuffle,
                                 num_workers=0)
        # Experiment tag used for plot/file names.
        exp_des = "%d_dependent_%dth_cv_%s_%s_%d_%d_%s" % (
            individual, fold, 'shuffle' if shuffle else "without_shuffle", 'deap', epochs, batch_size,
            class_list[class_target])
        print("model construction...")
        net = Hierarchical_ATTN_With_Senti_Map(class_num=class_nums[class_target])
        # if fine_tuning we continue train the pretrained model
        net = net.to(device)
        save_model_path = '../../saved_models/%s/deap/subject_%d/%s/fold%d' % (
            net.__class__.__name__, individual, class_list[class_target], fold)
        if not os.path.exists(save_model_path):
            os.makedirs(save_model_path)
        optimization = RMSprop(net.parameters(), lr=learning_rate, weight_decay=0.01)
        running_loss_list = []
        running_acc_list = []
        validate_loss_list = []
        validate_acc_list = []
        best_acc = -1  # best validation accuracy seen so far in this fold
        print("start training...")
        # Warmup (first 10% of epochs) into a cosine-annealing schedule.
        scheduler_cosine = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer=optimization, T_max=epochs)
        scheduler_warmup = GradualWarmupScheduler(optimizer=optimization, multiplier=10,
                                                  total_epoch=np.ceil(0.1 * epochs),
                                                  after_scheduler=scheduler_cosine)
        for epoch in range(epochs):
            net.train()
            running_loss = 0.0
            correct = 0.0
            total = 0.0
            for i, (feature, target) in enumerate(train_loader):
                # 40 channels x 128 time samples per segment.
                feature = feature.reshape(-1, 40, 128)
                optimization.zero_grad()
                # print("train label:{}".format(target))
                # print("dirty-data count", torch.sum(torch.isnan(feature), dim=0))
                # Split into 32 EEG + 8 peripheral channels.
                eeg = feature[:, :32, :]
                peripheral = feature[:, 32:, :]
                eeg = eeg.reshape(-1, 32, 128)
                peripheral = peripheral.reshape(-1, 8, 128)
                eeg = eeg.to(device)
                peripheral = peripheral.to(device)
                target = target.type(torch.LongTensor).to(device)
                # print(eeg.shape, peripheral.shape)
                out = net(eeg, peripheral)
                # print("train out", out.data[:5])
                # print("train", eeg_attn.shape, eeg_attn.data[:5])
                # print("train", eye_attn.shape, eye_attn.data[:5])
                # print("batch output",out[0])
                cross_entropy_loss = criterion(out, target)
                # eeg_attn_loss = criterion_attn(eeg_attn, target)
                # eye_attn_loss = criterion_attn(eye_attn, target)
                # loss = cross_entropy_loss
                # print("cross-entropy loss", cross_entropy_loss.data)
                # print("eeg attention loss", eeg_attn_loss.data)
                # print("eye attention loss", eeg_attn_loss.data)
                cross_entropy_loss.backward()
                clip_grad_norm_(net.parameters(), max_norm=10)
                # for name, parms in net.named_parameters():
                #     print('print gradients')
                #     print('-->name:', name, '-->grad_requirs:', parms.requires_grad, \
                #           ' -->grad_value:', parms.grad)
                optimization.step()
                running_loss += cross_entropy_loss.item()
                # print("batch loss", loss.item())
                _, prediction = torch.max(out.data, dim=-1)
                # print('train preds', prediction[:5])
                total += target.size(0)
                correct += prediction.eq(target.data).cpu().sum().item()
            cur_loss = running_loss / total  # mean loss per sample
            cur_acc = correct / total
            # print(cur_acc, correct, total)
            if isinstance(cur_acc, torch.Tensor):
                cur_acc = cur_acc.item()
            if isinstance(cur_loss, torch.Tensor):
                cur_loss = cur_loss.item()
            print('Training Loss: %.10f | Training Acc: %.3f%% (%d/%d)' % (
                cur_loss, 100 * cur_acc, correct, total))
            running_loss_list.append(cur_loss)
            running_acc_list.append(cur_acc)
            scheduler_warmup.step()
            # Validate every 5 epochs.
            # NOTE(review): validation runs on test_loader, i.e. model
            # selection is done on the test fold itself — the reported test
            # metrics are therefore not independent of model selection.
            if epoch % 5 == 0:
                net.eval()
                print("start evaluating...")
                validate_loss = 0.0
                validate_correct = 0.0
                validate_total = 0.0
                for i, (feature, target) in enumerate(test_loader):
                    feature = feature.reshape(-1, 40, 128)
                    # print("label:{}".format(target))
                    eeg = feature[:, :32, :]
                    peripheral = feature[:, 32:, :]
                    eeg = eeg.reshape(-1, 32, 128)
                    peripheral = peripheral.reshape(-1, 8, 128)
                    eeg = eeg.to(device)
                    peripheral = peripheral.to(device)
                    target = target.type(torch.LongTensor).to(device)
                    with torch.no_grad():
                        out = net(eeg, peripheral)
                        # print("val out", out.data[:5])
                        # print("val", eeg_attn.data[:5])
                        # print("val", eye_attn.data[:5])
                        loss = criterion(out, target)
                    validate_loss += loss.item()
                    _, prediction = torch.max(out.data, dim=-1)
                    # print('val preds', prediction[:10])
                    validate_total += target.size(0)
                    validate_correct += prediction.eq(target.data).cpu().sum().item()
                    # print("val equal:{}".format(prediction.eq(target.data)))
                    # Per-batch diagnostic report for the current validation batch.
                    print("classification reports {}\n".format(classification_report(target.cpu().numpy(), prediction.cpu().numpy())))
                    print("confusion matrix {}".format(
                        confusion_matrix(target.cpu().numpy(), prediction.cpu().numpy())))
                validate_acc = validate_correct / validate_total
                validate_loss = validate_loss / validate_total
                if isinstance(validate_acc, torch.Tensor):
                    validate_acc = validate_acc.item()
                if isinstance(validate_loss, torch.Tensor):
                    validate_loss = validate_loss.item()
                print('Validate Loss: %.10f | Validate-Acc: %.3f%% (%d/%d)' % (
                    validate_loss, 100 * validate_acc, validate_correct, validate_total))
                validate_acc_list.append(validate_acc)
                validate_loss_list.append(validate_loss)
                # Checkpoint whenever validation accuracy improves.
                if validate_acc > best_acc:
                    best_acc = validate_acc
                    print("better model founded in validating sets, start saving new model")
                    model_name = '%s' % (net.__class__.__name__)
                    state = {
                        'net': net.state_dict(),
                        'epoch': epoch,
                        'best_acc': best_acc,
                        'current_loss': validate_loss
                    }
                    torch.save(state, os.path.join(save_model_path, model_name))
        # Reload the best checkpoint and evaluate on this fold's test split.
        checkpoint = torch.load(os.path.join(save_model_path, net.__class__.__name__))
        net.load_state_dict(checkpoint['net'])
        net.eval()
        print("start evaluating...")
        testing_loss = 0.0
        test_correct = 0.0
        test_total = 0.0
        y_pre = []
        y_true = []
        for i, (feature, target) in enumerate(test_loader):
            feature = feature.reshape(-1, 40, 128)
            eeg = feature[:, :32, :]
            peripheral = feature[:, 32:, :]
            eeg = eeg.reshape(-1, 32, 128)
            peripheral = peripheral.reshape(-1, 8, 128)
            eeg = eeg.to(device)
            peripheral = peripheral.to(device)
            target = target.type(torch.LongTensor).to(device)
            y_true.extend(target.cpu().numpy().tolist())
            with torch.no_grad():
                out = net(eeg, peripheral)
                loss = criterion(out, target)
            testing_loss += loss.item()
            _, prediction = torch.max(out.data, dim=-1)
            y_pre.extend(prediction.cpu().numpy().tolist())
            # print(prediction)
            test_total += target.size(0)
            test_correct += prediction.eq(target.data).cpu().sum().item()
        test_acc = test_correct / test_total
        test_loss = testing_loss / test_total  # mean loss per sample
        if isinstance(test_acc, torch.Tensor):
            test_acc = test_acc.item()
        if isinstance(test_loss, torch.Tensor):
            test_loss = test_loss.item()
        print('Test Loss: %.10f | Test Acc: %.3f%% (%d/%d)' % (
            test_loss, 100 * test_acc, test_correct, test_total))
        plot_acc_loss_curve({'train_loss': running_loss_list,
                             'train_acc': running_acc_list,
                             'test_loss': validate_loss_list,
                             'test_acc': validate_acc_list}, net.__class__.__name__, exp_des)
        y_true = np.array(y_true)
        y_pre = np.array(y_pre)
        test_acc_list.append(test_acc)
        test_loss_list.append(test_loss)
        test_precision_list.append(precision_score(y_true, y_pre, average='macro'))
        test_recall_list.append(recall_score(y_true, y_pre, average='macro'))
        test_f1_list.append(f1_score(y_true, y_pre, average='macro'))
        test_accuray_list.append(accuracy_score(y_true, y_pre))
        # Append this fold's full classification report to a shared text log.
        with open('./cv_results/{}_classification_reports.txt'.format(class_list[class_target]), 'a+') as f:
            f.write("*********** {}-th CV Predict results of individual {}***********\n".format(fold, individual))
            f.write(
                "classification reports:\n{}\nconfusion matrix:\n{}\noytx_accuracy_score:{}\noytx_precision_score:{}\noytx_recall_score:{}\noytx_f1_score:{}\n".format(
                    classification_report(y_true, y_pre), confusion_matrix(y_true, y_pre),
                    accuracy_score(y_true, y_pre), precision_score(y_true, y_pre, average='macro'),
                    recall_score(y_true, y_pre, average='macro'), f1_score(y_true, y_pre, average='macro')))
            f.write("******************************************************\n")
    # Aggregate per-fold metrics and append mean/std rows before saving.
    df = pd.DataFrame.from_dict({
        'test_loss': test_loss_list,
        'test_acc': test_acc_list,
        "test_accuracy": test_accuray_list,
        "test_precision": test_precision_list,
        "test_recall": test_recall_list,
        "test_f1": test_f1_list
    })
    df_mean = df.mean()
    df_std = df.std()
    # NOTE(review): DataFrame.append was removed in pandas 2.0 — this needs
    # pandas < 2.0 or a rewrite using pd.concat.
    df = df.append(df_mean, ignore_index=True)
    df = df.append(df_std, ignore_index=True)
    df.to_csv('./cv_results/deap_individual_%d_%s.csv' % (individual, class_list[class_target]), mode='w', index=False,
              header=True, encoding='utf-8')
if __name__ == '__main__':
    # Run 10-fold subject-dependent CV on Valence (class_target=0) for all
    # 32 DEAP subjects.
    # for c in [4, 0, 1, 2, 3]:
    for i in range(1, 33):
        subject_dependent_k_fold(i, class_target=0, k_fold=10)
    print("experiments done...")
    # subject_dependent(11, class_target=4)
    # # subject-independent
    # _x_list = []
    # _y_list = []
    # for i in range(1, 33):
    #     deap = DEAP(individual=i, normalization=1)
    #     X, Y = deap.get_X_Y()
    #     X = X.transpose((0,2,1,3))
    #     X = X.reshape(-1, 128)
    #     Y = Y.reshape(-1, 5)
    #     _x_list.append(X)
    #     _y_list.append(Y)
    #     print(X.shape)
    # for c in [4, 0, 1, 2, 3]:
    #     for j in range(1,33):
    #         subject_independent(_x_list, _y_list, individual=j, class_target=c)
    #
    # # main(1, 'subject_dependent')
    # print("experiment done!")
| 47.799763
| 401
| 0.588566
| 10,276
| 80,686
| 4.348774
| 0.053717
| 0.021259
| 0.019424
| 0.00584
| 0.836668
| 0.814693
| 0.804176
| 0.795202
| 0.779807
| 0.772847
| 0
| 0.02164
| 0.285799
| 80,686
| 1,688
| 402
| 47.799763
| 0.753844
| 0.124235
| 0
| 0.719715
| 0
| 0
| 0.069694
| 0.016926
| 0
| 0
| 0
| 0
| 0.003167
| 1
| 0.022961
| false
| 0.000792
| 0.014252
| 0
| 0.056215
| 0.068884
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7060a76727aea70cf5fa920732e34b66bd1121e6
| 25,787
|
py
|
Python
|
levenshtein_test.py
|
vcbin/SparseDamerauLevenshteinAutomaton
|
09ad573c1201a9c03dfbc707323acfa2f3999a29
|
[
"MIT"
] | 16
|
2016-08-04T10:04:26.000Z
|
2021-05-14T05:15:32.000Z
|
levenshtein_test.py
|
vcbin/SparseDamerauLevenshteinAutomaton
|
09ad573c1201a9c03dfbc707323acfa2f3999a29
|
[
"MIT"
] | null | null | null |
levenshtein_test.py
|
vcbin/SparseDamerauLevenshteinAutomaton
|
09ad573c1201a9c03dfbc707323acfa2f3999a29
|
[
"MIT"
] | 6
|
2016-12-21T08:26:26.000Z
|
2020-08-20T12:16:55.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ref
# http://julesjacobs.github.io/2015/06/17/disqus-levenshtein-simple-and-fast.html
class LevenshteinAutomaton(object):
    """Dense-state Levenshtein automaton for ``string`` with at most ``n`` edits.

    A state is a list of edit-distance values, one per prefix of ``string``
    (the current row of the Wagner-Fischer matrix, capped at ``n + 1``).
    """

    def __init__(self, string, n, weight=1.0):
        self.string = string
        self.max_edits = n
        self.weight = weight

    def get_string(self):
        """Return the pattern string this automaton matches against."""
        return self.string

    def get_n(self):
        """Return the maximum number of allowed edits.

        Bug fix: this previously returned ``self.n``, an attribute that is
        never set (the constructor stores it as ``max_edits``), so every call
        raised AttributeError.
        """
        return self.max_edits

    def get_weight(self):
        """Return the weight associated with this automaton."""
        return self.weight

    def start(self):
        """Return the initial state: distances 0..len(string)."""
        return range(len(self.string) + 1)

    def step(self, state, c):
        """Advance ``state`` by consuming character ``c``.

        Standard edit-distance row update; values are capped at
        ``max_edits + 1`` so the state space stays finite.
        """
        new_state = [state[0] + 1]
        for i in range(len(state) - 1):
            cost = 0 if self.string[i] == c else 1
            new_state.append(
                min(new_state[i] + 1, state[i] + cost, state[i + 1] + 1))
        return [min(x, self.max_edits + 1) for x in new_state]

    def is_match(self, state):
        """True if the input consumed so far matches within max_edits edits."""
        return state[-1] <= self.max_edits

    def can_match(self, state):
        """True if some continuation of the input could still match."""
        return min(state) <= self.max_edits

    def match_error(self, state):
        """Return the edit distance of the current (whole-string) match."""
        return state[-1]

    def transitions(self, state):
        """Return the characters that lead somewhere other than the '*' default."""
        return set(c for (i, c) in enumerate(self.string) if state[i] <= self.max_edits)
class SparseLevenshteinAutomaton(object):
    """Sparse-state Levenshtein automaton for ``string`` with at most ``n`` edits.

    A state is a pair ``(indices, values)`` keeping only the positions of the
    dense edit-distance row whose value is <= ``n``; dead positions are dropped.

    Fixes over the original:
    - ``get_n`` returned ``self.n``, which is never set (AttributeError);
      it now returns ``max_edits``.
    - Tuple parameters (``def step(self, (indices, values), c)``) were
      Python-2-only syntax (removed by PEP 3113); the pair is now unpacked
      inside each method, which behaves identically on both Python 2 and 3.
    """

    def __init__(self, string, n, weight=1.0):
        self.string = string
        self.max_edits = n
        self.weight = weight

    def get_string(self):
        """Return the pattern string this automaton matches against."""
        return self.string

    def get_n(self):
        """Return the maximum number of allowed edits (was broken: self.n)."""
        return self.max_edits

    def get_weight(self):
        """Return the weight associated with this automaton."""
        return self.weight

    def start(self):
        """Return the initial sparse state: positions 0..n with values 0..n."""
        return (range(self.max_edits + 1), range(self.max_edits + 1))
        # return (range(min(self.max_edits, len(self.string)) + 1),
        #         range(min(self.max_edits, len(self.string) + 1)))

    def step(self, state, c):
        """Advance sparse state ``(indices, values)`` by consuming ``c``."""
        indices, values = state
        # Position 0 survives only by another insertion, if still affordable.
        if indices and indices[0] == 0 and values[0] < self.max_edits:
            new_indices = [0]
            new_values = [values[0] + 1]
        else:
            new_indices = []
            new_values = []
        for j, i in enumerate(indices):
            if i == len(self.string):
                break
            cost = 0 if self.string[i] == c else 1
            val = values[j] + cost
            # Cheaper path via the freshly-computed left neighbour (insertion).
            if new_indices and new_indices[-1] == i:
                val = min(val, new_values[-1] + 1)
            # Cheaper path via the old right neighbour (deletion).
            if j + 1 < len(indices) and indices[j + 1] == i + 1:
                val = min(val, values[j + 1] + 1)
            if val <= self.max_edits:
                new_indices.append(i + 1)
                new_values.append(val)
        return (new_indices, new_values)

    def is_match(self, state):
        """True if the last pattern position is alive, i.e. a match within budget."""
        indices, values = state
        return bool(indices) and indices[-1] == len(self.string)

    def can_match(self, state):
        """True if any position is still alive (some continuation could match)."""
        indices, values = state
        return bool(indices)

    def match_error(self, state):
        """Return the edit distance of the current match."""
        return state[1][-1]  # state is tuple of (key_list, value_list)

    def transitions(self, state):
        """Return the characters that lead somewhere other than the '*' default."""
        indices, values = state
        return set(self.string[i] for i in indices if i < len(self.string))
def explore(lev, state, states,
            counter, matching, transitions):
    """Depth-first enumeration of the DFA states reachable from ``state``.

    ``states`` maps frozen sparse states to integer ids, ``counter`` is a
    one-element list used as a mutable id source, ``matching`` collects the
    ids of accepting states and ``transitions`` collects ``(src, dst, char)``
    edges, where '*' stands for "any other character".
    Returns the id assigned to ``state``.
    """
    # lists are unhashable in Python (mutable), so freeze the state pair
    # into tuples before using it as a dict key
    frozen = (tuple(state[0]), tuple(state[1]))
    known = states.get(frozen)
    if known is not None:
        return known
    state_id = counter[0]
    counter[0] = state_id + 1
    states[frozen] = state_id
    if lev.is_match(state):
        matching.append(state_id)
    for label in lev.transitions(state) | set(['*']):
        successor = lev.step(state, label)
        target = explore(lev, successor, states, counter, matching, transitions)
        transitions.append((state_id, target, label))
    return state_id
def state_equal(dense_state, sparse_state):
# print type(dense_s), type(sparse_state)
indices, values = sparse_state
# print type(indices), type(values)
assert(len(indices) == len(values))
# print indices, values
sparse_len = len(indices)
for i in range(sparse_len):
# print "i %d, indices[i] %d, len(dense_s) %d, len(values) %d" \
# % (i, indices[i], len(dense_s), len(values))
assert(indices[i] < len(dense_state))
if values[i] != dense_state[indices[i]]:
# print "len(dense_state)= %d" % len(dense_state)
print
print "%s\t(%s, %s)" % (dense_state, indices, values)
return False
return True
class DamerauLevenshteinAutomaton(LevenshteinAutomaton):
    '''Dense Damerau-Levenshtein automaton (optimal string alignment).

    ref https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance
    The optimal string alignment (OSA) algorithm counts the edit operations
    needed to make the strings equal under the condition that NO SUBSTRING
    is EDITED MORE THAN ONCE.  Note that for the OSA distance the triangle
    inequality does not hold: OSA(CA,AC) + OSA(AC,ABC) < OSA(CA,ABC), so it
    is not a true metric.

    The previous state and query character are kept internally so the
    transposition rule can look one step back; the instance is therefore
    NOT thread safe and must be reset with ``clear_state`` between queries.
    '''

    def __init__(self, word, n, state=None, c=u""):
        super(DamerauLevenshteinAutomaton, self).__init__(word, n)
        # bug fix: the original default ``state=[]`` was a shared mutable
        # default argument; use None as the sentinel instead
        self._prev_state = [] if state is None else state
        self._prev_c = c

    def step(self, state, c):
        """Advance the dense OSA state by one query character ``c``.

        Also records ``state``/``c`` so that the next call can apply the
        transposition rule.
        """
        assert(state)
        prev_state = self._prev_state
        if prev_state:
            assert(len(prev_state) == len(state))
        prev_c = self._prev_c
        new_state = [state[0] + 1]
        for i in range(len(state) - 1):
            cost = 0 if self.string[i] == c else 1
            cur_min = min(new_state[i] + 1, state[i] + cost, state[i + 1] + 1)
            # transposition: the current and previous query characters are
            # string[i-1:i+1] swapped, so one transposition edit suffices
            if prev_state and len(prev_c) and i > 0 and \
                    self.string[i - 1] == c and self.string[i] == prev_c:
                cur_min = min(cur_min, prev_state[i - 1] + cost)
            new_state.append(cur_min)
        assert(len(new_state) == len(self.string) + 1)
        res_state = [min(x, self.max_edits + 1) for x in new_state]
        self._prev_state = state
        self._prev_c = c
        return res_state

    def clear_state(self):
        """clear internal previous state and previous query character variables
        :returns: None
        """
        self._prev_state = []
        self._prev_c = u""
class SparseDamerauLevenshteinAutomaton(SparseLevenshteinAutomaton):
    """SparseDamerauLevenshteinAutomaton implemented by the optimal string
    alignment algorithm.

    The previous sparse state and query character are kept internally so
    the transposition rule can look one step back; the instance is
    therefore NOT thread safe — call ``clear_state`` before reusing it on
    a new query.
    """

    def __init__(self, word, n, state=None, c=u""):
        super(SparseDamerauLevenshteinAutomaton, self).__init__(word, n)
        # bug fix: the original default ``state=([], [])`` embedded shared
        # mutable lists; use None as the sentinel instead
        self._prev_state = ([], []) if state is None else state
        self._prev_c = c

    def step(self, state, c):
        """Advance the sparse OSA state by one query character ``c``.

        (Signature modernised from Python-2-only tuple parameter unpacking
        to an explicit unpack; the positional interface is unchanged.)
        """
        indices, values = state
        if indices and indices[0] == 0 and values[0] < self.max_edits:
            new_indices = [0]
            new_values = [values[0] + 1]
        else:
            new_indices = []
            new_values = []
        prev_indices, prev_values = self._prev_state
        prev_c = self._prev_c
        for j, i in enumerate(indices):
            if i == len(self.string):
                break
            cost = 0 if self.string[i] == c else 1
            val = values[j] + cost
            # insertion relative to the entry just emitted
            if new_indices and new_indices[-1] == i:
                val = min(val, new_values[-1] + 1)
            # deletion relative to the next surviving position
            if j + 1 < len(indices) and indices[j + 1] == i + 1:
                val = min(val, values[j + 1] + 1)
            # transposition: current and previous query characters are
            # string[i-1:i+1] swapped
            if prev_indices and \
                    i > 0 and i < len(self.string) and \
                    self.string[i - 1] == c and self.string[i] == prev_c:
                idx = None
                # the matching previous index is usually near j, so probe
                # the neighbours before falling back to a linear scan
                if j < len(prev_indices) and prev_indices[j] == i - 1:
                    idx = j
                elif 0 <= j - 1 < len(prev_indices) and prev_indices[j - 1] == i - 1:
                    idx = j - 1
                elif j + 1 < len(prev_indices) and prev_indices[j + 1] == i - 1:
                    idx = j + 1
                else:
                    for k, l in enumerate(prev_indices):
                        if l == i - 1:
                            idx = k
                            break
                assert(idx is not None)
                val = min(val, prev_values[idx] + cost)
            if val <= self.max_edits:
                new_indices.append(i + 1)
                new_values.append(val)
        self._prev_state = (indices, values)
        self._prev_c = c
        return (new_indices, new_values)

    def clear_state(self):
        """clear internal previous state and previous query character variables
        :returns: None
        """
        self._prev_state = ([], [])
        self._prev_c = u""
def exploreSpaDamLev(lev, state,
                     states, counter, matching, transitions):
    """Depth-first enumeration of DFA states for a sparse Damerau automaton.

    Same protocol as ``explore``: ``states`` maps frozen sparse states to
    ids, ``counter`` is a one-element mutable id source, ``matching``
    collects accepting ids and ``transitions`` collects ``(src, dst, char)``
    edges ('*' = any other character).  Returns the id of ``state``.
    """
    assert(isinstance(lev, SparseDamerauLevenshteinAutomaton))
    # freeze the mutable (index_list, value_list) pair so it is hashable
    frozen = (tuple(state[0]), tuple(state[1]))
    if frozen in states:
        return states[frozen]
    state_id = counter[0]
    counter[0] = state_id + 1
    states[frozen] = state_id
    if lev.is_match(state):
        matching.append(state_id)
    # NOTE: deliberately NOT calling lev.clear_state() here — resetting the
    # previous state would degrade this to a plain Levenshtein automaton
    for label in lev.transitions(state) | set(['*']):
        successor = lev.step(state, label)
        target = exploreSpaDamLev(lev, successor, states,
                                  counter, matching, transitions)
        transitions.append((state_id, target, label))
    return state_id
class LevMatch(object):
def __init__(self, key_list, n, weight_list=[]):
for i, title in enumerate(key_list):
if not isinstance(title, unicode): # only work for python 2.x
assert(isinstance(title, basestring))
# convert to unicode object, not working
key_list[i] = title.decode('utf-8')
self._key_list = key_list
self._n = n
self._weight_list = weight_list
if weight_list:
self._word_weight_d = dict(zip(key_list, weight_list))
def print_key_value(self):
if (self._weight_list):
from collections import OrderedDict
word_weight_ord_d = OrderedDict(sorted(self._word_weight_d.items(), key=lambda t: t[
1], reverse=True)) # sorted by weight for display
for k, v in word_weight_ord_d.items():
print "%s -> %s" % (k, repr(v).decode("unicode-escape"))
else:
for k in self._key_list:
print "%s" % k
def items(self):
if any(self._word_weight_d):
return self._word_weight_d.items()
def keys(self, prefix=u"", top_n=10, debug_info=False):
'''time complexity: $M * N * n$ where $M$ is the total index count and $N$
is the query string length, and $n$ is the maximum DamerauLevenshtein distance'''
if (not len(prefix)):
return self._key_list
if not isinstance(prefix, unicode): # only work for python 2.x
assert(isinstance(prefix, basestring))
# convert to unicode object, not working
prefix = prefix.decode('utf-8')
# print "prefix=%s , len = %d" % ( prefix, len(prefix))
words = self._key_list
words_lev_l = [SparseLevenshteinAutomaton(
word, self._n) for word in words]
exact_match_res_l = []
fuzzy_match_res_l = []
from timeit import default_timer as timer
start_t = timer()
for word in words:
if len(word) and len(prefix) and (
(
len(prefix) == 1 and prefix[0] != word[0]
) or (
len(word) > 1 and
prefix[0] != word[0] and prefix[1] != word[1]
)
):
continue # it is unlikely that the first two input characters are misspelled or transposed
if word == prefix:
exact_match_res_l.append(word)
continue
common_len = min(len(prefix), len(word))
# match_prefix = []
# match_word = []
for i in range(common_len):
if prefix[i] != word[i]:
# if debug_info:
# print
# print "%d, prefix[i]: %s, word[i]: %s" % (i, prefix[i], word[i])
break
# only match the different part to speed up the matching process
match_prefix = prefix[i:]
match_word = word[i:]
# if debug_info:
# print
# print "i= %d, prefix '%s', match_prefix '%s', word '%s', match_word: '%s'" % (i,prefix,match_prefix,word, match_word)
lev = SparseLevenshteinAutomaton(
match_word, self._n)
cur_lev_state = lev.start()
for i, cur_c in enumerate(match_prefix):
cur_lev_state = lev.step(cur_lev_state, cur_c)
# new_lev_state = words_lev_d[word].step(
# cur_lev_state, cur_c, prev_lev_state, prev_c)
# prev_lev_state = cur_lev_state
# cur_lev_state = new_lev_state
# prev_c = cur_c
if not lev.can_match(cur_lev_state):
break
# print " ",cur_lev_state
if lev.is_match(cur_lev_state):
# if words_lev_d[word].can_match(cur_lev_state): # NOT correct cause this potentially match a COMPLETELY different/irrelevent word
# print cur_lev_state
err_num = lev.match_error(cur_lev_state)
# print "%s\t%s" % (type(err_num),err_num)
# print "%s, match error char count: %d" % (type(err_num),
# err_num)
if (len(word) == len(prefix) and not err_num):
exact_match_res_l.append(word)
else:
if debug_info:
print "\tprefix: %s, word: %s, cur_i: %d, match error count %d" % ("".join(prefix), word, i, err_num)
fuzzy_match_res_l.append(word)
if (exact_match_res_l or fuzzy_match_res_l) and self._weight_list:
import heapq
def get_weight(word_weight_d, key):
return word_weight_d[key]
from functools import partial
bound_get_weight = partial(get_weight, self._word_weight_d)
exact_match_res_l = list(heapq.nlargest(
top_n, exact_match_res_l, key=bound_get_weight))
fuzzy_match_res_l = list(heapq.nlargest(
top_n, fuzzy_match_res_l, key=bound_get_weight))
# exact_match_res_lst = list(heapq.nlargest(
# top_n, exact_match_res_l, key=lambda key: word_weight_d[key]))
# fuzzy_match_res_lst = list(heapq.nlargest(
# top_n, fuzzy_match_res_l, key=lambda key:
# word_weight_d[key]))
if debug_info:
sep_str_l = [u"----"]
else:
sep_str_l = []
res_l = exact_match_res_l + sep_str_l + fuzzy_match_res_l
elapsed_t = timer() - start_t
# import sys
# res_out = repr([x.encode(sys.stdout.encoding) for x in exact_match_res_l + fuzzy_match_res_l]).decode('string-escape')
# res_out = repr(res_l).decode('unicode-escape')
if debug_info:
print "\t\tlev distance %d, time: %f" % (self._n, elapsed_t)
if res_l:
# print "\t\tmatch result:\t%s" % res_out
print "\t\t'%s' -> \t%s" % \
("".join((prefix)), "\t".join(
['"' + elem + '"' for elem in res_l]))
print
return res_l
class DamLevMatch(LevMatch):
def keys(self, prefix=u"", top_n=10, debug_info=False):
'''time complexity: $M * N * n$ where $M$ is the total index count and $N$
is the query string length, and $n$ is the maximum DamerauLevenshtein distance'''
if (not len(prefix)):
return self._key_list
if not isinstance(prefix, unicode): # only work for python 2.x
assert(isinstance(prefix, basestring))
# convert to unicode object, not working
prefix = prefix.decode('utf-8')
# print "prefix=%s , len = %d" % ( prefix, len(prefix))
words = self._key_list
# words_lev_l = [SparseDamerauLevenshteinAutomaton(
# word, self._n) for word in words]
# words_lev_d = dict(zip(words, words_lev_l))
exact_match_res_l = []
fuzzy_match_res_l = []
from timeit import default_timer as timer
start_t = timer()
for word in words:
if len(word) and len(prefix) and (
(
len(prefix) == 1 and prefix[0] != word[0]
) or (
len(word) > 1 and prefix[0] != word[0] and
prefix[1] != word[1] and
(prefix[0] != word[1] or prefix[1] != word[0])
)
):
continue # it is unlikely that the first two input characters are misspelled or transposed
if word == prefix:
exact_match_res_l.append(word)
continue
common_len = min(len(prefix), len(word))
# match_prefix = []
# match_word = []
for i in range(common_len):
if prefix[i] != word[i]:
# if debug_info:
# print
# print "%d, prefix[i]: %s, word[i]: %s" % (i, prefix[i], word[i])
break
# only match the different part to speed up the matching process
match_prefix = prefix[i:]
match_word = word[i:]
# if debug_info:
# print
# print "i= %d, prefix '%s', match_prefix '%s', word '%s', match_word: '%s'" % (i,prefix,match_prefix,word, match_word)
lev = SparseDamerauLevenshteinAutomaton(
match_word, self._n)
cur_lev_state = lev.start()
# prev_lev_state = ([], [])
# prev_c = u""
lev.clear_state()
for i, cur_c in enumerate(match_prefix):
cur_lev_state = lev.step(cur_lev_state, cur_c)
# new_lev_state = words_lev_d[word].step(
# cur_lev_state, cur_c, prev_lev_state, prev_c)
# prev_lev_state = cur_lev_state
# cur_lev_state = new_lev_state
# prev_c = cur_c
if not lev.can_match(cur_lev_state):
break
# print " ",cur_lev_state
if lev.is_match(cur_lev_state):
# if words_lev_d[word].can_match(cur_lev_state): # NOT correct cause this potentially match a COMPLETELY different/irrelevent word
# print cur_lev_state
err_num = lev.match_error(cur_lev_state)
# print "%s\t%s" % (type(err_num),err_num)
# print "%s, match error char count: %d" % (type(err_num),
# err_num)
if (len(word) == len(prefix) and not err_num):
exact_match_res_l.append(word)
else:
if debug_info:
print "\tprefix: %s, word: %s, cur_i: %d, match error count %d" % ("".join(prefix), word, i, err_num)
fuzzy_match_res_l.append(word)
if (exact_match_res_l or fuzzy_match_res_l) and self._weight_list:
import heapq
def get_weight(word_weight_d, key):
return word_weight_d[key]
from functools import partial
bound_get_weight = partial(get_weight, self._word_weight_d)
exact_match_res_l = list(heapq.nlargest(
top_n, exact_match_res_l, key=bound_get_weight))
fuzzy_match_res_l = list(heapq.nlargest(
top_n, fuzzy_match_res_l, key=bound_get_weight))
# exact_match_res_lst = list(heapq.nlargest(
# top_n, exact_match_res_l, key=lambda key: word_weight_d[key]))
# fuzzy_match_res_lst = list(heapq.nlargest(
# top_n, fuzzy_match_res_l, key=lambda key:
# word_weight_d[key]))
if debug_info:
sep_str_l = [u"----"]
else:
sep_str_l = []
res_l = exact_match_res_l + sep_str_l + fuzzy_match_res_l
elapsed_t = timer() - start_t
# import sys
# res_out = repr([x.encode(sys.stdout.encoding) for x in exact_match_res_l + fuzzy_match_res_l]).decode('string-escape')
# res_out = repr(res_l).decode('unicode-escape')
if debug_info:
print "\t\tlev distance %d, time: %f" % (self._n, elapsed_t)
# print "\t\tmatch result:\t%s" % res_out
if res_l:
print "\t\t'%s' -> \t%s" % \
("".join((prefix)), "\t".join(
['"' + elem + '"' for elem in res_l]))
print
return res_l
| 41.998371
| 193
| 0.538876
| 3,447
| 25,787
| 3.834059
| 0.078909
| 0.037833
| 0.023154
| 0.019068
| 0.839815
| 0.817797
| 0.806295
| 0.791843
| 0.780645
| 0.76309
| 0
| 0.010505
| 0.339241
| 25,787
| 613
| 194
| 42.066884
| 0.765127
| 0.291077
| 0
| 0.688202
| 0
| 0.005618
| 0.015933
| 0
| 0
| 0
| 0
| 0
| 0.02809
| 0
| null | null | 0
| 0.019663
| null | null | 0.036517
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
562139e081d5c1a6ad10b91167dd92fce69d9d14
| 26,763
|
py
|
Python
|
design/migrations/0001_initial.py
|
pincoin/iclover
|
890fcbd836ebffa0de8cf9fbabee55f068b3bc8b
|
[
"MIT"
] | 1
|
2019-07-20T09:51:53.000Z
|
2019-07-20T09:51:53.000Z
|
design/migrations/0001_initial.py
|
pincoin/iclover
|
890fcbd836ebffa0de8cf9fbabee55f068b3bc8b
|
[
"MIT"
] | 11
|
2019-07-26T02:23:52.000Z
|
2022-03-11T23:41:09.000Z
|
design/migrations/0001_initial.py
|
pincoin/iclover
|
890fcbd836ebffa0de8cf9fbabee55f068b3bc8b
|
[
"MIT"
] | 1
|
2019-07-26T02:16:49.000Z
|
2019-07-26T02:16:49.000Z
|
# Generated by Django 2.1.7 on 2019-08-02 08:15
import design.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
import django.utils.timezone
import imagekit.models.fields
import model_utils.fields
import mptt.fields
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('title', models.CharField(max_length=128, verbose_name='title')),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='design.Category', verbose_name='parent')),
],
options={
'verbose_name': '카테고리 품목',
'verbose_name_plural': '카테고리 품목',
},
),
migrations.CreateModel(
name='DeliveryOption',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('title', models.CharField(blank=True, max_length=100, null=True, verbose_name='배송 방법')),
('price', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='배송 비용')),
('tax', models.BooleanField(default=False, verbose_name='부가세 포함여부')),
],
options={
'verbose_name': '상품옵션 _배송비',
'verbose_name_plural': '상품옵션 _배송비',
},
),
migrations.CreateModel(
name='EtcOption',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('title', models.CharField(blank=True, max_length=255, null=True, verbose_name='기타 옵션')),
('option', models.CharField(blank=True, max_length=255, null=True, verbose_name='기타 옵션 상세')),
('memo', models.CharField(blank=True, max_length=255, null=True, verbose_name='기타 옵션 메모')),
('price', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='기타 옵션 가격')),
],
options={
'verbose_name': '상품옵션 _기타옵션',
'verbose_name_plural': '상품옵션 _기타옵션',
},
),
migrations.CreateModel(
name='HooOption',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('title', models.CharField(blank=True, max_length=100, null=True, verbose_name='후가공명')),
('option', models.CharField(blank=True, max_length=100, null=True, verbose_name='상위 품목')),
('price', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='후가공 가격')),
],
options={
'verbose_name': '상품옵션 _후가공',
'verbose_name_plural': '상품옵션 _후가공',
},
),
migrations.CreateModel(
name='Option',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('title', models.CharField(max_length=128, verbose_name='title')),
('slug', models.SlugField(allow_unicode=True, help_text='A short label containing only letters, numbers, underscores or hyphens for URL', max_length=255, unique=True, verbose_name='slug')),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='options', to='design.Category', verbose_name='category')),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='design.Option', verbose_name='parent')),
],
options={
'verbose_name': '상품옵션 기타',
'verbose_name_plural': '상품옵션 기타',
},
managers=[
('_tree_manager', django.db.models.manager.Manager()),
],
),
migrations.CreateModel(
name='OrderImg',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('order_img_num', models.IntegerField(blank=True, null=True, verbose_name='이미지 넘버링')),
('plush_date', models.DateTimeField(auto_now_add=True, verbose_name='등록일')),
('name', models.CharField(blank=True, max_length=255, verbose_name='제목')),
('keyword', models.CharField(blank=True, max_length=255, verbose_name='키워드')),
('state_at', models.IntegerField(blank=True, choices=[(0, '대기'), (1, '수정완료'), (2, '확정'), (3, '보류'), (4, '취소'), (5, '무효처리')], default=0, null=True, verbose_name='시안 상태')),
('state', models.BooleanField(default=True, verbose_name='노출 상태')),
('images', imagekit.models.fields.ProcessedImageField(blank=True, upload_to=design.models.OrderImg.upload_to_order, verbose_name='sample_img')),
('link', models.CharField(blank=True, max_length=255, verbose_name='링크 url')),
('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='design.Category', verbose_name='카테고리')),
],
options={
'verbose_name': '주문 3_시안',
'verbose_name_plural': '주문 3_시안',
},
),
migrations.CreateModel(
name='OrderInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('today_num', models.IntegerField(default=1, verbose_name='today-넘버링')),
('joo_date', models.DateField(blank=True, null=True, verbose_name='주문일')),
('order_date', models.DateField(blank=True, null=True, verbose_name='발주일')),
('company', models.CharField(blank=True, max_length=255, null=True, verbose_name='업체명')),
('company_keyword', models.CharField(blank=True, max_length=255, null=True, verbose_name='업체 키워드')),
('address', models.CharField(blank=True, max_length=255, null=True, verbose_name='주소')),
('tell', models.CharField(blank=True, max_length=255, null=True, verbose_name='연락처')),
('state', models.IntegerField(blank=True, choices=[(0, '견적'), (1, '주문'), (2, '시안'), (3, '제작'), (4, '완료'), (5, '취소'), (6, '보류'), (7, '환불'), (8, '입금대기'), (9, '배송')], db_index=True, default=0, verbose_name='상태값')),
('tax', models.BooleanField(default=True, verbose_name='부가세포함')),
('keywords', models.CharField(blank=True, max_length=1000, null=True, verbose_name='검색창 내용')),
('checker', models.CharField(blank=True, max_length=255, null=True, verbose_name='시안 확인')),
('deposit', models.CharField(blank=True, max_length=100, null=True, verbose_name='입금 확인')),
('options', models.CharField(blank=True, max_length=255, null=True, verbose_name='결제/포함/택배')),
('employees', models.CharField(blank=True, db_index=True, max_length=100, null=True, verbose_name='작업자')),
('fix_manager', models.CharField(blank=True, db_index=True, max_length=100, null=True, verbose_name='담당자')),
('in_memo', models.CharField(blank=True, max_length=1000, null=True, verbose_name='관리자메모')),
('out_memo', models.CharField(blank=True, max_length=1000, null=True, verbose_name='고객표시 메모')),
('deposit_check', models.IntegerField(blank=True, null=True, verbose_name='입금 확인 및 차액')),
('uuid', models.UUIDField(default=uuid.uuid1)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='거래처')),
],
options={
'verbose_name': '주문 1_정보',
'verbose_name_plural': '주문 1_정보',
},
),
migrations.CreateModel(
name='OrderList',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('list_sort', models.IntegerField(db_index=True, verbose_name='내역 순서')),
('num', models.IntegerField(blank=True, null=True, verbose_name='동일 전표')),
('code', models.IntegerField(blank=True, db_index=True, null=True, verbose_name='품목코드')),
('name', models.CharField(blank=True, max_length=1000, null=True, verbose_name='제품명')),
('standard', models.CharField(blank=True, max_length=1000, null=True, verbose_name='규격')),
('quantity', models.IntegerField(db_index=True, verbose_name='수량')),
('price', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='매입가')),
('price_tax', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='매입 부가세')),
('selling_price', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='판매가')),
('selling_price_tax', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='판매 부가세')),
('group_manage', models.CharField(blank=True, max_length=1000, null=True, verbose_name='관리 항목')),
('gram', models.CharField(blank=True, max_length=1000, null=True, verbose_name='기타')),
('etc', models.CharField(blank=True, max_length=1000, null=True, verbose_name='적요')),
('memo', models.CharField(blank=True, max_length=2000, null=True, verbose_name='메모')),
('order_info', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='order_list', to='design.OrderInfo', verbose_name='주문 정보')),
],
options={
'verbose_name': '주문 2_품목',
'verbose_name_plural': '주문 2_품목',
},
),
migrations.CreateModel(
name='OrderMemo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('check', models.BooleanField(default=False, verbose_name='확인')),
('memo', models.CharField(blank=True, max_length=9000, null=True, verbose_name='내용')),
('location', models.CharField(blank=True, max_length=9000, null=True, verbose_name='위치 좌표')),
('order_img', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='design.OrderImg', verbose_name='주문 이미지')),
],
options={
'verbose_name': '주문 4_시안 메모',
'verbose_name_plural': '주문 4_시안 메모',
},
),
migrations.CreateModel(
name='PaperOption',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('title', models.CharField(blank=True, max_length=100, null=True, verbose_name='용지 이름')),
('gram', models.CharField(blank=True, max_length=100, null=True, verbose_name='그람수')),
('color', models.CharField(blank=True, max_length=100, null=True, verbose_name='색상')),
('option', models.CharField(blank=True, max_length=100, null=True, verbose_name='옵션 코팅/유무')),
],
options={
'verbose_name': '상품옵션 _용지옵션',
'verbose_name_plural': '상품옵션 _용지옵션',
},
),
migrations.CreateModel(
name='ProductBase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('code', models.IntegerField(blank=True, null=True, verbose_name='품목코드')),
('title', models.CharField(blank=True, max_length=128, null=True, verbose_name='품목명')),
('slug', models.SlugField(allow_unicode=True, blank=True, max_length=255, null=True, unique=True, verbose_name='slug')),
('sell_price', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='판매가')),
('buy_price', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='매입가')),
('main_quantity', models.BooleanField(default=False, verbose_name='메인 수량')),
('ecount', models.BooleanField(default=False, verbose_name='이카운트 전송')),
('product_active', models.BooleanField(default=False, verbose_name='활성화')),
('category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='product_base_category', to='design.Category', verbose_name='카테고리')),
('etc', models.ManyToManyField(blank=True, related_name='product_base_etc', to='design.EtcOption')),
('paper', models.ManyToManyField(blank=True, related_name='product_base_paper', to='design.PaperOption')),
],
options={
'verbose_name': '상품옵션_통합',
'verbose_name_plural': '상품옵션_통합',
},
),
migrations.CreateModel(
name='ProductText',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('standard', models.CharField(blank=True, max_length=255, null=True, verbose_name='규격명')),
('horizontal', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='가로')),
('vertical', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='세로')),
('width', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='넓이')),
('height', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='높이')),
('paper', models.CharField(blank=True, max_length=255, null=True, verbose_name='재질')),
('gram', models.CharField(blank=True, max_length=100, null=True, verbose_name='그람수')),
('color', models.CharField(blank=True, max_length=100, null=True, verbose_name='색상')),
('paper_option', models.CharField(blank=True, max_length=100, null=True, verbose_name='옵션 코팅/유무')),
('side', models.CharField(blank=True, max_length=100, null=True, verbose_name='양면 / 단면')),
('etc', models.CharField(blank=True, max_length=100, null=True, verbose_name='기타 정보')),
('etc_option', models.CharField(blank=True, max_length=100, null=True, verbose_name='기타 옵션 상세')),
('memo', models.CharField(blank=True, max_length=100, null=True, verbose_name='기타 옵션 메모')),
('code', models.IntegerField(blank=True, null=True, verbose_name='품목코드')),
('title', models.CharField(blank=True, max_length=128, null=True, verbose_name='품목명')),
('slug', models.SlugField(allow_unicode=True, blank=True, max_length=255, null=True, unique=True, verbose_name='slug')),
('sell_price', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='판매가')),
('buy_price', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='매입가')),
('quantity', models.DecimalField(blank=True, decimal_places=4, default=0, max_digits=11, null=True, verbose_name='메인 수량')),
('main_quantity', models.BooleanField(default=False, verbose_name='메인 수량')),
('ecount', models.BooleanField(default=True, verbose_name='이카운트 전송')),
('product_active', models.BooleanField(default=False, verbose_name='활성화')),
('product_version', models.IntegerField(blank=True, default=1, null=True, verbose_name='version')),
('group', models.IntegerField(blank=True, null=True, verbose_name='group')),
('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='design.Category', verbose_name='카테고리')),
('supplier', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='매입처')),
],
options={
'verbose_name': '상품옵션_통합 str',
'verbose_name_plural': '상품옵션_통합 str',
},
),
migrations.CreateModel(
name='SectorsCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('title', models.CharField(max_length=128, verbose_name='title')),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='design.SectorsCategory', verbose_name='parent')),
],
options={
'verbose_name': '카테고리 업종',
'verbose_name_plural': '카테고리 업종',
},
),
migrations.CreateModel(
name='SideOption',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('title', models.CharField(blank=True, max_length=100, null=True, verbose_name='양면 / 단면')),
],
options={
'verbose_name': '상품옵션 _양면/단면',
'verbose_name_plural': '상품옵션 _양면/단면',
},
),
migrations.CreateModel(
name='StandardOption',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('is_removed', models.BooleanField(default=False)),
('title', models.CharField(blank=True, max_length=100, null=True, verbose_name='품목명')),
('horizontal', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='가로')),
('vertical', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='세로')),
('width', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='넓이')),
('height', models.DecimalField(blank=True, decimal_places=4, max_digits=11, null=True, verbose_name='높이')),
],
options={
'verbose_name': '상품옵션 _기본 규격',
'verbose_name_plural': '상품옵션 _기본 규격',
},
),
migrations.AddField(
model_name='productbase',
name='side',
field=models.ManyToManyField(blank=True, related_name='product_base_side', to='design.SideOption'),
),
migrations.AddField(
model_name='productbase',
name='standard',
field=models.ManyToManyField(blank=True, related_name='product_base_standard', to='design.StandardOption'),
),
migrations.AddField(
model_name='productbase',
name='supplier',
field=models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL, verbose_name='매입처'),
),
migrations.AddField(
model_name='orderimg',
name='order_info',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='order_img', to='design.OrderInfo', verbose_name='주문 리스트'),
),
migrations.AddField(
model_name='orderimg',
name='sectors_category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='design.SectorsCategory', verbose_name='업종'),
),
]
| 72.332432
| 227
| 0.623323
| 2,971
| 26,763
| 5.439583
| 0.102996
| 0.127962
| 0.077037
| 0.087
| 0.83553
| 0.813997
| 0.774086
| 0.75905
| 0.74191
| 0.712518
| 0
| 0.013627
| 0.226768
| 26,763
| 369
| 228
| 72.528455
| 0.767324
| 0.001681
| 0
| 0.494475
| 1
| 0
| 0.130034
| 0.004005
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027624
| 0
| 0.038674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
562d3d97a95991880cdc57a20b3d842dfd0c0e9a
| 152
|
py
|
Python
|
srs/blog/admin.py
|
Ramguru94/python_django_school
|
bedaba575f8986fd17aaf7dcb920769224a9fc07
|
[
"MIT"
] | 3
|
2020-03-27T12:34:15.000Z
|
2021-01-06T09:08:34.000Z
|
srs/blog/admin.py
|
Ramguru94/python_django_school
|
bedaba575f8986fd17aaf7dcb920769224a9fc07
|
[
"MIT"
] | null | null | null |
srs/blog/admin.py
|
Ramguru94/python_django_school
|
bedaba575f8986fd17aaf7dcb920769224a9fc07
|
[
"MIT"
] | 2
|
2020-10-25T14:11:32.000Z
|
2020-11-17T15:09:48.000Z
|
from django.contrib import admin
# Register your models here.
from django.contrib import admin
from blog.models import Post
admin.site.register(Post)
| 19
| 32
| 0.809211
| 23
| 152
| 5.347826
| 0.521739
| 0.162602
| 0.276423
| 0.373984
| 0.455285
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 152
| 7
| 33
| 21.714286
| 0.931818
| 0.171053
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5681991fa1e772ce60841c831b39e58ef39685a0
| 62,869
|
py
|
Python
|
Pre-Processing/src/audio_processing/file_lengths.py
|
jaquielajoie/Alzheimers_Project
|
0abc03dae4a81e390c9b0d1c479627273da9daf1
|
[
"Apache-2.0"
] | 5
|
2021-04-17T02:37:56.000Z
|
2021-11-14T07:35:44.000Z
|
Pre-Processing/src/audio_processing/file_lengths.py
|
jaquielajoie/Alzheimers_Project
|
0abc03dae4a81e390c9b0d1c479627273da9daf1
|
[
"Apache-2.0"
] | null | null | null |
Pre-Processing/src/audio_processing/file_lengths.py
|
jaquielajoie/Alzheimers_Project
|
0abc03dae4a81e390c9b0d1c479627273da9daf1
|
[
"Apache-2.0"
] | null | null | null |
class FileLengths:
def __init__(self):
self.file_lengths = [
[
54.3869387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/212-1.mp3"
],
[
38.138775510204084,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/271-2.mp3"
],
[
50.10285714285714,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/707-0.mp3"
],
[
110.18448979591837,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/236-0.mp3"
],
[
155.8465306122449,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/033-1.mp3"
],
[
80.352,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/563-0.mp3"
],
[
70.26938775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/526-1.mp3"
],
[
55.58857142857143,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/033-0.mp3"
],
[
81.84163265306123,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/076-0.mp3"
],
[
68.54530612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/172-1.mp3"
],
[
66.53387755102041,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/587-0.mp3"
],
[
82.65142857142857,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/257-0.mp3"
],
[
89.54775510204081,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/212-0.mp3"
],
[
70.19102040816327,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/049-0.mp3"
],
[
108.90448979591837,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/212-2.mp3"
],
[
108.61714285714285,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/257-2.mp3"
],
[
49.50204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/465-0.mp3"
],
[
82.8,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/578-0.mp3"
],
[
36.28408163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/172-3.mp3"
],
[
113.73714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/154-0.mp3"
],
[
80.03918367346938,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/585-0.mp3"
],
[
40.333061224489796,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/601-0.mp3"
],
[
95.13795918367347,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/076-2.mp3"
],
[
60.70857142857143,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/033-2.mp3"
],
[
268.4865306122449,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/268-0.mp3"
],
[
35.944489795918365,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/458-0.mp3"
],
[
104.35918367346939,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/089-0.mp3"
],
[
30.798367346938775,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/660-0.mp3"
],
[
62.223673469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/050-0.mp3"
],
[
64.86204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/033-3.mp3"
],
[
113.42367346938775,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/154-1.mp3"
],
[
128.88816326530613,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/329-0.mp3"
],
[
36.179591836734694,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/172-2.mp3"
],
[
76.35591836734694,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/705-0.mp3"
],
[
76.7738775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/212-3.mp3"
],
[
60.16,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/049-1.mp3"
],
[
29.466122448979593,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/234-0.mp3"
],
[
39.862857142857145,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/291-1.mp3"
],
[
82.44244897959183,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/310-0.mp3"
],
[
40.777142857142856,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/355-0.mp3"
],
[
41.16897959183674,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/190-1.mp3"
],
[
83.17387755102041,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/334-1.mp3"
],
[
37.198367346938774,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/598-0.mp3"
],
[
78.7330612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/035-1.mp3"
],
[
52.03591836734694,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/368-0.mp3"
],
[
81.42367346938775,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/581-0.mp3"
],
[
77.00897959183673,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/094-1.mp3"
],
[
121.392,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/539-0.mp3"
],
[
109.87102040816326,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/461-0.mp3"
],
[
74.08326530612244,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/035-0.mp3"
],
[
99.91836734693878,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/334-0.mp3"
],
[
47.33387755102041,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/504-0.mp3"
],
[
58.200816326530614,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/355-1.mp3"
],
[
29.048163265306123,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/640-0.mp3"
],
[
51.304489795918364,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/291-2.mp3"
],
[
71.31428571428572,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/190-2.mp3"
],
[
53.498775510204084,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/642-0.mp3"
],
[
55.51020408163265,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/607-0.mp3"
],
[
88.39836734693877,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/506-0.mp3"
],
[
98.56,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/070-2.mp3"
],
[
33.22775510204082,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/216-0.mp3"
],
[
57.286530612244896,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/076-4.mp3"
],
[
88.76408163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/033-4.mp3"
],
[
70.6873469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/094-3.mp3"
],
[
96.3395918367347,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/094-2.mp3"
],
[
48.77061224489796,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/216-1.mp3"
],
[
35.65714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/703-0.mp3"
],
[
75.96,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/357-0.mp3"
],
[
60.55183673469388,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/293-1.mp3"
],
[
81.63265306122449,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/030-0.mp3"
],
[
34.795102040816325,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/016-3.mp3"
],
[
93.49224489795918,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/350-1.mp3"
],
[
33.201632653061225,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/168-0.mp3"
],
[
150.1518367346939,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/269-0.mp3"
],
[
63.3469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/051-1.mp3"
],
[
35.47428571428571,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/544-0.mp3"
],
[
134.7395918367347,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/029-1.mp3"
],
[
46.18448979591837,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/579-0.mp3"
],
[
71.44489795918368,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/091-1.mp3"
],
[
59.40244897959184,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/270-1.mp3"
],
[
86.23020408163265,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/213-2.mp3"
],
[
45.008979591836734,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/134-1.mp3"
],
[
76.01632653061225,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/349-0.mp3"
],
[
39.82430839002267,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/157-2.mp3"
],
[
61.10040816326531,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/134-0.mp3"
],
[
71.34040816326531,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/010-4.mp3"
],
[
65.48897959183674,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/349-1.mp3"
],
[
114.59918367346938,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/213-3.mp3"
],
[
163.60489795918366,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/235-0.mp3"
],
[
87.82367346938776,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/270-0.mp3"
],
[
60.264489795918365,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/091-0.mp3"
],
[
59.32408163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/704-0.mp3"
],
[
77.00897959183673,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/029-0.mp3"
],
[
81.37142857142857,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/237-2.mp3"
],
[
23.588571428571427,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/544-1.mp3"
],
[
55.48408163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/051-0.mp3"
],
[
101.4073469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/269-1.mp3"
],
[
84.37551020408164,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/168-1.mp3"
],
[
86.15183673469387,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/350-0.mp3"
],
[
75.8595918367347,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/030-1.mp3"
],
[
59.42857142857143,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/624-0.mp3"
],
[
60.18612244897959,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/016-0.mp3"
],
[
28.473469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/663-0.mp3"
],
[
120.89469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/014-2.mp3"
],
[
139.46775510204083,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/051-2.mp3"
],
[
70.13877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/527-0.mp3"
],
[
56.32,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/562-0.mp3"
],
[
97.28,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/466-1.mp3"
],
[
61.28326530612245,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/091-2.mp3"
],
[
101.30285714285715,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/213-1.mp3"
],
[
85.73387755102041,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/270-2.mp3"
],
[
87.53632653061224,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/235-2.mp3"
],
[
164.7596598639456,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/157-1.mp3"
],
[
45.19183673469388,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/134-2.mp3"
],
[
60.21224489795918,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/134-3.mp3"
],
[
82.42530612244897,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/157-0.mp3"
],
[
75.91183673469388,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/466-0.mp3"
],
[
74.8930612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/173-1.mp3"
],
[
65.95918367346938,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/237-1.mp3"
],
[
56.088,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/527-1.mp3"
],
[
109.37469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/051-3.mp3"
],
[
131.68326530612245,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/053-1.mp3"
],
[
45.68816326530612,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/016-1.mp3"
],
[
63.111836734693874,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/252-0.mp3"
],
[
41.48244897959184,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/097-1.mp3"
],
[
39.54938775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/356-1.mp3"
],
[
113.2669387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/057-1.mp3"
],
[
51.74857142857143,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/337-0.mp3"
],
[
56.08489795918367,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/010-3.mp3"
],
[
104.38530612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/010-2.mp3"
],
[
79.6734693877551,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/057-0.mp3"
],
[
93.80571428571429,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/356-0.mp3"
],
[
45.94938775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/289-2.mp3"
],
[
67.70938775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/252-1.mp3"
],
[
41.48244897959184,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/702-0.mp3"
],
[
80.01306122448979,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/016-4.mp3"
],
[
141.0873469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/276-0.mp3"
],
[
136.09795918367348,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/369-0.mp3"
],
[
57.88734693877551,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/559-0.mp3"
],
[
66.56,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/620-0.mp3"
],
[
71.44489795918368,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/148-0.mp3"
],
[
125.51836734693877,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/057-2.mp3"
],
[
107.25877551020409,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/698-0.mp3"
],
[
56.42448979591837,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/354-0.mp3"
],
[
51.69632653061225,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/311-0.mp3"
],
[
112.14367346938775,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/010-0.mp3"
],
[
77.53142857142858,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/010-1.mp3"
],
[
51.38285714285714,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/252-2.mp3"
],
[
49.47591836734694,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/580-0.mp3"
],
[
65.17551020408163,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/639-0.mp3"
],
[
68.93714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/681-0.mp3"
],
[
71.9934693877551,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/341-0.mp3"
],
[
98.13616780045352,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/184-1.mp3"
],
[
82.80816326530612,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/005-0.mp3"
],
[
94.01469387755103,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/635-0.mp3"
],
[
33.671836734693876,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/488-1.mp3"
],
[
81.5804081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/358-1.mp3"
],
[
144.6138775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/125-0.mp3"
],
[
39.20979591836735,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/381-0.mp3"
],
[
70.21714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/144-1.mp3"
],
[
51.226122448979595,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/339-0.mp3"
],
[
55.431836734693874,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/144-0.mp3"
],
[
57.652244897959186,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/595-0.mp3"
],
[
56.92081632653061,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/381-1.mp3"
],
[
37.4334693877551,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/358-0.mp3"
],
[
42.81469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/488-0.mp3"
],
[
53.34204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/007-3.mp3"
],
[
113.81551020408163,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/023-2.mp3"
],
[
50.07673469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/184-0.mp3"
],
[
62.58292517006803,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/184-2.mp3"
],
[
110.52408163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/023-0.mp3"
],
[
81.18857142857142,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/066-0.mp3"
],
[
34.481632653061226,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/656-0.mp3"
],
[
54.543673469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/007-1.mp3"
],
[
66.71673469387756,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/493-1.mp3"
],
[
157.7273469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/247-0.mp3"
],
[
49.397551020408166,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/127-0.mp3"
],
[
60.31673469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/226-0.mp3"
],
[
84.87183673469387,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/493-0.mp3"
],
[
94.92897959183674,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/306-0.mp3"
],
[
65.8808163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/343-0.mp3"
],
[
55.222857142857144,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/005-2.mp3"
],
[
53.42040816326531,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/573-0.mp3"
],
[
40.96,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/672-0.mp3"
],
[
37.328979591836735,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/164-2.mp3"
],
[
95.34693877551021,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/222-0.mp3"
],
[
28.813061224489797,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/676-0.mp3"
],
[
81.29306122448979,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/001-2.mp3"
],
[
44.8,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/283-1.mp3"
],
[
235.8595918367347,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/003-0.mp3"
],
[
61.02204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/046-0.mp3"
],
[
64.86204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/497-0.mp3"
],
[
56.60734693877551,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/497-1.mp3"
],
[
55.797551020408164,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/283-0.mp3"
],
[
72.01959183673469,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/062-0.mp3"
],
[
128.2873469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/222-1.mp3"
],
[
51.313265306122446,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/164-3.mp3"
],
[
116.66285714285715,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/206-0.mp3"
],
[
36.33632653061225,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/220-1.mp3"
],
[
191.16408163265305,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/dementia/018-0.mp3"
],
[
54.23020408163265,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/113-1.mp3"
],
[
45.89714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/092-0.mp3"
],
[
70.79183673469387,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/172-0.mp3"
],
[
42.840816326530614,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/056-4.mp3"
],
[
99.57877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/137-0.mp3"
],
[
53.21142857142857,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/013-4.mp3"
],
[
51.98367346938775,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/627-0.mp3"
],
[
99.78775510204082,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/015-2.mp3"
],
[
40.80326530612245,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/297-1.mp3"
],
[
54.073469387755104,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/052-0.mp3"
],
[
56.11102040816326,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/209-2.mp3"
],
[
67.21306122448979,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/196-1.mp3"
],
[
60.969795918367346,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/196-0.mp3"
],
[
121.46938775510205,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/209-3.mp3"
],
[
69.1461224489796,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/015-3.mp3"
],
[
31.634285714285713,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/332-0.mp3"
],
[
54.674285714285716,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/137-1.mp3"
],
[
100.93714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/210-2.mp3"
],
[
59.794285714285714,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/092-1.mp3"
],
[
41.95265306122449,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/113-0.mp3"
],
[
73.56081632653061,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/686-0.mp3"
],
[
57.6,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/113-2.mp3"
],
[
26.30530612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/092-3.mp3"
],
[
101.35510204081632,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/684-0.mp3"
],
[
47.72571428571428,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/255-0.mp3"
],
[
71.49714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/028-1.mp3"
],
[
65.8808163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/137-3.mp3"
],
[
28.133877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/295-0.mp3"
],
[
97.69795918367348,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/015-1.mp3"
],
[
61.49224489795918,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/297-2.mp3"
],
[
39.44489795918367,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/209-1.mp3"
],
[
27.141224489795917,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/052-2.mp3"
],
[
117.18530612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/015-0.mp3"
],
[
26.070204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/295-1.mp3"
],
[
38.373877551020406,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/137-2.mp3"
],
[
41.87428571428571,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/255-1.mp3"
],
[
90.33142857142857,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/210-1.mp3"
],
[
37.7469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/092-2.mp3"
],
[
50.31183673469388,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/113-3.mp3"
],
[
96.52244897959184,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/128-1.mp3"
],
[
39.07918367346939,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/229-1.mp3"
],
[
73.97877551020409,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/054-0.mp3"
],
[
71.47102040816327,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/028-4.mp3"
],
[
53.05469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/013-2.mp3"
],
[
75.25877551020409,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/248-0.mp3"
],
[
66.76897959183674,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/275-0.mp3"
],
[
54.25632653061224,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/096-2.mp3"
],
[
78.0538775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/015-4.mp3"
],
[
62.61551020408163,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/150-1.mp3"
],
[
45.74040816326531,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/150-0.mp3"
],
[
47.46448979591837,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/275-1.mp3"
],
[
84.1404081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/248-1.mp3"
],
[
44.93061224489796,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/056-3.mp3"
],
[
58.51428571428571,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/013-3.mp3"
],
[
63.29469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/192-2.mp3"
],
[
131.57877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/128-2.mp3"
],
[
34.220408163265304,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/229-2.mp3"
],
[
45.92326530612245,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/192-0.mp3"
],
[
71.28816326530612,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/096-1.mp3"
],
[
76.14693877551021,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/232-1.mp3"
],
[
70.47836734693878,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/150-2.mp3"
],
[
100.54530612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/017-4.mp3"
],
[
74.73632653061225,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/232-0.mp3"
],
[
57.10367346938776,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/013-0.mp3"
],
[
39.157551020408164,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/056-0.mp3"
],
[
40.6204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/248-2.mp3"
],
[
168.6204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/128-3.mp3"
],
[
77.84489795918367,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/336-1.mp3"
],
[
77.00897959183673,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/109-1.mp3"
],
[
92.78693877551021,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/296-2.mp3"
],
[
36.440816326530616,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/114-4.mp3"
],
[
37.17224489795918,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/208-1.mp3"
],
[
86.54367346938776,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/034-4.mp3"
],
[
67.73551020408163,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/155-0.mp3"
],
[
35.056326530612246,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/071-4.mp3"
],
[
83.61795918367346,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/678-0.mp3"
],
[
67.34367346938775,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/256-2.mp3"
],
[
32.496326530612244,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/171-1.mp3"
],
[
43.10204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/171-0.mp3"
],
[
85.49877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/211-1.mp3"
],
[
62.69387755102041,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/208-0.mp3"
],
[
59.14122448979592,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/661-0.mp3"
],
[
89.12979591836735,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/296-1.mp3"
],
[
73.11673469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/208-2.mp3"
],
[
51.46122448979592,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/155-3.mp3"
],
[
40.646530612244895,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/256-1.mp3"
],
[
46.10612244897959,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/093-0.mp3"
],
[
30.824489795918367,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/093-1.mp3"
],
[
62.0669387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/256-0.mp3"
],
[
91.55918367346939,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/155-2.mp3"
],
[
75.62448979591836,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/211-2.mp3"
],
[
64.88816326530612,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/296-0.mp3"
],
[
55.51020408163265,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/109-3.mp3"
],
[
60.68244897959184,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/068-3.mp3"
],
[
60.34285714285714,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/132-1.mp3"
],
[
49.136326530612244,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/114-2.mp3"
],
[
48.53551020408163,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/175-3.mp3"
],
[
33.28,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/130-3.mp3"
],
[
62.95510204081633,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/034-2.mp3"
],
[
51.69632653061225,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/071-2.mp3"
],
[
81.9461224489796,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/073-0.mp3"
],
[
72.56816326530613,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/073-1.mp3"
],
[
87.58857142857143,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/034-3.mp3"
],
[
61.544489795918366,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/071-3.mp3"
],
[
58.51428571428571,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/292-1.mp3"
],
[
58.5665306122449,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/175-2.mp3"
],
[
27.40244897959184,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/130-2.mp3"
],
[
38.76571428571429,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/274-2.mp3"
],
[
49.16244897959184,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/114-3.mp3"
],
[
57.67836734693878,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/132-0.mp3"
],
[
53.05469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/068-2.mp3"
],
[
61.36163265306122,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/109-4.mp3"
],
[
68.23183673469387,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/068-0.mp3"
],
[
57.626122448979594,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/114-1.mp3"
],
[
55.77142857142857,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/274-0.mp3"
],
[
66.71673469387756,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/175-0.mp3"
],
[
34.14204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/071-1.mp3"
],
[
60.86530612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/034-1.mp3"
],
[
74.34448979591836,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/129-1.mp3"
],
[
61.83183673469388,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/055-0.mp3"
],
[
83.25224489795919,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/073-3.mp3"
],
[
45.06122448979592,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/071-0.mp3"
],
[
92.23836734693877,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/034-0.mp3"
],
[
29.675102040816327,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/130-1.mp3"
],
[
72.96,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/175-1.mp3"
],
[
39.3665306122449,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/274-1.mp3"
],
[
45.583673469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/114-0.mp3"
],
[
97.25387755102041,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/121-4.mp3"
],
[
41.53469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/118-0.mp3"
],
[
37.12,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/322-2.mp3"
],
[
56.05877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/118-3.mp3"
],
[
65.98530612244897,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/042-1.mp3"
],
[
34.82122448979592,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/021-2.mp3"
],
[
49.11020408163265,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/059-2.mp3"
],
[
78.65469387755103,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/245-2.mp3"
],
[
56.52897959183674,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/146-1.mp3"
],
[
38.713469387755104,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/059-3.mp3"
],
[
32.888163265306126,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/021-3.mp3"
],
[
49.78938775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/118-2.mp3"
],
[
81.03183673469388,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/086-4.mp3"
],
[
64.54857142857144,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/322-1.mp3"
],
[
52.74122448979592,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/304-2.mp3"
],
[
84.21877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/243-1.mp3"
],
[
102.26938775510204,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/121-2.mp3"
],
[
51.14775510204082,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/142-1.mp3"
],
[
53.18530612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/107-1.mp3"
],
[
106.81469387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/086-0.mp3"
],
[
54.59591836734694,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/267-0.mp3"
],
[
33.72408163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/166-0.mp3"
],
[
36.4669387755102,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/042-4.mp3"
],
[
52.950204081632656,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/140-3.mp3"
],
[
59.66367346938775,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/138-3.mp3"
],
[
48.53551020408163,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/302-0.mp3"
],
[
95.0595918367347,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/105-2.mp3"
],
[
102.4,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/166-1.mp3"
],
[
56.60734693877551,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/086-1.mp3"
],
[
68.02285714285715,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/142-0.mp3"
],
[
82.67755102040816,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/121-3.mp3"
],
[
128.20897959183674,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/243-0.mp3"
],
[
28.212244897959184,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/107-2.mp3"
],
[
95.50367346938775,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/121-1.mp3"
],
[
43.33714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/298-1.mp3"
],
[
50.83428571428571,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/086-3.mp3"
],
[
84.29714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/140-0.mp3"
],
[
72.75102040816327,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/105-0.mp3"
],
[
29.622857142857143,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/021-4.mp3"
],
[
34.61224489795919,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/059-4.mp3"
],
[
48.40489795918367,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/182-3.mp3"
],
[
35.18693877551021,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/631-0.mp3"
],
[
77.27020408163266,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/138-1.mp3"
],
[
55.32734693877551,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/166-2.mp3"
],
[
54.543673469387755,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/105-1.mp3"
],
[
57.67836734693878,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/267-2.mp3"
],
[
61.17877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/118-4.mp3"
],
[
46.75918367346939,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/086-2.mp3"
],
[
147.8530612244898,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/121-0.mp3"
],
[
48.71836734693878,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/142-3.mp3"
],
[
71.75836734693877,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/227-1.mp3"
],
[
78.62857142857143,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/612-0.mp3"
],
[
98.2204081632653,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/323-0.mp3"
],
[
55.48408163265306,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/022-0.mp3"
],
[
44.01632653061225,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/022-1.mp3"
],
[
77.50530612244899,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/323-1.mp3"
],
[
132.2057142857143,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/225-2.mp3"
],
[
77.60979591836735,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/145-3.mp3"
],
[
58.5665306122449,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/227-0.mp3"
],
[
52.21877551020408,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/145-1.mp3"
],
[
115.01714285714286,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/225-0.mp3"
],
[
79.80408163265307,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/124-0.mp3"
],
[
43.59836734693877,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/006-2.mp3"
],
[
52.32326530612245,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/340-0.mp3"
],
[
52.375510204081635,
"/Users/jackie/Documents/AD_CGU/Pre-Processing/test_data/mp3/control/022-2.mp3"
]
]
| 39.967578
| 96
| 0.506863
| 6,280
| 62,869
| 4.948567
| 0.107484
| 0.138752
| 0.252277
| 0.277504
| 0.776941
| 0.776941
| 0.776941
| 0.776941
| 0.776941
| 0.776941
| 0
| 0.215941
| 0.374557
| 62,869
| 1,572
| 97
| 39.993003
| 0.5744
| 0
| 0
| 0.26972
| 0
| 0.249364
| 0.483291
| 0.483291
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000636
| false
| 0
| 0
| 0
| 0.001272
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3b0d6fd18472cbd0df58e57e09cd86895e9e66cb
| 124
|
py
|
Python
|
app/main/service/balance_service.py
|
luifer-villalba/capibara-server
|
22b2a9ea31fcf11d2930c55998824e980c29d1cb
|
[
"MIT"
] | null | null | null |
app/main/service/balance_service.py
|
luifer-villalba/capibara-server
|
22b2a9ea31fcf11d2930c55998824e980c29d1cb
|
[
"MIT"
] | null | null | null |
app/main/service/balance_service.py
|
luifer-villalba/capibara-server
|
22b2a9ea31fcf11d2930c55998824e980c29d1cb
|
[
"MIT"
] | null | null | null |
from app.main import db
from app.main.model.balance import Balance
def get_all_balances():
return Balance.query.all()
| 17.714286
| 42
| 0.766129
| 20
| 124
| 4.65
| 0.65
| 0.150538
| 0.236559
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 124
| 6
| 43
| 20.666667
| 0.877358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
3b108ece47dbf079927f08c344a0bd6738c6aabf
| 120
|
py
|
Python
|
conftest.py
|
michalk8/anndata
|
664e32b0aa6625fe593370d37174384c05abfd4e
|
[
"BSD-3-Clause"
] | null | null | null |
conftest.py
|
michalk8/anndata
|
664e32b0aa6625fe593370d37174384c05abfd4e
|
[
"BSD-3-Clause"
] | null | null | null |
conftest.py
|
michalk8/anndata
|
664e32b0aa6625fe593370d37174384c05abfd4e
|
[
"BSD-3-Clause"
] | null | null | null |
# This file exists just to allow ignoring warnings without test collection failing on CI
# TODO: Fix that
import pytest
| 30
| 88
| 0.8
| 19
| 120
| 5.052632
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175
| 120
| 3
| 89
| 40
| 0.969697
| 0.841667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8ee3540b670d397c75f44f144c5818a4eae703c0
| 66
|
py
|
Python
|
examples/underscored/trivial_string.py
|
doboy/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 7
|
2016-09-23T00:44:05.000Z
|
2021-10-04T21:19:12.000Z
|
examples/underscored/trivial_string.py
|
jameswu1991/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 1
|
2016-09-23T00:45:05.000Z
|
2019-02-16T19:05:37.000Z
|
examples/underscored/trivial_string.py
|
jameswu1991/Underscore
|
d98273db3144cda79191d2c90f45d81b6d700b1f
|
[
"MIT"
] | 3
|
2016-09-23T01:13:15.000Z
|
2018-07-20T21:22:17.000Z
|
# x = 'x'
# print(x)
(__,) = ('x',)
_ = __
print _
(x,) = (_,)
| 8.25
| 14
| 0.333333
| 7
| 66
| 2.142857
| 0.285714
| 0.266667
| 0.933333
| 1.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.30303
| 66
| 7
| 15
| 9.428571
| 0.326087
| 0.257576
| 0
| 0
| 0
| 0
| 0.022222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d91dc5b856047f971e1f91ad846ea94d6e9ef2bf
| 16,859
|
py
|
Python
|
package/test/integrated_mongos_test.py
|
yektas/ambari-mongodb
|
f7cf763183b9aa85d1bc47a7918f3d05f61a69ee
|
[
"BSD-3-Clause"
] | null | null | null |
package/test/integrated_mongos_test.py
|
yektas/ambari-mongodb
|
f7cf763183b9aa85d1bc47a7918f3d05f61a69ee
|
[
"BSD-3-Clause"
] | null | null | null |
package/test/integrated_mongos_test.py
|
yektas/ambari-mongodb
|
f7cf763183b9aa85d1bc47a7918f3d05f61a69ee
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import sys
from time import sleep
# Ambari includes
from resource_management.core.exceptions import ComponentIsNotRunning
from resource_management.libraries.script import Script
# Custom service test classes includes
from integrated_base_test import IntegratedBaseTestCase
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PACKAGE_DIR = os.path.join(SCRIPT_DIR, '../scripts/')
SERVICE_DIR = os.path.join(SCRIPT_DIR, '../')
sys.path.append(PACKAGE_DIR)
sys.path.append(SERVICE_DIR)
# Custom service scripts includes
import params
from mongos import MongosServer
from mongo_base import InstanceConfig
from mongo_base import InstanceStatus
from mongo_config import MongoConfigServer
class IntegratedMongoConfTestCase(IntegratedBaseTestCase):
def setUp(self):
self.as_super = super(IntegratedMongoConfTestCase, self)
self.as_super.setUp()
self.config_server = None
params.try_interval = 4
params.times_to_try = 10
# Configuring and Installing mongo config dependencies
server = MongoConfigServer()
server.my_hostname = 'node1.test.com'
server.configure(self.env)
server.install(self.env)
# Configuring and Installing mongos dependencies
server = MongosServer()
server.my_hostname = 'node1.test.com'
server.configure(self.env)
server.install(self.env)
def tearDown(self):
self.as_super = super(IntegratedMongoConfTestCase, self)
self.as_super.tearDown()
if self.config_server:
self.config_server.stop(self.env)
def several_hosts_setup(self):
Script.config['clusterHostInfo'] = {
'mongos_hosts': ['node1.test.com','node2.test.com'],
'mongodb_hosts': [],
'mongodc_hosts': ['node1.test.com','node2.test.com','node3.test.com']
}
params.mongos_cluster_definition = ''
def several_hosts_setup_with_config_server(self):
Script.config['clusterHostInfo'] = {
'mongos_hosts': ['node1.test.com','node2.test.com'],
'mongodb_hosts': [],
'mongodc_hosts': ['node1.test.com']
}
params.mongos_cluster_definition = ''
# Starting the required config server
self.config_server = MongoConfigServer()
self.config_server.my_hostname = 'node1.test.com'
self.config_server.start(self.env)
expected_cluster_status_for_several_hosts_stopped = [
('0',['node1.test.com','node2.test.com'], [
InstanceStatus(shard_name='0',
pid_file_name='/var/run/mongodb/node1_0_0.pid',
final_db_path='/var/lib/mongodb/node1_0_0',
log_file='/var/log/mongodb/node1_0_0.log',
db_port='27017',
host_name='node1.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='0',
pid_file_name='/var/run/mongodb/node2_0_0.pid',
final_db_path='/var/lib/mongodb/node2_0_0',
log_file='/var/log/mongodb/node2_0_0.log',
db_port='27017',
host_name='node2.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None)])]
def one_host_setup(self):
Script.config['clusterHostInfo'] = {
'mongos_hosts': ['node1.test.com'],
'mongodb_hosts': [],
'mongodc_hosts': ['node1.test.com']
}
params.mongos_cluster_definition = 'node1.test.com,node1.test.com'
self.config_server = MongoConfigServer()
self.config_server.my_hostname = 'node1.test.com'
self.config_server.start(self.env)
expected_cluster_status_for_one_host_stopped = [
('0',['node1.test.com','node1.test.com'], [
InstanceStatus(shard_name='0',
pid_file_name='/var/run/mongodb/node1_0_0.pid',
final_db_path='/var/lib/mongodb/node1_0_0',
log_file='/var/log/mongodb/node1_0_0.log',
db_port='27017',
host_name='node1.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None),
InstanceStatus(shard_name='0',
pid_file_name='/var/run/mongodb/node1_0_1.pid',
final_db_path='/var/lib/mongodb/node1_0_1',
log_file='/var/log/mongodb/node1_0_1.log',
db_port='27018',
host_name='node1.test.com',
is_arbiter=False,
is_started=False,
is_repl_configurated=None,
repl_role=None)])]
def test_get_cluster_data_with_one_host(self):
self.one_host_setup()
server = MongosServer()
server.my_hostname = 'node1.test.com'
expectedClusterData = [('0', ['node1.test.com', 'node1.test.com'],
[InstanceConfig(shard_name='0',
pid_file_name='/var/run/mongodb/node1_0_0.pid',
final_db_path='/var/lib/mongodb/node1_0_0',
log_file='/var/log/mongodb/node1_0_0.log',
db_port='27017',
host_name='node1.test.com',
is_arbiter=False),
InstanceConfig(shard_name='0',
pid_file_name='/var/run/mongodb/node1_0_1.pid',
final_db_path='/var/lib/mongodb/node1_0_1',
log_file='/var/log/mongodb/node1_0_1.log',
db_port='27018',
host_name='node1.test.com',
is_arbiter=False)])]
clusterData = server.getClusterData()
self.assertEqual(clusterData,expectedClusterData,"The cluster data for the mongos is not right")
def test_get_cluster_status_with_one_host(self):
self.one_host_setup()
server = MongosServer()
server.my_hostname = 'node1.test.com'
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus,self.expected_cluster_status_for_one_host_stopped,
"The cluster status result before stating the mongos is not right")
def test_stopping_an_already_stopped_cluster(self):
self.one_host_setup()
server = MongosServer()
server.my_hostname = 'node1.test.com'
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus,self.expected_cluster_status_for_one_host_stopped,
"The cluster status result before stating the mongos is not right")
server.stop(self.env)
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_one_host_stopped,
"The cluster status result after stopping the mongos is not right")
def test_mongos_in_one_host(self):
self.one_host_setup()
server = MongosServer()
server.my_hostname = 'node1.test.com'
with self.assertRaises(ComponentIsNotRunning):
server.status(self.env)
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_one_host_stopped,
"The cluster status result before stating the mongos is not right")
server.start(self.env)
sleep(self.SLEEP_INTERVAL_AFTER_START_A_INSTANCE)
server.status(self.env)
expectedClusterStatus = [('0', ['node1.test.com', 'node1.test.com'], [
InstanceStatus(shard_name='0',
pid_file_name='/var/run/mongodb/node1_0_0.pid',
final_db_path='/var/lib/mongodb/node1_0_0',
log_file='/var/log/mongodb/node1_0_0.log',
db_port='27017',
host_name='node1.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=False,
repl_role=None),
InstanceStatus(shard_name='0',
pid_file_name='/var/run/mongodb/node1_0_1.pid',
final_db_path='/var/lib/mongodb/node1_0_1',
log_file='/var/log/mongodb/node1_0_1.log',
db_port='27018',
host_name='node1.test.com',
is_arbiter=False,
is_started=True,
is_repl_configurated=False,
repl_role=None)])]
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, expectedClusterStatus,"The cluster status result for a started mongos is "
"not right")
server.stop(self.env)
with self.assertRaises(ComponentIsNotRunning):
server.status(self.env)
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_one_host_stopped,
"The cluster status result after stopping the mongos is not right")
def test_get_cluster_status_with_several_hosts(self):
self.several_hosts_setup_with_config_server()
server = MongosServer()
server.my_hostname = 'node1.test.com'
clusterStatus = server.getClusterStatus(server.getClusterData())
self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
"The cluster status result before stating the mongos is not right")
def test_mongos_with_several_hosts(self):
    """Start mongos on node2 then node1, verifying the reported cluster
    status after each step, then stop both and verify the final status.

    NOTE(review): relies on fixtures from the enclosing test class
    (several_hosts_setup_with_config_server, self.env,
    expected_cluster_status_for_several_hosts_stopped) -- presumably a
    mocked two-node environment; confirm against the class setUp.
    """
    self.several_hosts_setup_with_config_server()
    # One mongos wrapper per host.
    server2 = MongosServer()
    server2.my_hostname = 'node2.test.com'
    server1 = MongosServer()
    server1.my_hostname = 'node1.test.com'
    # Nothing started yet: expect the shared "all stopped" status.
    clusterStatus = server2.getClusterStatus(server2.getClusterData())
    self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
                     "The cluster status result before stating the mongos is not right")
    server2.start(self.env)
    sleep(self.SLEEP_INTERVAL_AFTER_START_A_INSTANCE)
    server2.status(self.env)
    # Only node2 is up: node1's instance is reported not started and its
    # replica-set flag is unknown (None); node2's is started.
    expectedClusterStatusServer2On = [
        ('0',['node1.test.com','node2.test.com'], [
            InstanceStatus(shard_name='0',
                           pid_file_name='/var/run/mongodb/node1_0_0.pid',
                           final_db_path='/var/lib/mongodb/node1_0_0',
                           log_file='/var/log/mongodb/node1_0_0.log',
                           db_port='27017',
                           host_name='node1.test.com',
                           is_arbiter=False,
                           is_started=False,
                           is_repl_configurated=None,
                           repl_role=None),
            InstanceStatus(shard_name='0',
                           pid_file_name='/var/run/mongodb/node2_0_0.pid',
                           final_db_path='/var/lib/mongodb/node2_0_0',
                           log_file='/var/log/mongodb/node2_0_0.log',
                           db_port='27017',
                           host_name='node2.test.com',
                           is_arbiter=False,
                           is_started=True,
                           is_repl_configurated=False,
                           repl_role=None)])]
    clusterStatus = server2.getClusterStatus(server2.getClusterData())
    self.assertEqual(clusterStatus, expectedClusterStatusServer2On, "The cluster status result for a started node2"
                                                                   " in the mongos is not right")
    server1.start(self.env)
    sleep(self.SLEEP_INTERVAL_AFTER_START_A_INSTANCE)
    server1.status(self.env)
    # Both nodes up: both instances reported started.
    expectedClusterStatusServer1On = [
        ('0',['node1.test.com','node2.test.com'], [
            InstanceStatus(shard_name='0',
                           pid_file_name='/var/run/mongodb/node1_0_0.pid',
                           final_db_path='/var/lib/mongodb/node1_0_0',
                           log_file='/var/log/mongodb/node1_0_0.log',
                           db_port='27017',
                           host_name='node1.test.com',
                           is_arbiter=False,
                           is_started=True,
                           is_repl_configurated=False,
                           repl_role=None),
            InstanceStatus(shard_name='0',
                           pid_file_name='/var/run/mongodb/node2_0_0.pid',
                           final_db_path='/var/lib/mongodb/node2_0_0',
                           log_file='/var/log/mongodb/node2_0_0.log',
                           db_port='27017',
                           host_name='node2.test.com',
                           is_arbiter=False,
                           is_started=True,
                           is_repl_configurated=False,
                           repl_role=None)])]
    clusterStatus = server1.getClusterStatus(server1.getClusterData())
    self.assertEqual(clusterStatus, expectedClusterStatusServer1On, "The cluster status result for a started node1"
                                                                   " in the mongos is not right")
    # Stop both; status() must raise once an instance is down.
    server2.stop(self.env)
    with self.assertRaises(ComponentIsNotRunning):
        server2.status(self.env)
    server1.stop(self.env)
    with self.assertRaises(ComponentIsNotRunning):
        server1.status(self.env)
    clusterStatus = server2.getClusterStatus(server2.getClusterData())
    self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
                     "The cluster status result after stopping the mongos is not right")
def test_must_not_start_if_all_config_servers_are_off(self):
    """A mongos must refuse to start when no config server is running, so
    the cluster status stays 'all stopped' even after start() is attempted."""
    self.several_hosts_setup()
    server1 = MongosServer()
    server1.my_hostname = 'node1.test.com'
    clusterStatus = server1.getClusterStatus(server1.getClusterData())
    # Fixed assertion-message typo: "stating" -> "starting".
    self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
                     "The cluster status result before starting the mongos is not right")
    # start() is expected to be a no-op because every config server is off.
    server1.start(self.env)
    clusterStatus = server1.getClusterStatus(server1.getClusterData())
    # Fixed copy-pasted message: this check runs AFTER the start attempt.
    self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
                     "The cluster status result after trying to start the mongos without config servers is not right")
def test_must_not_start_if_no_config_servers_primary_on(self):
    """A mongos must refuse to start while the config-server replica set has
    no primary, even when secondaries are running."""
    self.several_hosts_setup()
    server1 = MongosServer()
    server1.my_hostname = 'node1.test.com'
    clusterStatus = server1.getClusterStatus(server1.getClusterData())
    # Fixed assertion-message typo: "stating" -> "starting".
    self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
                     "The cluster status result before starting the mongos is not right")
    # Starting only the secondary config servers
    config_server2 = MongoConfigServer()
    config_server2.my_hostname = 'node2.test.com'
    config_server2.start(self.env)
    config_server3 = MongoConfigServer()
    config_server3.my_hostname = 'node3.test.com'
    config_server3.start(self.env)
    # Without a primary config server the mongos start must not succeed.
    server1.start(self.env)
    clusterStatus = server1.getClusterStatus(server1.getClusterData())
    # Fixed copy-pasted message: this check runs AFTER the start attempt.
    self.assertEqual(clusterStatus, self.expected_cluster_status_for_several_hosts_stopped,
                     "The cluster status result after trying to start the mongos without a primary config server is not right")
| 46.443526
| 119
| 0.572573
| 1,728
| 16,859
| 5.314815
| 0.085069
| 0.038872
| 0.049652
| 0.027439
| 0.832753
| 0.828397
| 0.803136
| 0.77341
| 0.742814
| 0.734756
| 0
| 0.025646
| 0.338514
| 16,859
| 362
| 120
| 46.571823
| 0.797884
| 0.0156
| 0
| 0.754209
| 0
| 0
| 0.181637
| 0.063962
| 0
| 0
| 0
| 0
| 0.06734
| 1
| 0.043771
| false
| 0
| 0.037037
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d9411fa6de1381e3bdc96f6766d8b3880690eec7
| 13,439
|
py
|
Python
|
courthouse/tensorflow/judge.py
|
heidariarash/Courthouse
|
bda59c8a86d6187720ddf2841d986c98e7d9b83e
|
[
"MIT"
] | null | null | null |
courthouse/tensorflow/judge.py
|
heidariarash/Courthouse
|
bda59c8a86d6187720ddf2841d986c98e7d9b83e
|
[
"MIT"
] | null | null | null |
courthouse/tensorflow/judge.py
|
heidariarash/Courthouse
|
bda59c8a86d6187720ddf2841d986c98e7d9b83e
|
[
"MIT"
] | null | null | null |
from courthouse.utils.case import CategoricalCase, NumericalCase
import numpy as np
import tensorflow as tf
class CategoricalJudge:
    """
    Use this class to judge your model to see if it is fair or not.

    Workflow: case() builds the original and counterfactual datasets,
    judge() runs the model on both, then verdict() prints a report and
    faced_discrimination() returns the rows whose prediction changed.
    """
    def __init__(self) -> None:
        # Rows selected by case() and their counterfactual copies.
        self.__org_data = None
        self.__new_data = None
        # The "from" / "towards" case descriptions given to case().
        self.__old_case = None
        self.__new_case = None
        # Post-processed model outputs for the original / new data.
        self.__org_out = None
        self.__new_out = None
        # Set by judge(); None means no successful judge() call yet.
        self.__output_type = None

    def case(self, data: np.ndarray, change_from: "CategoricalCase", change_towards: "CategoricalCase") -> None:
        """
        Use this method to specify the transformation from the old case to the new case.

        data: 2-D feature matrix the model will be applied to.
        change_from / change_towards: case descriptions queried via .get().
        """
        self.__old_case = change_from
        self.__new_case = change_towards
        # binary: category encoded in a single 0/1 column -- flip the bit
        if change_from.get("binary") != -1:
            self.__org_data = data[data[:, change_from.get("column")] == change_from.get("binary")]
            self.__new_data = self.__org_data.copy()
            self.__new_data[:, change_from.get("column")] = 1 - self.__new_data[:, change_from.get("column")]
        # column to non column: clear the one-hot column
        elif change_towards.get("column") is None:
            self.__org_data = data[data[:, change_from.get("column")] == 1]
            self.__new_data = self.__org_data.copy()
            self.__new_data[:, change_from.get("column")] = 0
        # non column to column: rows where none of the old columns are set
        elif isinstance(change_from.get("column"), (list, tuple)):
            self.__org_data = data
            for column in change_from.get("column"):
                self.__org_data = self.__org_data[self.__org_data[:, column] == 0]
            self.__new_data = self.__org_data.copy()
            self.__new_data[:, change_towards.get("column")] = 1
        # column to column: move the one-hot bit
        else:
            self.__org_data = data[data[:, change_from.get("column")] == 1]
            self.__new_data = self.__org_data.copy()
            self.__new_data[:, change_from.get("column")] = 0
            self.__new_data[:, change_towards.get("column")] = 1

    def judge(self, model: "tf.keras.Model", output_type: str) -> None:
        """
        Use this method to judge your model fairness.

        output_type: one of "categorical", "binary_sigmoid", "binary_tanh"
        or "regression".  Raises ValueError for anything else.
        """
        org_predict = model.predict(self.__org_data)
        new_predict = model.predict(self.__new_data)
        self.__output_type = output_type
        if output_type == "categorical":
            self.__org_out = [np.argmax(output) for output in org_predict]
            self.__new_out = [np.argmax(output) for output in new_predict]
        elif output_type == "binary_sigmoid":
            self.__org_out = [1 if output >= 0.5 else 0 for output in org_predict]
            self.__new_out = [1 if output >= 0.5 else 0 for output in new_predict]
        elif output_type == "binary_tanh":
            self.__org_out = [1 if output >= 0 else 0 for output in org_predict]
            self.__new_out = [1 if output >= 0 else 0 for output in new_predict]
        elif output_type == "regression":
            # For regression only the aggregates [mean, min, max] are kept.
            self.__org_out = [np.mean(org_predict), np.min(org_predict), np.max(org_predict)]
            self.__new_out = [np.mean(new_predict), np.min(new_predict), np.max(new_predict)]
        else:
            self.__output_type = None
            raise ValueError(f'{output_type} output_type is not defined.')

    def verdict(self) -> None:
        """
        Use this method to print the report of the fairness of the model.
        """
        # checking if the model is actually judged
        if self.__output_type is None:
            print('No model has been judged yet.')
            return  # was falling through and crashing on the None attributes
        # Was self.__case.get(...): this class stores __old_case, not __case,
        # and the nested double quotes were a SyntaxError before Python 3.12.
        print(f"There are {self.__org_data.shape[0]} \"{self.__old_case.get('name')}\" to be found.\n")
        print("When the model was applied to the original dataset, these results were obtained:")
        if self.__output_type == "binary_sigmoid" or self.__output_type == "binary_tanh":
            ones = self.__org_out.count(1)
            print(f"\t{ones} time(s) the model predicted 1. This is the case for {ones/len(self.__org_out)*100}% of the data.")
            print(f"\t{len(self.__org_out) - ones} time(s) the model predicted 0. This is the case for {(1 - ones/len(self.__org_out))* 100}% of the data.\n")
            print(f"Then the value of {self.__old_case.get('name')} changed.\n")
            print("These results were obtained after applying the model on the new data.")
            ones = self.__new_out.count(1)
            print(f"\t{ones} time(s) the model predicted 1. This is the case for {ones/len(self.__new_out) * 100}% of the data.")
            print(f"\t{len(self.__new_out) - ones} time(s) the model predicted 0. This is the case for {(1 - ones/len(self.__new_out)) * 100}% of the data.")
        elif self.__output_type == "categorical":
            results = {}
            for output in self.__org_out:
                results[output] = results.get(output, 0) + 1
            for key, value in results.items():
                print(f"\t{value} time(s) the model predicted {key}. This is the case for {value/len(self.__org_out)*100}% of the data.")
            print("\n")
            print(f"Then the value of {self.__old_case.get('name')} changed.\n")
            print("These results were obtained after applying the model on the new data.")
            results = {}
            for output in self.__new_out:
                results[output] = results.get(output, 0) + 1
            for key, value in results.items():
                # was dividing by len(self.__org_out); these are the new outputs
                print(f"\t{value} time(s) the model predicted {key}. This is the case for {value/len(self.__new_out)*100}% of the data.")
        elif self.__output_type == "regression":
            print(f"\tMean of the predictions: {self.__org_out[0]}")
            print(f"\tMinimum of the predictions: {self.__org_out[1]}")
            print(f"\tMaximum of the predictions: {self.__org_out[2]}\n")
            print(f"Then the value of {self.__old_case.get('name')} changed.\n")
            print("These results were obtained after applying the model on the new data.")
            print(f"\tMean of the predictions: {self.__new_out[0]}")
            # index 1 holds the minimum (see judge()); was mislabelled "Maximum"
            print(f"\tMinimum of the predictions: {self.__new_out[1]}")
            print(f"\tMaximum of the predictions: {self.__new_out[2]}")

    def faced_discrimination(self) -> dict:
        """
        Use this method to get the datapoints for which the prediction would
        be different if the case was different.

        Returns {row_index: original_row}.  For regression judges a message
        is printed and None is returned (no per-row comparison exists).
        """
        if self.__output_type == "regression":
            print("You can not use this method on a regression problem.")
            return
        different = {}
        for i, output in enumerate(self.__org_out):
            if output != self.__new_out[i]:
                different[i] = self.__org_data[i]
        return different
class NumericalJudge:
    """
    Use this class to judge your model to see if it is fair or not.

    Workflow: case() shifts a numerical feature by a fixed amount, judge()
    runs the model on the original and shifted data, then verdict() prints
    a report and faced_discrimination() returns the affected rows.
    """
    def __init__(self) -> None:
        # Original data and its shifted copy, built by case().
        self.__org_data = None
        self.__new_data = None
        # The numerical case description given to case().
        self.__case = None
        # Post-processed model outputs for the original / new data.
        self.__org_out = None
        self.__new_out = None
        # Set by judge(); None means no successful judge() call yet.
        self.__output_type = None

    def case(self, data: np.ndarray, case: "NumericalCase", change_amount: int) -> None:
        """
        Use this method to specify which numerical column to shift and by
        how much.  The original data is kept unmodified.
        """
        self.__case = case
        self.__org_data = data
        self.__new_data = data.copy()
        self.__new_data[:, case.get("column")] = self.__new_data[:, case.get("column")] + change_amount

    def judge(self, model: "tf.keras.Model", output_type: str) -> None:
        """
        Use this method to judge your model fairness.

        output_type: one of "categorical", "binary_sigmoid", "binary_tanh"
        or "regression".  Raises ValueError for anything else.
        """
        org_predict = model.predict(self.__org_data)
        new_predict = model.predict(self.__new_data)
        self.__output_type = output_type
        if output_type == "categorical":
            self.__org_out = [np.argmax(output) for output in org_predict]
            self.__new_out = [np.argmax(output) for output in new_predict]
        elif output_type == "binary_sigmoid":
            self.__org_out = [1 if output >= 0.5 else 0 for output in org_predict]
            self.__new_out = [1 if output >= 0.5 else 0 for output in new_predict]
        elif output_type == "binary_tanh":
            self.__org_out = [1 if output >= 0 else 0 for output in org_predict]
            self.__new_out = [1 if output >= 0 else 0 for output in new_predict]
        elif output_type == "regression":
            # For regression only the aggregates [mean, min, max] are kept.
            self.__org_out = [np.mean(org_predict), np.min(org_predict), np.max(org_predict)]
            self.__new_out = [np.mean(new_predict), np.min(new_predict), np.max(new_predict)]
        else:
            self.__output_type = None
            raise ValueError(f'{output_type} output_type is not defined.')

    def verdict(self) -> None:
        """
        Use this method to print the report of the fairness of the model.
        """
        # checking if the model is actually judged
        if self.__output_type is None:
            print('No model has been judged yet.')
            return  # was falling through and crashing on the None attributes
        print(f"There are {self.__org_data.shape[0]} datapoint in original dataset.\n")
        print("When the model was applied to the original dataset, these results were obtained:")
        if self.__output_type == "binary_sigmoid" or self.__output_type == "binary_tanh":
            ones = self.__org_out.count(1)
            print(f"\t{ones} time(s) the model predicted 1. This is the case for {ones/len(self.__org_out)*100}% of the data.")
            print(f"\t{len(self.__org_out) - ones} time(s) the model predicted 0. This is the case for {(1 - ones/len(self.__org_out))* 100}% of the data.\n")
            print(f"Then the value of {self.__case.get('name')} changed.")
            print("\n")
            print("These results were obtained after applying the model on the new data.")
            ones = self.__new_out.count(1)
            print(f"\t{ones} time(s) the model predicted 1. This is the case for {ones/len(self.__new_out) * 100}% of the data.")
            print(f"\t{len(self.__new_out) - ones} time(s) the model predicted 0. This is the case for {(1 - ones/len(self.__new_out)) * 100}% of the data.")
        elif self.__output_type == "categorical":
            results = {}
            for output in self.__org_out:
                results[output] = results.get(output, 0) + 1
            for key, value in results.items():
                print(f"\t{value} time(s) the model predicted {key}. This is the case for {value/len(self.__org_out)*100}% of the data.")
            print("\n")
            print(f"Then the value of {self.__case.get('name')} changed.")
            print("\n")
            print("These results were obtained after applying the model on the new data.")
            results = {}
            for output in self.__new_out:
                results[output] = results.get(output, 0) + 1
            for key, value in results.items():
                # was dividing by len(self.__org_out); these are the new outputs
                print(f"\t{value} time(s) the model predicted {key}. This is the case for {value/len(self.__new_out)*100}% of the data.")
        elif self.__output_type == "regression":
            print(f"\tMean of the predictions: {self.__org_out[0]}")
            print(f"\tMinimum of the predictions: {self.__org_out[1]}")
            print(f"\tMaximum of the predictions: {self.__org_out[2]}\n")
            print(f"Then the value of {self.__case.get('name')} changed.")
            print("\n")
            print("These results were obtained after applying the model on the new data.")
            print(f"\tMean of the predictions: {self.__new_out[0]}")
            # index 1 holds the minimum (see judge()); was mislabelled "Maximum"
            print(f"\tMinimum of the predictions: {self.__new_out[1]}")
            print(f"\tMaximum of the predictions: {self.__new_out[2]}")

    def faced_discrimination(self) -> dict:
        """
        Use this method to get the datapoints for which the prediction would
        be different if the case was different.

        Returns {row_index: original_row}.  For regression judges a message
        is printed and None is returned (no per-row comparison exists).
        """
        if self.__output_type == "regression":
            print("You can not use this method on a regression problem.")
            return
        different = {}
        for i, output in enumerate(self.__org_out):
            if output != self.__new_out[i]:
                different[i] = self.__org_data[i]
        return different
| 43.775244
| 158
| 0.58799
| 1,836
| 13,439
| 4.014706
| 0.080065
| 0.060779
| 0.059693
| 0.026048
| 0.916294
| 0.908018
| 0.889974
| 0.887397
| 0.880478
| 0.873694
| 0
| 0.011737
| 0.296302
| 13,439
| 307
| 159
| 43.775244
| 0.767685
| 0.010566
| 0
| 0.888889
| 0
| 0.055556
| 0.286752
| 0.048357
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013889
| null | null | 0.226852
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
79c87069a7de3a0031b84ece57ba9c912f146abd
| 2,351
|
py
|
Python
|
dynamic_json/json_wrappers.py
|
childsish/dynamic-json
|
3a37caba373a4da8c4eb40c5ac11c88e09875451
|
[
"MIT"
] | 1
|
2021-02-08T16:41:55.000Z
|
2021-02-08T16:41:55.000Z
|
dynamic_json/json_wrappers.py
|
childsish/dynamic-json
|
3a37caba373a4da8c4eb40c5ac11c88e09875451
|
[
"MIT"
] | null | null | null |
dynamic_json/json_wrappers.py
|
childsish/dynamic-json
|
3a37caba373a4da8c4eb40c5ac11c88e09875451
|
[
"MIT"
] | null | null | null |
from collections.abc import Mapping, Sequence
class JsonDict(dict):
    """A dict whose string values are format-expanded against a shared root.

    Attribute access falls through to item access, and nested mappings /
    sequences are wrapped (JsonDict / JsonList) so the whole tree shares a
    single root mapping for ``str.format`` interpolation.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Until adopted by a parent, this dict is its own interpolation root.
        super().__setattr__('_root', self)

    def __getattr__(self, key):
        # Prefer stored items; fall back to genuine attributes.
        if key in self:
            return self[key]
        return super().__getattribute__(key)

    def __setattr__(self, key, value):
        # Attribute assignment is just item assignment.
        self[key] = value

    def __getitem__(self, key):
        stored = super().__getitem__(key)
        if not isinstance(stored, str):
            return stored
        # Expand "{placeholders}" against the shared root mapping.
        return stored.format(**super().__getattribute__('_root'))

    def __setitem__(self, key, value):
        wrapped = value
        if isinstance(wrapped, Mapping) and not isinstance(wrapped, JsonDict):
            wrapped = JsonDict(wrapped)
            wrapped.set_as_root(super().__getattribute__('_root'))
        elif isinstance(wrapped, Sequence) and not isinstance(wrapped, (str, JsonList)):
            wrapped = JsonList(wrapped)
            wrapped.set_as_root(super().__getattribute__('_root'))
        super().__setitem__(key, wrapped)

    def set_as_root(self, root=None):
        """Adopt *root* (if given) and propagate it to wrapped children."""
        if root is not None:
            super().__setattr__('_root', root)
        shared = super().__getattribute__('_root')
        for child in self.values():
            if hasattr(child, 'set_as_root'):
                child.set_as_root(shared)
class JsonList(list):
    """A list whose string items are format-expanded against a shared root.

    A free-standing list publishes itself under the key ``ROOT_NAME`` so
    items can reference siblings as ``{root[i]}``; once adopted via
    set_as_root() it shares its parent's root mapping instead.
    """

    ROOT_NAME = 'root'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Standalone lists are their own root, exposed under ROOT_NAME.
        super().__setattr__('_root', {JsonList.ROOT_NAME: self})

    def __getitem__(self, key):
        stored = super().__getitem__(key)
        if not isinstance(stored, str):
            return stored
        # Expand "{placeholders}" against the shared root mapping.
        return stored.format(**super().__getattribute__('_root'))

    def __setitem__(self, key, value):
        wrapped = value
        if isinstance(wrapped, Mapping) and not isinstance(wrapped, JsonDict):
            wrapped = JsonDict(wrapped)
            wrapped.set_as_root(super().__getattribute__('_root'))
        elif isinstance(wrapped, Sequence) and not isinstance(wrapped, (str, JsonList)):
            wrapped = JsonList(wrapped)
            wrapped.set_as_root(super().__getattribute__('_root'))
        super().__setitem__(key, wrapped)

    def set_as_root(self, root=None):
        """Adopt *root* (if given) and propagate it to wrapped children."""
        if root is not None:
            super().__setattr__('_root', root)
        shared = super().__getattribute__('_root')
        for child in self:
            if hasattr(child, 'set_as_root'):
                child.set_as_root(shared)
| 34.072464
| 84
| 0.594641
| 271
| 2,351
| 4.634686
| 0.162362
| 0.039809
| 0.071656
| 0.066879
| 0.816879
| 0.816879
| 0.816879
| 0.816879
| 0.816879
| 0.816879
| 0
| 0
| 0.272225
| 2,351
| 68
| 85
| 34.573529
| 0.734074
| 0
| 0
| 0.740741
| 0
| 0
| 0.03658
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.185185
| false
| 0
| 0.018519
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79c9c303a7d16e879d6f35f7bb708f6f2fdecba8
| 102,604
|
py
|
Python
|
usr/lib64/python2.6/site-packages/libsvn/client.py
|
devop-mmcgrath/openshift-svn-cartridge
|
6cfe801adcdb68186a8c420b420ff6c0ccaadbb5
|
[
"Apache-2.0"
] | 2
|
2017-09-28T15:02:43.000Z
|
2018-02-09T05:52:33.000Z
|
usr/lib64/python2.6/site-packages/libsvn/client.py
|
devop-mmcgrath/openshift-svn-cartridge
|
6cfe801adcdb68186a8c420b420ff6c0ccaadbb5
|
[
"Apache-2.0"
] | null | null | null |
usr/lib64/python2.6/site-packages/libsvn/client.py
|
devop-mmcgrath/openshift-svn-cartridge
|
6cfe801adcdb68186a8c420b420ff6c0ccaadbb5
|
[
"Apache-2.0"
] | null | null | null |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.36
#
# Don't modify this file, modify the SWIG interface instead.
# This file is compatible with both classic and new-style classes.
import _client
import new
new_instancemethod = new.instancemethod
# SWIG-generated attribute dispatcher (Python 2 module; see file header:
# "Don't modify this file, modify the SWIG interface instead").
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    # "thisown" controls ownership of the underlying C object.
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'PySwigObject':
            # Replace the low-level C pointer wrapper directly.
            self.__dict__[name] = value
            return
    # Delegate to the per-class C setter, if one is registered.
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    # static=1 forbids adding brand-new attributes to the proxy.
    if (not static) or hasattr(self,name):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
# Dynamic variant: allows new attributes (static=0).
def _swig_setattr(self,class_type,name,value):
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
# SWIG-generated attribute getter: dispatches to registered C accessors.
def _swig_getattr(self,class_type,name):
    # "thisown" queries ownership of the underlying C object.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    # NOTE: Python 2 raise syntax -- this module targets Python 2.x only.
    raise AttributeError,name
def _swig_repr(self):
    # Best-effort description of the wrapped C pointer; empty on failure.
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
import core
import delta
import wc
import ra
def svn_client_version(*args):
    """svn_client_version() -> svn_version_t"""
    # Thin SWIG wrapper forwarding to the C extension (Python 2 apply()).
    return apply(_client.svn_client_version, args)
def svn_client_get_simple_prompt_provider(*args):
    """
    svn_client_get_simple_prompt_provider(svn_auth_provider_object_t provider, svn_auth_simple_prompt_func_t prompt_func,
                                          int retry_limit,
                                          apr_pool_t pool)
    """
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_simple_prompt_provider, args)
def svn_client_get_username_prompt_provider(*args):
    """
    svn_client_get_username_prompt_provider(svn_auth_provider_object_t provider, svn_auth_username_prompt_func_t prompt_func,
                                            int retry_limit,
                                            apr_pool_t pool)
    """
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_username_prompt_provider, args)
def svn_client_get_simple_provider(*args):
    """svn_client_get_simple_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_simple_provider, args)
def svn_client_get_username_provider(*args):
    """svn_client_get_username_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_username_provider, args)
def svn_client_get_ssl_server_trust_file_provider(*args):
    """svn_client_get_ssl_server_trust_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_ssl_server_trust_file_provider, args)
def svn_client_get_ssl_client_cert_file_provider(*args):
    """svn_client_get_ssl_client_cert_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_ssl_client_cert_file_provider, args)
def svn_client_get_ssl_client_cert_pw_file_provider(*args):
    """svn_client_get_ssl_client_cert_pw_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_ssl_client_cert_pw_file_provider, args)
def svn_client_get_ssl_server_trust_prompt_provider(*args):
    """
    svn_client_get_ssl_server_trust_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_server_trust_prompt_func_t prompt_func,
                                                    apr_pool_t pool)
    """
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_ssl_server_trust_prompt_provider, args)
def svn_client_get_ssl_client_cert_prompt_provider(*args):
    """
    svn_client_get_ssl_client_cert_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_client_cert_prompt_func_t prompt_func,
                                                   int retry_limit,
                                                   apr_pool_t pool)
    """
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_ssl_client_cert_prompt_provider, args)
def svn_client_get_ssl_client_cert_pw_prompt_provider(*args):
    """
    svn_client_get_ssl_client_cert_pw_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_client_cert_pw_prompt_func_t prompt_func,
                                                      int retry_limit,
                                                      apr_pool_t pool)
    """
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_get_ssl_client_cert_pw_prompt_provider, args)
def svn_client_proplist_item_dup(*args):
    """svn_client_proplist_item_dup( item, apr_pool_t pool)"""
    # Thin SWIG wrapper forwarding to the C extension.
    return apply(_client.svn_client_proplist_item_dup, args)
class svn_client_commit_info_t:
    """Proxy of C svn_client_commit_info_t struct"""
    # SWIG dispatch tables mapping attribute names to C getter/setter
    # functions; populated below for each struct member.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_commit_info_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_commit_info_t, name)
    __repr__ = _swig_repr
    # Struct members: revision, date, author.
    __swig_setmethods__["revision"] = _client.svn_client_commit_info_t_revision_set
    __swig_getmethods__["revision"] = _client.svn_client_commit_info_t_revision_get
    __swig_setmethods__["date"] = _client.svn_client_commit_info_t_date_set
    __swig_getmethods__["date"] = _client.svn_client_commit_info_t_date_get
    __swig_setmethods__["author"] = _client.svn_client_commit_info_t_author_set
    __swig_getmethods__["author"] = _client.svn_client_commit_info_t_author_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_client_commit_info_t"""
        import libsvn.core, weakref
        # Default to the global application pool when no parent is given.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # _is_valid mirrors the parent pool's liveness via a weakref.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def overrides the class-level __getattr__ lambda above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                value is not old_value):
                try:
                    # Carry cached Python-side state over to the fresh proxy.
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    pass
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value
    # NOTE: this def overrides the class-level __setattr__ lambda above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the wrapper so repeated reads reuse its Python-side state.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self, *args):
        """__init__(self) -> svn_client_commit_info_t"""
        this = apply(_client.new_svn_client_commit_info_t, args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _client.delete_svn_client_commit_info_t
    __del__ = lambda self : None;
# Register the proxy class with the C extension.
svn_client_commit_info_t_swigregister = _client.svn_client_commit_info_t_swigregister
svn_client_commit_info_t_swigregister(svn_client_commit_info_t)
# Commit-item state-flag bitmask values, re-exported from the C extension
# (used in svn_client_commit_item*_t.state_flags).
SVN_CLIENT_COMMIT_ITEM_ADD = _client.SVN_CLIENT_COMMIT_ITEM_ADD
SVN_CLIENT_COMMIT_ITEM_DELETE = _client.SVN_CLIENT_COMMIT_ITEM_DELETE
SVN_CLIENT_COMMIT_ITEM_TEXT_MODS = _client.SVN_CLIENT_COMMIT_ITEM_TEXT_MODS
SVN_CLIENT_COMMIT_ITEM_PROP_MODS = _client.SVN_CLIENT_COMMIT_ITEM_PROP_MODS
SVN_CLIENT_COMMIT_ITEM_IS_COPY = _client.SVN_CLIENT_COMMIT_ITEM_IS_COPY
SVN_CLIENT_COMMIT_ITEM_LOCK_TOKEN = _client.SVN_CLIENT_COMMIT_ITEM_LOCK_TOKEN
class svn_client_commit_item3_t:
    """Proxy of C svn_client_commit_item3_t struct"""
    # SWIG dispatch tables mapping attribute names to C getter/setter
    # functions; populated below for each struct member.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_commit_item3_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_commit_item3_t, name)
    __repr__ = _swig_repr
    # Struct members: path, kind, url, revision, copyfrom_url, copyfrom_rev,
    # state_flags, incoming_prop_changes, outgoing_prop_changes.
    __swig_setmethods__["path"] = _client.svn_client_commit_item3_t_path_set
    __swig_getmethods__["path"] = _client.svn_client_commit_item3_t_path_get
    __swig_setmethods__["kind"] = _client.svn_client_commit_item3_t_kind_set
    __swig_getmethods__["kind"] = _client.svn_client_commit_item3_t_kind_get
    __swig_setmethods__["url"] = _client.svn_client_commit_item3_t_url_set
    __swig_getmethods__["url"] = _client.svn_client_commit_item3_t_url_get
    __swig_setmethods__["revision"] = _client.svn_client_commit_item3_t_revision_set
    __swig_getmethods__["revision"] = _client.svn_client_commit_item3_t_revision_get
    __swig_setmethods__["copyfrom_url"] = _client.svn_client_commit_item3_t_copyfrom_url_set
    __swig_getmethods__["copyfrom_url"] = _client.svn_client_commit_item3_t_copyfrom_url_get
    __swig_setmethods__["copyfrom_rev"] = _client.svn_client_commit_item3_t_copyfrom_rev_set
    __swig_getmethods__["copyfrom_rev"] = _client.svn_client_commit_item3_t_copyfrom_rev_get
    __swig_setmethods__["state_flags"] = _client.svn_client_commit_item3_t_state_flags_set
    __swig_getmethods__["state_flags"] = _client.svn_client_commit_item3_t_state_flags_get
    __swig_setmethods__["incoming_prop_changes"] = _client.svn_client_commit_item3_t_incoming_prop_changes_set
    __swig_getmethods__["incoming_prop_changes"] = _client.svn_client_commit_item3_t_incoming_prop_changes_get
    __swig_setmethods__["outgoing_prop_changes"] = _client.svn_client_commit_item3_t_outgoing_prop_changes_set
    __swig_getmethods__["outgoing_prop_changes"] = _client.svn_client_commit_item3_t_outgoing_prop_changes_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_client_commit_item3_t"""
        import libsvn.core, weakref
        # Default to the global application pool when no parent is given.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool;
        if self.__dict__["_parent_pool"]:
            # _is_valid mirrors the parent pool's liveness via a weakref.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    # NOTE: this def overrides the class-level __getattr__ lambda above.
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                value is not old_value):
                try:
                    # Carry cached Python-side state over to the fresh proxy.
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    pass
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value
    # NOTE: this def overrides the class-level __setattr__ lambda above.
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the wrapper so repeated reads reuse its Python-side state.
        self.__dict__.setdefault("_members",{})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self, *args):
        """__init__(self) -> svn_client_commit_item3_t"""
        this = apply(_client.new_svn_client_commit_item3_t, args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _client.delete_svn_client_commit_item3_t
    __del__ = lambda self : None;
# Register the proxy class with the C extension.
svn_client_commit_item3_t_swigregister = _client.svn_client_commit_item3_t_swigregister
svn_client_commit_item3_t_swigregister(svn_client_commit_item3_t)
class svn_client_commit_item2_t:
    """Proxy of C svn_client_commit_item2_t struct"""
    # SWIG dispatch tables: map proxy attribute names to the C-level
    # getter/setter functions used by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_commit_item2_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: these lambda __setattr__/__getattr__ bindings are shadowed by
    # the def __setattr__/__getattr__ methods later in this class body.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_commit_item2_t, name)
    __repr__ = _swig_repr
    # Accessors for the underlying C struct's fields.
    __swig_setmethods__["path"] = _client.svn_client_commit_item2_t_path_set
    __swig_getmethods__["path"] = _client.svn_client_commit_item2_t_path_get
    __swig_setmethods__["kind"] = _client.svn_client_commit_item2_t_kind_set
    __swig_getmethods__["kind"] = _client.svn_client_commit_item2_t_kind_get
    __swig_setmethods__["url"] = _client.svn_client_commit_item2_t_url_set
    __swig_getmethods__["url"] = _client.svn_client_commit_item2_t_url_get
    __swig_setmethods__["revision"] = _client.svn_client_commit_item2_t_revision_set
    __swig_getmethods__["revision"] = _client.svn_client_commit_item2_t_revision_get
    __swig_setmethods__["copyfrom_url"] = _client.svn_client_commit_item2_t_copyfrom_url_set
    __swig_getmethods__["copyfrom_url"] = _client.svn_client_commit_item2_t_copyfrom_url_get
    __swig_setmethods__["copyfrom_rev"] = _client.svn_client_commit_item2_t_copyfrom_rev_set
    __swig_getmethods__["copyfrom_rev"] = _client.svn_client_commit_item2_t_copyfrom_rev_get
    __swig_setmethods__["state_flags"] = _client.svn_client_commit_item2_t_state_flags_set
    __swig_getmethods__["state_flags"] = _client.svn_client_commit_item2_t_state_flags_get
    __swig_setmethods__["wcprop_changes"] = _client.svn_client_commit_item2_t_wcprop_changes_set
    __swig_getmethods__["wcprop_changes"] = _client.svn_client_commit_item2_t_wcprop_changes_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_client_commit_item2_t"""
      # Bind this proxy to an APR pool (defaulting to the global
      # application pool) so assert_valid() can detect a freed pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        # Weak reference: do not keep the pool alive just for validation.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # The weakref goes dead once the parent pool is destroyed.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # If SWIG handed back a fresh proxy for a member we cached earlier,
      # copy the cached proxy's Python-side metadata onto the new one.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            # Value has no __dict__ (plain int/str/etc.); nothing to copy.
            pass
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value
    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Cache the Python-side value so __getattr__ can restore its
      # metadata onto future proxies for the same member.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)
    def __init__(self, *args):
        """__init__(self) -> svn_client_commit_item2_t"""
        this = apply(_client.new_svn_client_commit_item2_t, args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _client.delete_svn_client_commit_item2_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime.
svn_client_commit_item2_t_swigregister = _client.svn_client_commit_item2_t_swigregister
svn_client_commit_item2_t_swigregister(svn_client_commit_item2_t)
class svn_client_commit_item_t:
    """Proxy of C svn_client_commit_item_t struct"""
    # SWIG dispatch tables: map proxy attribute names to the C-level
    # getter/setter functions used by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_commit_item_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: these lambda __setattr__/__getattr__ bindings are shadowed by
    # the def __setattr__/__getattr__ methods later in this class body.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_commit_item_t, name)
    __repr__ = _swig_repr
    # Accessors for the underlying C struct's fields.
    __swig_setmethods__["path"] = _client.svn_client_commit_item_t_path_set
    __swig_getmethods__["path"] = _client.svn_client_commit_item_t_path_get
    __swig_setmethods__["kind"] = _client.svn_client_commit_item_t_kind_set
    __swig_getmethods__["kind"] = _client.svn_client_commit_item_t_kind_get
    __swig_setmethods__["url"] = _client.svn_client_commit_item_t_url_set
    __swig_getmethods__["url"] = _client.svn_client_commit_item_t_url_get
    __swig_setmethods__["revision"] = _client.svn_client_commit_item_t_revision_set
    __swig_getmethods__["revision"] = _client.svn_client_commit_item_t_revision_get
    __swig_setmethods__["copyfrom_url"] = _client.svn_client_commit_item_t_copyfrom_url_set
    __swig_getmethods__["copyfrom_url"] = _client.svn_client_commit_item_t_copyfrom_url_get
    __swig_setmethods__["state_flags"] = _client.svn_client_commit_item_t_state_flags_set
    __swig_getmethods__["state_flags"] = _client.svn_client_commit_item_t_state_flags_get
    __swig_setmethods__["wcprop_changes"] = _client.svn_client_commit_item_t_wcprop_changes_set
    __swig_getmethods__["wcprop_changes"] = _client.svn_client_commit_item_t_wcprop_changes_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_client_commit_item_t"""
      # Bind this proxy to an APR pool (defaulting to the global
      # application pool) so assert_valid() can detect a freed pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        # Weak reference: do not keep the pool alive just for validation.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # The weakref goes dead once the parent pool is destroyed.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # If SWIG handed back a fresh proxy for a member we cached earlier,
      # copy the cached proxy's Python-side metadata onto the new one.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            # Value has no __dict__ (plain int/str/etc.); nothing to copy.
            pass
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value
    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Cache the Python-side value so __getattr__ can restore its
      # metadata onto future proxies for the same member.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)
    def __init__(self, *args):
        """__init__(self) -> svn_client_commit_item_t"""
        this = apply(_client.new_svn_client_commit_item_t, args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _client.delete_svn_client_commit_item_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime.
svn_client_commit_item_t_swigregister = _client.svn_client_commit_item_t_swigregister
svn_client_commit_item_t_swigregister(svn_client_commit_item_t)

# Thin SWIG wrappers: forward to the C implementation with * argument
# unpacking. apply() is deprecated and was removed in Python 3; the
# extended call syntax below is equivalent in Python 2.
def svn_client_commit_item3_create(*args):
    """svn_client_commit_item3_create(apr_pool_t pool) -> svn_client_commit_item3_t"""
    return _client.svn_client_commit_item3_create(*args)
def svn_client_commit_item_create(*args):
    """svn_client_commit_item_create(svn_client_commit_item3_t item, apr_pool_t pool) -> svn_error_t"""
    return _client.svn_client_commit_item_create(*args)
def svn_client_commit_item3_dup(*args):
    """svn_client_commit_item3_dup(svn_client_commit_item3_t item, apr_pool_t pool) -> svn_client_commit_item3_t"""
    return _client.svn_client_commit_item3_dup(*args)
def svn_client_commit_item2_dup(*args):
    """svn_client_commit_item2_dup(svn_client_commit_item2_t item, apr_pool_t pool) -> svn_client_commit_item2_t"""
    return _client.svn_client_commit_item2_dup(*args)
# svn_client_diff_summarize_kind_t enumeration values: the kind of
# change a diff-summary entry describes.
svn_client_diff_summarize_kind_normal = _client.svn_client_diff_summarize_kind_normal
svn_client_diff_summarize_kind_added = _client.svn_client_diff_summarize_kind_added
svn_client_diff_summarize_kind_modified = _client.svn_client_diff_summarize_kind_modified
svn_client_diff_summarize_kind_deleted = _client.svn_client_diff_summarize_kind_deleted
class svn_client_diff_summarize_t:
    """Proxy of C svn_client_diff_summarize_t struct"""
    # SWIG dispatch tables: map proxy attribute names to the C-level
    # getter/setter functions used by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_diff_summarize_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: these lambda __setattr__/__getattr__ bindings are shadowed by
    # the def __setattr__/__getattr__ methods later in this class body.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_diff_summarize_t, name)
    __repr__ = _swig_repr
    # Accessors for the underlying C struct's fields.
    __swig_setmethods__["path"] = _client.svn_client_diff_summarize_t_path_set
    __swig_getmethods__["path"] = _client.svn_client_diff_summarize_t_path_get
    __swig_setmethods__["summarize_kind"] = _client.svn_client_diff_summarize_t_summarize_kind_set
    __swig_getmethods__["summarize_kind"] = _client.svn_client_diff_summarize_t_summarize_kind_get
    __swig_setmethods__["prop_changed"] = _client.svn_client_diff_summarize_t_prop_changed_set
    __swig_getmethods__["prop_changed"] = _client.svn_client_diff_summarize_t_prop_changed_get
    __swig_setmethods__["node_kind"] = _client.svn_client_diff_summarize_t_node_kind_set
    __swig_getmethods__["node_kind"] = _client.svn_client_diff_summarize_t_node_kind_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_client_diff_summarize_t"""
      # Bind this proxy to an APR pool (defaulting to the global
      # application pool) so assert_valid() can detect a freed pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        # Weak reference: do not keep the pool alive just for validation.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # The weakref goes dead once the parent pool is destroyed.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # If SWIG handed back a fresh proxy for a member we cached earlier,
      # copy the cached proxy's Python-side metadata onto the new one.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            # Value has no __dict__ (plain int/str/etc.); nothing to copy.
            pass
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value
    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Cache the Python-side value so __getattr__ can restore its
      # metadata onto future proxies for the same member.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)
    def __init__(self, *args):
        """__init__(self) -> svn_client_diff_summarize_t"""
        this = apply(_client.new_svn_client_diff_summarize_t, args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _client.delete_svn_client_diff_summarize_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime.
svn_client_diff_summarize_t_swigregister = _client.svn_client_diff_summarize_t_swigregister
svn_client_diff_summarize_t_swigregister(svn_client_diff_summarize_t)

# Thin SWIG wrapper; apply() is deprecated (removed in Python 3), so
# forward the positional arguments with * unpacking instead.
def svn_client_diff_summarize_dup(*args):
    """svn_client_diff_summarize_dup(svn_client_diff_summarize_t diff, apr_pool_t pool) -> svn_client_diff_summarize_t"""
    return _client.svn_client_diff_summarize_dup(*args)
class svn_client_ctx_t:
    """Proxy of C svn_client_ctx_t struct"""
    # SWIG dispatch tables: map proxy attribute names to the C-level
    # getter/setter functions used by _swig_setattr/_swig_getattr.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_ctx_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: these lambda __setattr__/__getattr__ bindings are shadowed by
    # the def __setattr__/__getattr__ methods later in this class body.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_ctx_t, name)
    __repr__ = _swig_repr
    # Accessors for the underlying C struct's fields (callback function
    # pointers come in func/baton pairs, per the C API convention).
    __swig_setmethods__["auth_baton"] = _client.svn_client_ctx_t_auth_baton_set
    __swig_getmethods__["auth_baton"] = _client.svn_client_ctx_t_auth_baton_get
    __swig_setmethods__["notify_func"] = _client.svn_client_ctx_t_notify_func_set
    __swig_getmethods__["notify_func"] = _client.svn_client_ctx_t_notify_func_get
    __swig_setmethods__["notify_baton"] = _client.svn_client_ctx_t_notify_baton_set
    __swig_getmethods__["notify_baton"] = _client.svn_client_ctx_t_notify_baton_get
    __swig_setmethods__["log_msg_func"] = _client.svn_client_ctx_t_log_msg_func_set
    __swig_getmethods__["log_msg_func"] = _client.svn_client_ctx_t_log_msg_func_get
    __swig_setmethods__["log_msg_baton"] = _client.svn_client_ctx_t_log_msg_baton_set
    __swig_getmethods__["log_msg_baton"] = _client.svn_client_ctx_t_log_msg_baton_get
    __swig_setmethods__["config"] = _client.svn_client_ctx_t_config_set
    __swig_getmethods__["config"] = _client.svn_client_ctx_t_config_get
    __swig_setmethods__["cancel_func"] = _client.svn_client_ctx_t_cancel_func_set
    __swig_getmethods__["cancel_func"] = _client.svn_client_ctx_t_cancel_func_get
    __swig_setmethods__["cancel_baton"] = _client.svn_client_ctx_t_cancel_baton_set
    __swig_getmethods__["cancel_baton"] = _client.svn_client_ctx_t_cancel_baton_get
    __swig_setmethods__["notify_func2"] = _client.svn_client_ctx_t_notify_func2_set
    __swig_getmethods__["notify_func2"] = _client.svn_client_ctx_t_notify_func2_get
    __swig_setmethods__["notify_baton2"] = _client.svn_client_ctx_t_notify_baton2_set
    __swig_getmethods__["notify_baton2"] = _client.svn_client_ctx_t_notify_baton2_get
    __swig_setmethods__["log_msg_func2"] = _client.svn_client_ctx_t_log_msg_func2_set
    __swig_getmethods__["log_msg_func2"] = _client.svn_client_ctx_t_log_msg_func2_get
    __swig_setmethods__["log_msg_baton2"] = _client.svn_client_ctx_t_log_msg_baton2_set
    __swig_getmethods__["log_msg_baton2"] = _client.svn_client_ctx_t_log_msg_baton2_get
    __swig_setmethods__["progress_func"] = _client.svn_client_ctx_t_progress_func_set
    __swig_getmethods__["progress_func"] = _client.svn_client_ctx_t_progress_func_get
    __swig_setmethods__["progress_baton"] = _client.svn_client_ctx_t_progress_baton_set
    __swig_getmethods__["progress_baton"] = _client.svn_client_ctx_t_progress_baton_get
    __swig_setmethods__["log_msg_func3"] = _client.svn_client_ctx_t_log_msg_func3_set
    __swig_getmethods__["log_msg_func3"] = _client.svn_client_ctx_t_log_msg_func3_get
    __swig_setmethods__["log_msg_baton3"] = _client.svn_client_ctx_t_log_msg_baton3_set
    __swig_getmethods__["log_msg_baton3"] = _client.svn_client_ctx_t_log_msg_baton3_get
    __swig_setmethods__["mimetypes_map"] = _client.svn_client_ctx_t_mimetypes_map_set
    __swig_getmethods__["mimetypes_map"] = _client.svn_client_ctx_t_mimetypes_map_get
    __swig_setmethods__["conflict_func"] = _client.svn_client_ctx_t_conflict_func_set
    __swig_getmethods__["conflict_func"] = _client.svn_client_ctx_t_conflict_func_get
    __swig_setmethods__["conflict_baton"] = _client.svn_client_ctx_t_conflict_baton_set
    __swig_getmethods__["conflict_baton"] = _client.svn_client_ctx_t_conflict_baton_get
    __swig_setmethods__["client_name"] = _client.svn_client_ctx_t_client_name_set
    __swig_getmethods__["client_name"] = _client.svn_client_ctx_t_client_name_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_client_ctx_t"""
      # Bind this proxy to an APR pool (defaulting to the global
      # application pool) so assert_valid() can detect a freed pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        # Weak reference: do not keep the pool alive just for validation.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # The weakref goes dead once the parent pool is destroyed.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # If SWIG handed back a fresh proxy for a member we cached earlier,
      # copy the cached proxy's Python-side metadata onto the new one.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            # Value has no __dict__ (plain int/str/etc.); nothing to copy.
            pass
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value
    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Cache the Python-side value so __getattr__ can restore its
      # metadata onto future proxies for the same member.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)
    def __init__(self, *args):
        """__init__(self) -> svn_client_ctx_t"""
        this = apply(_client.new_svn_client_ctx_t, args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _client.delete_svn_client_ctx_t
    __del__ = lambda self : None;
# Register the proxy class with the SWIG runtime.
svn_client_ctx_t_swigregister = _client.svn_client_ctx_t_swigregister
svn_client_ctx_t_swigregister(svn_client_ctx_t)

# Thin SWIG wrapper; apply() is deprecated (removed in Python 3), so
# forward the positional arguments with * unpacking instead.
def svn_client_create_context(*args):
    """svn_client_create_context(svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t"""
    return _client.svn_client_create_context(*args)

# Auth-provider constants re-exported from the C module.
SVN_CLIENT_AUTH_USERNAME = _client.SVN_CLIENT_AUTH_USERNAME
SVN_CLIENT_AUTH_PASSWORD = _client.SVN_CLIENT_AUTH_PASSWORD
# Thin SWIG wrapper; apply() is deprecated (removed in Python 3), so
# forward the positional arguments with * unpacking instead.
def svn_client_args_to_target_array(*args):
    """
    svn_client_args_to_target_array(apr_array_header_t targets_p, apr_getopt_t os,
        apr_array_header_t known_targets, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_args_to_target_array(*args)
# Thin SWIG wrappers for the checkout family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_checkout3(*args):
    """
    svn_client_checkout3(svn_revnum_t result_rev, char URL, char path,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_depth_t depth, svn_boolean_t ignore_externals,
        svn_boolean_t allow_unver_obstructions,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_checkout3(*args)
def svn_client_checkout2(*args):
    """
    svn_client_checkout2(svn_revnum_t result_rev, char URL, char path,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_boolean_t ignore_externals,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_checkout2(*args)
def svn_client_checkout(*args):
    """
    svn_client_checkout(svn_revnum_t result_rev, char URL, char path,
        svn_opt_revision_t revision, svn_boolean_t recurse,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_checkout(*args)
# Thin SWIG wrappers for the update family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_update3(*args):
    """
    svn_client_update3(apr_array_header_t result_revs, apr_array_header_t paths,
        svn_opt_revision_t revision, svn_depth_t depth,
        svn_boolean_t depth_is_sticky, svn_boolean_t ignore_externals,
        svn_boolean_t allow_unver_obstructions,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_update3(*args)
def svn_client_update2(*args):
    """
    svn_client_update2(apr_array_header_t result_revs, apr_array_header_t paths,
        svn_opt_revision_t revision, svn_boolean_t recurse,
        svn_boolean_t ignore_externals,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_update2(*args)
def svn_client_update(*args):
    """
    svn_client_update(svn_revnum_t result_rev, char path, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_update(*args)
# Thin SWIG wrappers for the switch family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_switch2(*args):
    """
    svn_client_switch2(svn_revnum_t result_rev, char path, char url,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_depth_t depth, svn_boolean_t depth_is_sticky,
        svn_boolean_t ignore_externals,
        svn_boolean_t allow_unver_obstructions, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_switch2(*args)
def svn_client_switch(*args):
    """
    svn_client_switch(svn_revnum_t result_rev, char path, char url,
        svn_opt_revision_t revision, svn_boolean_t recurse,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_switch(*args)
# Thin SWIG wrappers for the add family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_add4(*args):
    """
    svn_client_add4(char path, svn_depth_t depth, svn_boolean_t force,
        svn_boolean_t no_ignore, svn_boolean_t add_parents,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_add4(*args)
def svn_client_add3(*args):
    """
    svn_client_add3(char path, svn_boolean_t recursive, svn_boolean_t force,
        svn_boolean_t no_ignore, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_add3(*args)
def svn_client_add2(*args):
    """
    svn_client_add2(char path, svn_boolean_t recursive, svn_boolean_t force,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_add2(*args)
def svn_client_add(*args):
    """
    svn_client_add(char path, svn_boolean_t recursive, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_add(*args)
# Thin SWIG wrappers for the mkdir family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_mkdir3(*args):
    """
    svn_client_mkdir3(svn_commit_info_t commit_info_p, apr_array_header_t paths,
        svn_boolean_t make_parents, apr_hash_t revprop_table,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_mkdir3(*args)
def svn_client_mkdir2(*args):
    """
    svn_client_mkdir2(svn_commit_info_t commit_info_p, apr_array_header_t paths,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_mkdir2(*args)
def svn_client_mkdir(*args):
    """
    svn_client_mkdir(svn_client_commit_info_t commit_info_p, apr_array_header_t paths,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_mkdir(*args)
# Thin SWIG wrappers for the delete family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_delete3(*args):
    """
    svn_client_delete3(svn_commit_info_t commit_info_p, apr_array_header_t paths,
        svn_boolean_t force, svn_boolean_t keep_local,
        apr_hash_t revprop_table, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_delete3(*args)
def svn_client_delete2(*args):
    """
    svn_client_delete2(svn_commit_info_t commit_info_p, apr_array_header_t paths,
        svn_boolean_t force, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_delete2(*args)
def svn_client_delete(*args):
    """
    svn_client_delete(svn_client_commit_info_t commit_info_p, apr_array_header_t paths,
        svn_boolean_t force, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_delete(*args)
# Thin SWIG wrappers for the import family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_import3(*args):
    """
    svn_client_import3(svn_commit_info_t commit_info_p, char path, char url,
        svn_depth_t depth, svn_boolean_t no_ignore,
        svn_boolean_t ignore_unknown_node_types, apr_hash_t revprop_table,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_import3(*args)
def svn_client_import2(*args):
    """
    svn_client_import2(svn_commit_info_t commit_info_p, char path, char url,
        svn_boolean_t nonrecursive, svn_boolean_t no_ignore,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_import2(*args)
def svn_client_import(*args):
    """
    svn_client_import(svn_client_commit_info_t commit_info_p, char path,
        char url, svn_boolean_t nonrecursive, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_import(*args)
# Thin SWIG wrappers for the commit family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_commit4(*args):
    """
    svn_client_commit4(svn_commit_info_t commit_info_p, apr_array_header_t targets,
        svn_depth_t depth, svn_boolean_t keep_locks,
        svn_boolean_t keep_changelists, apr_array_header_t changelists,
        apr_hash_t revprop_table,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_commit4(*args)
def svn_client_commit3(*args):
    """
    svn_client_commit3(svn_commit_info_t commit_info_p, apr_array_header_t targets,
        svn_boolean_t recurse, svn_boolean_t keep_locks,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_commit3(*args)
def svn_client_commit2(*args):
    """
    svn_client_commit2(svn_client_commit_info_t commit_info_p, apr_array_header_t targets,
        svn_boolean_t recurse, svn_boolean_t keep_locks,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_commit2(*args)
def svn_client_commit(*args):
    """
    svn_client_commit(svn_client_commit_info_t commit_info_p, apr_array_header_t targets,
        svn_boolean_t nonrecursive,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_commit(*args)
# Thin SWIG wrappers for the status family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_status4(*args):
    """
    svn_client_status4(svn_revnum_t result_rev, char path, svn_opt_revision_t revision,
        svn_wc_status_func3_t status_func,
        void status_baton, svn_depth_t depth, svn_boolean_t get_all,
        svn_boolean_t update, svn_boolean_t no_ignore,
        svn_boolean_t ignore_externals,
        apr_array_header_t changelists, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_status4(*args)
def svn_client_status3(*args):
    """
    svn_client_status3(svn_revnum_t result_rev, char path, svn_opt_revision_t revision,
        svn_wc_status_func2_t status_func,
        svn_depth_t depth, svn_boolean_t get_all,
        svn_boolean_t update, svn_boolean_t no_ignore,
        svn_boolean_t ignore_externals, apr_array_header_t changelists,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_status3(*args)
def svn_client_status2(*args):
    """
    svn_client_status2(svn_revnum_t result_rev, char path, svn_opt_revision_t revision,
        svn_wc_status_func2_t status_func,
        svn_boolean_t recurse, svn_boolean_t get_all,
        svn_boolean_t update, svn_boolean_t no_ignore,
        svn_boolean_t ignore_externals, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_status2(*args)
def svn_client_status(*args):
    """
    svn_client_status(svn_revnum_t result_rev, char path, svn_opt_revision_t revision,
        svn_wc_status_func_t status_func,
        svn_boolean_t recurse, svn_boolean_t get_all,
        svn_boolean_t update, svn_boolean_t no_ignore,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_status(*args)
# Thin SWIG wrappers for the log family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_log5(*args):
    """
    svn_client_log5(apr_array_header_t targets, svn_opt_revision_t peg_revision,
        apr_array_header_t revision_ranges,
        int limit, svn_boolean_t discover_changed_paths,
        svn_boolean_t strict_node_history, svn_boolean_t include_merged_revisions,
        apr_array_header_t revprops,
        svn_log_entry_receiver_t receiver,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_log5(*args)
def svn_client_log4(*args):
    """
    svn_client_log4(apr_array_header_t targets, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start, svn_opt_revision_t end,
        int limit, svn_boolean_t discover_changed_paths,
        svn_boolean_t strict_node_history,
        svn_boolean_t include_merged_revisions,
        apr_array_header_t revprops, svn_log_entry_receiver_t receiver,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_log4(*args)
def svn_client_log3(*args):
    """
    svn_client_log3(apr_array_header_t targets, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start, svn_opt_revision_t end,
        int limit, svn_boolean_t discover_changed_paths,
        svn_boolean_t strict_node_history,
        svn_log_message_receiver_t receiver,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_log3(*args)
def svn_client_log2(*args):
    """
    svn_client_log2(apr_array_header_t targets, svn_opt_revision_t start,
        svn_opt_revision_t end, int limit, svn_boolean_t discover_changed_paths,
        svn_boolean_t strict_node_history,
        svn_log_message_receiver_t receiver,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_log2(*args)
def svn_client_log(*args):
    """
    svn_client_log(apr_array_header_t targets, svn_opt_revision_t start,
        svn_opt_revision_t end, svn_boolean_t discover_changed_paths,
        svn_boolean_t strict_node_history,
        svn_log_message_receiver_t receiver,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_log(*args)
# Thin SWIG wrappers for the blame family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_blame4(*args):
    """
    svn_client_blame4(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start, svn_opt_revision_t end,
        svn_diff_file_options_t diff_options, svn_boolean_t ignore_mime_type,
        svn_boolean_t include_merged_revisions,
        svn_client_blame_receiver2_t receiver,
        void receiver_baton, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_blame4(*args)
def svn_client_blame3(*args):
    """
    svn_client_blame3(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start, svn_opt_revision_t end,
        svn_diff_file_options_t diff_options, svn_boolean_t ignore_mime_type,
        svn_client_blame_receiver_t receiver,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_blame3(*args)
def svn_client_blame2(*args):
    """
    svn_client_blame2(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start, svn_opt_revision_t end,
        svn_client_blame_receiver_t receiver, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_blame2(*args)
def svn_client_blame(*args):
    """
    svn_client_blame(char path_or_url, svn_opt_revision_t start, svn_opt_revision_t end,
        svn_client_blame_receiver_t receiver,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_blame(*args)
# Thin SWIG wrappers for the diff family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_diff4(*args):
    """
    svn_client_diff4(apr_array_header_t diff_options, char path1, svn_opt_revision_t revision1,
        char path2, svn_opt_revision_t revision2,
        char relative_to_dir, svn_depth_t depth,
        svn_boolean_t ignore_ancestry,
        svn_boolean_t no_diff_deleted, svn_boolean_t ignore_content_type,
        char header_encoding,
        apr_file_t outfile, apr_file_t errfile, apr_array_header_t changelists,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff4(*args)
def svn_client_diff3(*args):
    """
    svn_client_diff3(apr_array_header_t diff_options, char path1, svn_opt_revision_t revision1,
        char path2, svn_opt_revision_t revision2,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t no_diff_deleted,
        svn_boolean_t ignore_content_type,
        char header_encoding, apr_file_t outfile,
        apr_file_t errfile, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff3(*args)
def svn_client_diff2(*args):
    """
    svn_client_diff2(apr_array_header_t diff_options, char path1, svn_opt_revision_t revision1,
        char path2, svn_opt_revision_t revision2,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t no_diff_deleted,
        svn_boolean_t ignore_content_type,
        apr_file_t outfile, apr_file_t errfile,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff2(*args)
def svn_client_diff(*args):
    """
    svn_client_diff(apr_array_header_t diff_options, char path1, svn_opt_revision_t revision1,
        char path2, svn_opt_revision_t revision2,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t no_diff_deleted,
        apr_file_t outfile, apr_file_t errfile,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff(*args)
# Thin SWIG wrappers for the diff-peg family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_diff_peg4(*args):
    """
    svn_client_diff_peg4(apr_array_header_t diff_options, char path, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start_revision,
        svn_opt_revision_t end_revision,
        char relative_to_dir, svn_depth_t depth,
        svn_boolean_t ignore_ancestry, svn_boolean_t no_diff_deleted,
        svn_boolean_t ignore_content_type,
        char header_encoding, apr_file_t outfile,
        apr_file_t errfile, apr_array_header_t changelists,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff_peg4(*args)
def svn_client_diff_peg3(*args):
    """
    svn_client_diff_peg3(apr_array_header_t diff_options, char path, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start_revision,
        svn_opt_revision_t end_revision,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t no_diff_deleted, svn_boolean_t ignore_content_type,
        char header_encoding,
        apr_file_t outfile, apr_file_t errfile,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff_peg3(*args)
def svn_client_diff_peg2(*args):
    """
    svn_client_diff_peg2(apr_array_header_t diff_options, char path, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start_revision,
        svn_opt_revision_t end_revision,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t no_diff_deleted, svn_boolean_t ignore_content_type,
        apr_file_t outfile,
        apr_file_t errfile, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff_peg2(*args)
def svn_client_diff_peg(*args):
    """
    svn_client_diff_peg(apr_array_header_t diff_options, char path, svn_opt_revision_t peg_revision,
        svn_opt_revision_t start_revision,
        svn_opt_revision_t end_revision,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t no_diff_deleted, apr_file_t outfile,
        apr_file_t errfile, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff_peg(*args)
# Thin SWIG wrappers for the diff-summarize family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_diff_summarize2(*args):
    """
    svn_client_diff_summarize2(char path1, svn_opt_revision_t revision1, char path2,
        svn_opt_revision_t revision2, svn_depth_t depth,
        svn_boolean_t ignore_ancestry, apr_array_header_t changelists,
        svn_client_diff_summarize_func_t summarize_func,
        void summarize_baton,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff_summarize2(*args)
def svn_client_diff_summarize(*args):
    """
    svn_client_diff_summarize(char path1, svn_opt_revision_t revision1, char path2,
        svn_opt_revision_t revision2, svn_boolean_t recurse,
        svn_boolean_t ignore_ancestry, svn_client_diff_summarize_func_t summarize_func,
        void summarize_baton,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff_summarize(*args)
def svn_client_diff_summarize_peg2(*args):
    """
    svn_client_diff_summarize_peg2(char path, svn_opt_revision_t peg_revision, svn_opt_revision_t start_revision,
        svn_opt_revision_t end_revision,
        svn_depth_t depth, svn_boolean_t ignore_ancestry,
        apr_array_header_t changelists,
        svn_client_diff_summarize_func_t summarize_func,
        void summarize_baton, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff_summarize_peg2(*args)
def svn_client_diff_summarize_peg(*args):
    """
    svn_client_diff_summarize_peg(char path, svn_opt_revision_t peg_revision, svn_opt_revision_t start_revision,
        svn_opt_revision_t end_revision,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_client_diff_summarize_func_t summarize_func,
        void summarize_baton,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_diff_summarize_peg(*args)
# Thin SWIG wrappers for the merge family; apply() is deprecated
# (removed in Python 3), so forward arguments with * unpacking instead.
def svn_client_merge3(*args):
    """
    svn_client_merge3(char source1, svn_opt_revision_t revision1, char source2,
        svn_opt_revision_t revision2, char target_wcpath,
        svn_depth_t depth, svn_boolean_t ignore_ancestry,
        svn_boolean_t force, svn_boolean_t record_only,
        svn_boolean_t dry_run, apr_array_header_t merge_options,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_merge3(*args)
def svn_client_merge2(*args):
    """
    svn_client_merge2(char source1, svn_opt_revision_t revision1, char source2,
        svn_opt_revision_t revision2, char target_wcpath,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t force, svn_boolean_t dry_run,
        apr_array_header_t merge_options,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_merge2(*args)
def svn_client_merge(*args):
    """
    svn_client_merge(char source1, svn_opt_revision_t revision1, char source2,
        svn_opt_revision_t revision2, char target_wcpath,
        svn_boolean_t recurse, svn_boolean_t ignore_ancestry,
        svn_boolean_t force, svn_boolean_t dry_run,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_merge(*args)
def svn_client_merge_reintegrate(*args):
    """
    svn_client_merge_reintegrate(char source, svn_opt_revision_t peg_revision, char target_wcpath,
        svn_boolean_t dry_run, apr_array_header_t merge_options,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_merge_reintegrate(*args)
def svn_client_merge_peg3(*args):
"""
svn_client_merge_peg3(char source, apr_array_header_t ranges_to_merge, svn_opt_revision_t peg_revision,
char target_wcpath,
svn_depth_t depth, svn_boolean_t ignore_ancestry,
svn_boolean_t force, svn_boolean_t record_only,
svn_boolean_t dry_run, apr_array_header_t merge_options,
svn_client_ctx_t ctx,
apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_merge_peg3, args)
def svn_client_merge_peg2(*args):
"""
svn_client_merge_peg2(char source, svn_opt_revision_t revision1, svn_opt_revision_t revision2,
svn_opt_revision_t peg_revision,
char target_wcpath, svn_boolean_t recurse,
svn_boolean_t ignore_ancestry, svn_boolean_t force,
svn_boolean_t dry_run, apr_array_header_t merge_options,
svn_client_ctx_t ctx,
apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_merge_peg2, args)
def svn_client_merge_peg(*args):
"""
svn_client_merge_peg(char source, svn_opt_revision_t revision1, svn_opt_revision_t revision2,
svn_opt_revision_t peg_revision,
char target_wcpath, svn_boolean_t recurse,
svn_boolean_t ignore_ancestry, svn_boolean_t force,
svn_boolean_t dry_run, svn_client_ctx_t ctx,
apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_merge_peg, args)
def svn_client_suggest_merge_sources(*args):
"""
svn_client_suggest_merge_sources(apr_array_header_t suggestions, char path_or_url, svn_opt_revision_t peg_revision,
svn_client_ctx_t ctx,
apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_suggest_merge_sources, args)
def svn_client_mergeinfo_get_merged(*args):
"""
svn_client_mergeinfo_get_merged(apr_hash_t mergeinfo, char path_or_url, svn_opt_revision_t peg_revision,
svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_mergeinfo_get_merged, args)
def svn_client_mergeinfo_log_merged(*args):
"""
svn_client_mergeinfo_log_merged(char path_or_url, svn_opt_revision_t peg_revision,
char merge_source_path_or_url, svn_opt_revision_t src_peg_revision,
svn_log_entry_receiver_t receiver,
svn_boolean_t discover_changed_paths,
apr_array_header_t revprops, svn_client_ctx_t ctx,
apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_mergeinfo_log_merged, args)
def svn_client_mergeinfo_log_eligible(*args):
"""
svn_client_mergeinfo_log_eligible(char path_or_url, svn_opt_revision_t peg_revision,
char merge_source_path_or_url, svn_opt_revision_t src_peg_revision,
svn_log_entry_receiver_t receiver,
svn_boolean_t discover_changed_paths,
apr_array_header_t revprops, svn_client_ctx_t ctx,
apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_mergeinfo_log_eligible, args)
def svn_client_cleanup(*args):
"""svn_client_cleanup(char dir, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t"""
return apply(_client.svn_client_cleanup, args)
def svn_client_relocate(*args):
"""
svn_client_relocate(char dir, char from, char to, svn_boolean_t recurse,
svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_relocate, args)
def svn_client_revert2(*args):
"""
svn_client_revert2(apr_array_header_t paths, svn_depth_t depth, apr_array_header_t changelists,
svn_client_ctx_t ctx,
apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_revert2, args)
def svn_client_revert(*args):
"""
svn_client_revert(apr_array_header_t paths, svn_boolean_t recursive,
svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_revert, args)
def svn_client_resolved(*args):
"""
svn_client_resolved(char path, svn_boolean_t recursive, svn_client_ctx_t ctx,
apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_resolved, args)
def svn_client_resolve(*args):
"""
svn_client_resolve(char path, svn_depth_t depth, svn_wc_conflict_choice_t conflict_choice,
svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
"""
return apply(_client.svn_client_resolve, args)
class svn_client_copy_source_t:
    """Proxy of C svn_client_copy_source_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_copy_source_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_copy_source_t, name)
    __repr__ = _swig_repr
    # Getter/setter thunks for the underlying C struct members.
    __swig_setmethods__["path"] = _client.svn_client_copy_source_t_path_set
    __swig_getmethods__["path"] = _client.svn_client_copy_source_t_path_get
    __swig_setmethods__["revision"] = _client.svn_client_copy_source_t_revision_set
    __swig_getmethods__["revision"] = _client.svn_client_copy_source_t_revision_get
    __swig_setmethods__["peg_revision"] = _client.svn_client_copy_source_t_peg_revision_set
    __swig_getmethods__["peg_revision"] = _client.svn_client_copy_source_t_peg_revision_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_client_copy_source_t"""
        import libsvn.core, weakref
        # Tie this proxy's lifetime to an APR pool; default to the global one.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # A dead weakref later tells us the pool memory has been freed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state cached on a previously returned proxy
        # for the same member, so identity-ish semantics are preserved.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    pass
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python proxy so __getattr__ can restore its state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self, *args):
        """__init__(self) -> svn_client_copy_source_t"""
        # apply() is deprecated (removed in Python 3); call directly instead.
        this = _client.new_svn_client_copy_source_t(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _client.delete_svn_client_copy_source_t
    __del__ = lambda self: None
svn_client_copy_source_t_swigregister = _client.svn_client_copy_source_t_swigregister
svn_client_copy_source_t_swigregister(svn_client_copy_source_t)
# Thin wrappers over the C copy/move/property functions in ``_client``.
# Arguments are forwarded unchanged; apply() is replaced with the equivalent
# (and Python-3-compatible) f(*args) call.
def svn_client_copy5(*args):
    """
    svn_client_copy5(svn_commit_info_t commit_info_p, apr_array_header_t sources,
        char dst_path, svn_boolean_t copy_as_child, svn_boolean_t make_parents,
        svn_boolean_t ignore_externals, apr_hash_t revprop_table,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_copy5(*args)
def svn_client_copy4(*args):
    """
    svn_client_copy4(svn_commit_info_t commit_info_p, apr_array_header_t sources,
        char dst_path, svn_boolean_t copy_as_child, svn_boolean_t make_parents,
        apr_hash_t revprop_table, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_copy4(*args)
def svn_client_copy3(*args):
    """
    svn_client_copy3(svn_commit_info_t commit_info_p, char src_path,
        svn_opt_revision_t src_revision, char dst_path,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_copy3(*args)
def svn_client_copy2(*args):
    """
    svn_client_copy2(svn_commit_info_t commit_info_p, char src_path,
        svn_opt_revision_t src_revision, char dst_path,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_copy2(*args)
def svn_client_copy(*args):
    """
    svn_client_copy(svn_client_commit_info_t commit_info_p, char src_path,
        svn_opt_revision_t src_revision, char dst_path,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_copy(*args)
def svn_client_move5(*args):
    """
    svn_client_move5(svn_commit_info_t commit_info_p, apr_array_header_t src_paths,
        char dst_path, svn_boolean_t force, svn_boolean_t move_as_child,
        svn_boolean_t make_parents, apr_hash_t revprop_table,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_move5(*args)
def svn_client_move4(*args):
    """
    svn_client_move4(svn_commit_info_t commit_info_p, char src_path, char dst_path,
        svn_boolean_t force, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_move4(*args)
def svn_client_move3(*args):
    """
    svn_client_move3(svn_commit_info_t commit_info_p, char src_path, char dst_path,
        svn_boolean_t force, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_move3(*args)
def svn_client_move2(*args):
    """
    svn_client_move2(svn_client_commit_info_t commit_info_p, char src_path,
        char dst_path, svn_boolean_t force, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_move2(*args)
def svn_client_move(*args):
    """
    svn_client_move(svn_client_commit_info_t commit_info_p, char src_path,
        svn_opt_revision_t src_revision, char dst_path, svn_boolean_t force,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_move(*args)
def svn_client_propset3(*args):
    """
    svn_client_propset3(svn_commit_info_t commit_info_p, char propname,
        svn_string_t propval, char target, svn_depth_t depth,
        svn_boolean_t skip_checks, svn_revnum_t base_revision_for_url,
        apr_array_header_t changelists, apr_hash_t revprop_table,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propset3(*args)
def svn_client_propset2(*args):
    """
    svn_client_propset2(char propname, svn_string_t propval, char target,
        svn_boolean_t recurse, svn_boolean_t skip_checks,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propset2(*args)
def svn_client_propset(*args):
    """
    svn_client_propset(char propname, svn_string_t propval, char target,
        svn_boolean_t recurse, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propset(*args)
def svn_client_revprop_set2(*args):
    """
    svn_client_revprop_set2(char propname, svn_string_t propval,
        svn_string_t original_propval, char URL, svn_opt_revision_t revision,
        svn_revnum_t set_rev, svn_boolean_t force,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_revprop_set2(*args)
def svn_client_revprop_set(*args):
    """
    svn_client_revprop_set(char propname, svn_string_t propval, char URL,
        svn_opt_revision_t revision, svn_revnum_t set_rev, svn_boolean_t force,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_revprop_set(*args)
def svn_client_propget3(*args):
    """
    svn_client_propget3(apr_hash_t props, char propname, char target,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_revnum_t actual_revnum, svn_depth_t depth,
        apr_array_header_t changelists, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propget3(*args)
def svn_client_propget2(*args):
    """
    svn_client_propget2(apr_hash_t props, char propname, char target,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propget2(*args)
def svn_client_propget(*args):
    """
    svn_client_propget(apr_hash_t props, char propname, char target,
        svn_opt_revision_t revision, svn_boolean_t recurse,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propget(*args)
def svn_client_revprop_get(*args):
    """
    svn_client_revprop_get(char propname, svn_string_t propval, char URL,
        svn_opt_revision_t revision, svn_revnum_t set_rev,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_revprop_get(*args)
# Thin wrappers over the C proplist/export/list/cat/changelist/lock functions
# in ``_client``.  Arguments are forwarded unchanged; apply() is replaced with
# the equivalent (and Python-3-compatible) f(*args) call.
def svn_client_proplist3(*args):
    """
    svn_client_proplist3(char target, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_depth_t depth,
        apr_array_header_t changelists, svn_proplist_receiver_t receiver,
        void receiver_baton, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_proplist3(*args)
def svn_client_proplist2(*args):
    """
    svn_client_proplist2(apr_array_header_t props, char target,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_proplist2(*args)
def svn_client_proplist(*args):
    """
    svn_client_proplist(apr_array_header_t props, char target,
        svn_opt_revision_t revision, svn_boolean_t recurse,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_proplist(*args)
def svn_client_revprop_list(*args):
    """
    svn_client_revprop_list(apr_hash_t props, char URL, svn_opt_revision_t revision,
        svn_revnum_t set_rev, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_revprop_list(*args)
def svn_client_export4(*args):
    """
    svn_client_export4(svn_revnum_t result_rev, char from, char to,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_boolean_t overwrite, svn_boolean_t ignore_externals,
        svn_depth_t depth, char native_eol, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_export4(*args)
def svn_client_export3(*args):
    """
    svn_client_export3(svn_revnum_t result_rev, char from, char to,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_boolean_t overwrite, svn_boolean_t ignore_externals,
        svn_boolean_t recurse, char native_eol, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_export3(*args)
def svn_client_export2(*args):
    """
    svn_client_export2(svn_revnum_t result_rev, char from, char to,
        svn_opt_revision_t revision, svn_boolean_t force, char native_eol,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_export2(*args)
def svn_client_export(*args):
    """
    svn_client_export(svn_revnum_t result_rev, char from, char to,
        svn_opt_revision_t revision, svn_boolean_t force,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_export(*args)
def svn_client_list2(*args):
    """
    svn_client_list2(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_depth_t depth,
        apr_uint32_t dirent_fields, svn_boolean_t fetch_locks,
        svn_client_list_func_t list_func, void baton,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_list2(*args)
def svn_client_list(*args):
    """
    svn_client_list(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_boolean_t recurse,
        apr_uint32_t dirent_fields, svn_boolean_t fetch_locks,
        svn_client_list_func_t list_func, void baton,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_list(*args)
def svn_client_ls3(*args):
    """
    svn_client_ls3(apr_hash_t dirents, apr_hash_t locks, char path_or_url,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_ls3(*args)
def svn_client_ls2(*args):
    """
    svn_client_ls2(apr_hash_t dirents, char path_or_url,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_ls2(*args)
def svn_client_ls(*args):
    """
    svn_client_ls(apr_hash_t dirents, char path_or_url, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_ls(*args)
def svn_client_cat2(*args):
    """
    svn_client_cat2(svn_stream_t out, char path_or_url,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_cat2(*args)
def svn_client_cat(*args):
    """
    svn_client_cat(svn_stream_t out, char path_or_url, svn_opt_revision_t revision,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_cat(*args)
def svn_client_add_to_changelist(*args):
    """
    svn_client_add_to_changelist(apr_array_header_t paths, char changelist,
        svn_depth_t depth, apr_array_header_t changelists,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_add_to_changelist(*args)
def svn_client_remove_from_changelists(*args):
    """
    svn_client_remove_from_changelists(apr_array_header_t paths, svn_depth_t depth,
        apr_array_header_t changelists, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_remove_from_changelists(*args)
def svn_client_get_changelists(*args):
    """
    svn_client_get_changelists(char path, apr_array_header_t changelists,
        svn_depth_t depth, svn_changelist_receiver_t callback_func,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_get_changelists(*args)
def svn_client_lock(*args):
    """
    svn_client_lock(apr_array_header_t targets, char comment,
        svn_boolean_t steal_lock, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_lock(*args)
def svn_client_unlock(*args):
    """
    svn_client_unlock(apr_array_header_t targets, svn_boolean_t break_lock,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_unlock(*args)
class svn_info_t:
    """Proxy of C svn_info_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_info_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_info_t, name)
    __repr__ = _swig_repr
    # Getter/setter thunks for the underlying C struct members.
    __swig_setmethods__["URL"] = _client.svn_info_t_URL_set
    __swig_getmethods__["URL"] = _client.svn_info_t_URL_get
    __swig_setmethods__["rev"] = _client.svn_info_t_rev_set
    __swig_getmethods__["rev"] = _client.svn_info_t_rev_get
    __swig_setmethods__["kind"] = _client.svn_info_t_kind_set
    __swig_getmethods__["kind"] = _client.svn_info_t_kind_get
    __swig_setmethods__["repos_root_URL"] = _client.svn_info_t_repos_root_URL_set
    __swig_getmethods__["repos_root_URL"] = _client.svn_info_t_repos_root_URL_get
    __swig_setmethods__["repos_UUID"] = _client.svn_info_t_repos_UUID_set
    __swig_getmethods__["repos_UUID"] = _client.svn_info_t_repos_UUID_get
    __swig_setmethods__["last_changed_rev"] = _client.svn_info_t_last_changed_rev_set
    __swig_getmethods__["last_changed_rev"] = _client.svn_info_t_last_changed_rev_get
    __swig_setmethods__["last_changed_date"] = _client.svn_info_t_last_changed_date_set
    __swig_getmethods__["last_changed_date"] = _client.svn_info_t_last_changed_date_get
    __swig_setmethods__["last_changed_author"] = _client.svn_info_t_last_changed_author_set
    __swig_getmethods__["last_changed_author"] = _client.svn_info_t_last_changed_author_get
    __swig_setmethods__["lock"] = _client.svn_info_t_lock_set
    __swig_getmethods__["lock"] = _client.svn_info_t_lock_get
    __swig_setmethods__["has_wc_info"] = _client.svn_info_t_has_wc_info_set
    __swig_getmethods__["has_wc_info"] = _client.svn_info_t_has_wc_info_get
    __swig_setmethods__["schedule"] = _client.svn_info_t_schedule_set
    __swig_getmethods__["schedule"] = _client.svn_info_t_schedule_get
    __swig_setmethods__["copyfrom_url"] = _client.svn_info_t_copyfrom_url_set
    __swig_getmethods__["copyfrom_url"] = _client.svn_info_t_copyfrom_url_get
    __swig_setmethods__["copyfrom_rev"] = _client.svn_info_t_copyfrom_rev_set
    __swig_getmethods__["copyfrom_rev"] = _client.svn_info_t_copyfrom_rev_get
    __swig_setmethods__["text_time"] = _client.svn_info_t_text_time_set
    __swig_getmethods__["text_time"] = _client.svn_info_t_text_time_get
    __swig_setmethods__["prop_time"] = _client.svn_info_t_prop_time_set
    __swig_getmethods__["prop_time"] = _client.svn_info_t_prop_time_get
    __swig_setmethods__["checksum"] = _client.svn_info_t_checksum_set
    __swig_getmethods__["checksum"] = _client.svn_info_t_checksum_get
    __swig_setmethods__["conflict_old"] = _client.svn_info_t_conflict_old_set
    __swig_getmethods__["conflict_old"] = _client.svn_info_t_conflict_old_get
    __swig_setmethods__["conflict_new"] = _client.svn_info_t_conflict_new_set
    __swig_getmethods__["conflict_new"] = _client.svn_info_t_conflict_new_get
    __swig_setmethods__["conflict_wrk"] = _client.svn_info_t_conflict_wrk_set
    __swig_getmethods__["conflict_wrk"] = _client.svn_info_t_conflict_wrk_get
    __swig_setmethods__["prejfile"] = _client.svn_info_t_prejfile_set
    __swig_getmethods__["prejfile"] = _client.svn_info_t_prejfile_get
    __swig_setmethods__["changelist"] = _client.svn_info_t_changelist_set
    __swig_getmethods__["changelist"] = _client.svn_info_t_changelist_get
    __swig_setmethods__["depth"] = _client.svn_info_t_depth_set
    __swig_getmethods__["depth"] = _client.svn_info_t_depth_get
    __swig_setmethods__["working_size"] = _client.svn_info_t_working_size_set
    __swig_getmethods__["working_size"] = _client.svn_info_t_working_size_get
    __swig_setmethods__["size"] = _client.svn_info_t_size_set
    __swig_getmethods__["size"] = _client.svn_info_t_size_get
    __swig_setmethods__["size64"] = _client.svn_info_t_size64_set
    __swig_getmethods__["size64"] = _client.svn_info_t_size64_get
    __swig_setmethods__["working_size64"] = _client.svn_info_t_working_size64_set
    __swig_getmethods__["working_size64"] = _client.svn_info_t_working_size64_get
    __swig_setmethods__["tree_conflict"] = _client.svn_info_t_tree_conflict_set
    __swig_getmethods__["tree_conflict"] = _client.svn_info_t_tree_conflict_get
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_info_t"""
        import libsvn.core, weakref
        # Tie this proxy's lifetime to an APR pool; default to the global one.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # A dead weakref later tells us the pool memory has been freed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state cached on a previously returned proxy
        # for the same member, so identity-ish semantics are preserved.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    pass
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python proxy so __getattr__ can restore its state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __init__(self, *args):
        """__init__(self) -> svn_info_t"""
        # apply() is deprecated (removed in Python 3); call directly instead.
        this = _client.new_svn_info_t(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _client.delete_svn_info_t
    __del__ = lambda self: None
svn_info_t_swigregister = _client.svn_info_t_swigregister
svn_info_t_swigregister(svn_info_t)
# Thin wrappers over the C info/url/uuid helpers and the callback-invoker
# functions in ``_client``.  Arguments are forwarded unchanged; apply() is
# replaced with the equivalent (and Python-3-compatible) f(*args) call.
def svn_info_dup(*args):
    """svn_info_dup(svn_info_t info, apr_pool_t pool) -> svn_info_t"""
    return _client.svn_info_dup(*args)
def svn_client_info2(*args):
    """
    svn_client_info2(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_info_receiver_t receiver,
        svn_depth_t depth, apr_array_header_t changelists,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_info2(*args)
def svn_client_info(*args):
    """
    svn_client_info(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_info_receiver_t receiver,
        svn_boolean_t recurse, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_info(*args)
def svn_client_url_from_path(*args):
    """svn_client_url_from_path(char url, char path_or_url, apr_pool_t pool) -> svn_error_t"""
    return _client.svn_client_url_from_path(*args)
def svn_client_root_url_from_path(*args):
    """svn_client_root_url_from_path(char url, char path_or_url, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t"""
    return _client.svn_client_root_url_from_path(*args)
def svn_client_uuid_from_url(*args):
    """svn_client_uuid_from_url(char uuid, char url, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t"""
    return _client.svn_client_uuid_from_url(*args)
def svn_client_uuid_from_path(*args):
    """
    svn_client_uuid_from_path(char uuid, char path, svn_wc_adm_access_t adm_access,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_uuid_from_path(*args)
def svn_client_open_ra_session(*args):
    """
    svn_client_open_ra_session(svn_ra_session_t session, char url,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_open_ra_session(*args)
def svn_proplist_invoke_receiver(*args):
    """
    svn_proplist_invoke_receiver(svn_proplist_receiver_t _obj, void baton, char path,
        apr_hash_t prop_hash, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_proplist_invoke_receiver(*args)
def svn_client_invoke_get_commit_log3(*args):
    """
    svn_client_invoke_get_commit_log3(svn_client_get_commit_log3_t _obj,
        char log_msg, char tmp_file, apr_array_header_t commit_items,
        void baton, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_get_commit_log3(*args)
def svn_client_invoke_get_commit_log2(*args):
    """
    svn_client_invoke_get_commit_log2(svn_client_get_commit_log2_t _obj,
        char log_msg, char tmp_file, apr_array_header_t commit_items,
        void baton, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_get_commit_log2(*args)
def svn_client_invoke_get_commit_log(*args):
    """
    svn_client_invoke_get_commit_log(svn_client_get_commit_log_t _obj,
        char log_msg, char tmp_file, apr_array_header_t commit_items,
        void baton, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_get_commit_log(*args)
def svn_client_invoke_blame_receiver2(*args):
    """
    svn_client_invoke_blame_receiver2(svn_client_blame_receiver2_t _obj, void baton,
        apr_int64_t line_no, svn_revnum_t revision, char author, char date,
        svn_revnum_t merged_revision, char merged_author, char merged_date,
        char merged_path, char line, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_blame_receiver2(*args)
def svn_client_invoke_blame_receiver(*args):
    """
    svn_client_invoke_blame_receiver(svn_client_blame_receiver_t _obj, void baton,
        apr_int64_t line_no, svn_revnum_t revision, char author, char date,
        char line, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_blame_receiver(*args)
def svn_client_invoke_diff_summarize_func(*args):
    """
    svn_client_invoke_diff_summarize_func(svn_client_diff_summarize_func_t _obj,
        svn_client_diff_summarize_t diff, void baton,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_diff_summarize_func(*args)
def svn_client_invoke_list_func(*args):
    """
    svn_client_invoke_list_func(svn_client_list_func_t _obj, void baton, char path,
        svn_dirent_t dirent, svn_lock_t lock, char abs_path,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_list_func(*args)
def svn_changelist_invoke_receiver(*args):
    """
    svn_changelist_invoke_receiver(svn_changelist_receiver_t _obj, void baton,
        char path, char changelist, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_changelist_invoke_receiver(*args)
def svn_info_invoke_receiver(*args):
    """
    svn_info_invoke_receiver(svn_info_receiver_t _obj, void baton, char path,
        svn_info_t info, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_info_invoke_receiver(*args)
class svn_proplist_receiver_t:
    """Proxy of C svn_proplist_receiver_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_proplist_receiver_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_proplist_receiver_t, name)
    def __init__(self, *args, **kwargs):
        # Callback-pointer proxies are only created by the C layer.
        # (Call syntax instead of the Python-2-only "raise E, msg" statement.)
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_proplist_receiver_t"""
        import libsvn.core, weakref
        # Tie this proxy's lifetime to an APR pool; default to the global one.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # A dead weakref later tells us the pool memory has been freed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state cached on a previously returned proxy
        # for the same member, so identity-ish semantics are preserved.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    pass
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python proxy so __getattr__ can restore its state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Invoke the wrapped C callback through the module-level helper.
        return svn_proplist_invoke_receiver(self, *args)
svn_proplist_receiver_t_swigregister = _client.svn_proplist_receiver_t_swigregister
svn_proplist_receiver_t_swigregister(svn_proplist_receiver_t)
class svn_client_get_commit_log3_t:
    """Proxy of C svn_client_get_commit_log3_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_get_commit_log3_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_get_commit_log3_t, name)
    def __init__(self, *args, **kwargs):
        # Callback-pointer proxies are only created by the C layer.
        # (Call syntax instead of the Python-2-only "raise E, msg" statement.)
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    def set_parent_pool(self, parent_pool=None):
        """Create a new proxy object for svn_client_get_commit_log3_t"""
        import libsvn.core, weakref
        # Tie this proxy's lifetime to an APR pool; default to the global one.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # A dead weakref later tells us the pool memory has been freed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)
    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"
    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state cached on a previously returned proxy
        # for the same member, so identity-ish semantics are preserved.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    pass
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value
    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the Python proxy so __getattr__ can restore its state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
    def __call__(self, *args):
        # Invoke the wrapped C callback through the module-level helper.
        return svn_client_invoke_get_commit_log3(self, *args)
svn_client_get_commit_log3_t_swigregister = _client.svn_client_get_commit_log3_t_swigregister
svn_client_get_commit_log3_t_swigregister(svn_client_get_commit_log3_t)
class svn_client_get_commit_log2_t:
"""Proxy of C svn_client_get_commit_log2_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_get_commit_log2_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_client_get_commit_log2_t, name)
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def set_parent_pool(self, parent_pool=None):
"""Create a new proxy object for svn_client_get_commit_log2_t"""
import libsvn.core, weakref
self.__dict__["_parent_pool"] = \
parent_pool or libsvn.core.application_pool;
if self.__dict__["_parent_pool"]:
self.__dict__["_is_valid"] = weakref.ref(
self.__dict__["_parent_pool"]._is_valid)
def assert_valid(self):
"""Assert that this object is using valid pool memory"""
if "_is_valid" in self.__dict__:
assert self.__dict__["_is_valid"](), "Variable has already been deleted"
def __getattr__(self, name):
"""Get an attribute from this object"""
self.assert_valid()
value = _swig_getattr(self, self.__class__, name)
members = self.__dict__.get("_members")
if members is not None:
old_value = members.get(name)
if (old_value is not None and value is not None and
value is not old_value):
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
if hasattr(value, "assert_valid"):
value.assert_valid()
return value
def __setattr__(self, name, value):
"""Set an attribute on this object"""
self.assert_valid()
self.__dict__.setdefault("_members",{})[name] = value
return _swig_setattr(self, self.__class__, name, value)
def __call__(self, *args):
return svn_client_invoke_get_commit_log2(self, *args)
svn_client_get_commit_log2_t_swigregister = _client.svn_client_get_commit_log2_t_swigregister
svn_client_get_commit_log2_t_swigregister(svn_client_get_commit_log2_t)
class svn_client_get_commit_log_t:
"""Proxy of C svn_client_get_commit_log_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_get_commit_log_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_client_get_commit_log_t, name)
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def set_parent_pool(self, parent_pool=None):
"""Create a new proxy object for svn_client_get_commit_log_t"""
import libsvn.core, weakref
self.__dict__["_parent_pool"] = \
parent_pool or libsvn.core.application_pool;
if self.__dict__["_parent_pool"]:
self.__dict__["_is_valid"] = weakref.ref(
self.__dict__["_parent_pool"]._is_valid)
def assert_valid(self):
"""Assert that this object is using valid pool memory"""
if "_is_valid" in self.__dict__:
assert self.__dict__["_is_valid"](), "Variable has already been deleted"
def __getattr__(self, name):
"""Get an attribute from this object"""
self.assert_valid()
value = _swig_getattr(self, self.__class__, name)
members = self.__dict__.get("_members")
if members is not None:
old_value = members.get(name)
if (old_value is not None and value is not None and
value is not old_value):
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
if hasattr(value, "assert_valid"):
value.assert_valid()
return value
def __setattr__(self, name, value):
"""Set an attribute on this object"""
self.assert_valid()
self.__dict__.setdefault("_members",{})[name] = value
return _swig_setattr(self, self.__class__, name, value)
def __call__(self, *args):
return svn_client_invoke_get_commit_log(self, *args)
svn_client_get_commit_log_t_swigregister = _client.svn_client_get_commit_log_t_swigregister
svn_client_get_commit_log_t_swigregister(svn_client_get_commit_log_t)
class svn_client_blame_receiver2_t:
"""Proxy of C svn_client_blame_receiver2_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_blame_receiver2_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_client_blame_receiver2_t, name)
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def set_parent_pool(self, parent_pool=None):
"""Create a new proxy object for svn_client_blame_receiver2_t"""
import libsvn.core, weakref
self.__dict__["_parent_pool"] = \
parent_pool or libsvn.core.application_pool;
if self.__dict__["_parent_pool"]:
self.__dict__["_is_valid"] = weakref.ref(
self.__dict__["_parent_pool"]._is_valid)
def assert_valid(self):
"""Assert that this object is using valid pool memory"""
if "_is_valid" in self.__dict__:
assert self.__dict__["_is_valid"](), "Variable has already been deleted"
def __getattr__(self, name):
"""Get an attribute from this object"""
self.assert_valid()
value = _swig_getattr(self, self.__class__, name)
members = self.__dict__.get("_members")
if members is not None:
old_value = members.get(name)
if (old_value is not None and value is not None and
value is not old_value):
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
if hasattr(value, "assert_valid"):
value.assert_valid()
return value
def __setattr__(self, name, value):
"""Set an attribute on this object"""
self.assert_valid()
self.__dict__.setdefault("_members",{})[name] = value
return _swig_setattr(self, self.__class__, name, value)
def __call__(self, *args):
return svn_client_invoke_blame_receiver2(self, *args)
svn_client_blame_receiver2_t_swigregister = _client.svn_client_blame_receiver2_t_swigregister
svn_client_blame_receiver2_t_swigregister(svn_client_blame_receiver2_t)
class svn_client_blame_receiver_t:
"""Proxy of C svn_client_blame_receiver_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_blame_receiver_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_client_blame_receiver_t, name)
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def set_parent_pool(self, parent_pool=None):
"""Create a new proxy object for svn_client_blame_receiver_t"""
import libsvn.core, weakref
self.__dict__["_parent_pool"] = \
parent_pool or libsvn.core.application_pool;
if self.__dict__["_parent_pool"]:
self.__dict__["_is_valid"] = weakref.ref(
self.__dict__["_parent_pool"]._is_valid)
def assert_valid(self):
"""Assert that this object is using valid pool memory"""
if "_is_valid" in self.__dict__:
assert self.__dict__["_is_valid"](), "Variable has already been deleted"
def __getattr__(self, name):
"""Get an attribute from this object"""
self.assert_valid()
value = _swig_getattr(self, self.__class__, name)
members = self.__dict__.get("_members")
if members is not None:
old_value = members.get(name)
if (old_value is not None and value is not None and
value is not old_value):
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
if hasattr(value, "assert_valid"):
value.assert_valid()
return value
def __setattr__(self, name, value):
"""Set an attribute on this object"""
self.assert_valid()
self.__dict__.setdefault("_members",{})[name] = value
return _swig_setattr(self, self.__class__, name, value)
def __call__(self, *args):
return svn_client_invoke_blame_receiver(self, *args)
svn_client_blame_receiver_t_swigregister = _client.svn_client_blame_receiver_t_swigregister
svn_client_blame_receiver_t_swigregister(svn_client_blame_receiver_t)
class svn_client_diff_summarize_func_t:
"""Proxy of C svn_client_diff_summarize_func_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_diff_summarize_func_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_client_diff_summarize_func_t, name)
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def set_parent_pool(self, parent_pool=None):
"""Create a new proxy object for svn_client_diff_summarize_func_t"""
import libsvn.core, weakref
self.__dict__["_parent_pool"] = \
parent_pool or libsvn.core.application_pool;
if self.__dict__["_parent_pool"]:
self.__dict__["_is_valid"] = weakref.ref(
self.__dict__["_parent_pool"]._is_valid)
def assert_valid(self):
"""Assert that this object is using valid pool memory"""
if "_is_valid" in self.__dict__:
assert self.__dict__["_is_valid"](), "Variable has already been deleted"
def __getattr__(self, name):
"""Get an attribute from this object"""
self.assert_valid()
value = _swig_getattr(self, self.__class__, name)
members = self.__dict__.get("_members")
if members is not None:
old_value = members.get(name)
if (old_value is not None and value is not None and
value is not old_value):
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
if hasattr(value, "assert_valid"):
value.assert_valid()
return value
def __setattr__(self, name, value):
"""Set an attribute on this object"""
self.assert_valid()
self.__dict__.setdefault("_members",{})[name] = value
return _swig_setattr(self, self.__class__, name, value)
def __call__(self, *args):
return svn_client_invoke_diff_summarize_func(self, *args)
svn_client_diff_summarize_func_t_swigregister = _client.svn_client_diff_summarize_func_t_swigregister
svn_client_diff_summarize_func_t_swigregister(svn_client_diff_summarize_func_t)
class svn_client_list_func_t:
"""Proxy of C svn_client_list_func_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_list_func_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_client_list_func_t, name)
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def set_parent_pool(self, parent_pool=None):
"""Create a new proxy object for svn_client_list_func_t"""
import libsvn.core, weakref
self.__dict__["_parent_pool"] = \
parent_pool or libsvn.core.application_pool;
if self.__dict__["_parent_pool"]:
self.__dict__["_is_valid"] = weakref.ref(
self.__dict__["_parent_pool"]._is_valid)
def assert_valid(self):
"""Assert that this object is using valid pool memory"""
if "_is_valid" in self.__dict__:
assert self.__dict__["_is_valid"](), "Variable has already been deleted"
def __getattr__(self, name):
"""Get an attribute from this object"""
self.assert_valid()
value = _swig_getattr(self, self.__class__, name)
members = self.__dict__.get("_members")
if members is not None:
old_value = members.get(name)
if (old_value is not None and value is not None and
value is not old_value):
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
if hasattr(value, "assert_valid"):
value.assert_valid()
return value
def __setattr__(self, name, value):
"""Set an attribute on this object"""
self.assert_valid()
self.__dict__.setdefault("_members",{})[name] = value
return _swig_setattr(self, self.__class__, name, value)
def __call__(self, *args):
return svn_client_invoke_list_func(self, *args)
svn_client_list_func_t_swigregister = _client.svn_client_list_func_t_swigregister
svn_client_list_func_t_swigregister(svn_client_list_func_t)
class svn_changelist_receiver_t:
"""Proxy of C svn_changelist_receiver_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_changelist_receiver_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_changelist_receiver_t, name)
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def set_parent_pool(self, parent_pool=None):
"""Create a new proxy object for svn_changelist_receiver_t"""
import libsvn.core, weakref
self.__dict__["_parent_pool"] = \
parent_pool or libsvn.core.application_pool;
if self.__dict__["_parent_pool"]:
self.__dict__["_is_valid"] = weakref.ref(
self.__dict__["_parent_pool"]._is_valid)
def assert_valid(self):
"""Assert that this object is using valid pool memory"""
if "_is_valid" in self.__dict__:
assert self.__dict__["_is_valid"](), "Variable has already been deleted"
def __getattr__(self, name):
"""Get an attribute from this object"""
self.assert_valid()
value = _swig_getattr(self, self.__class__, name)
members = self.__dict__.get("_members")
if members is not None:
old_value = members.get(name)
if (old_value is not None and value is not None and
value is not old_value):
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
if hasattr(value, "assert_valid"):
value.assert_valid()
return value
def __setattr__(self, name, value):
"""Set an attribute on this object"""
self.assert_valid()
self.__dict__.setdefault("_members",{})[name] = value
return _swig_setattr(self, self.__class__, name, value)
def __call__(self, *args):
return svn_changelist_invoke_receiver(self, *args)
svn_changelist_receiver_t_swigregister = _client.svn_changelist_receiver_t_swigregister
svn_changelist_receiver_t_swigregister(svn_changelist_receiver_t)
class svn_info_receiver_t:
"""Proxy of C svn_info_receiver_t struct"""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, svn_info_receiver_t, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, svn_info_receiver_t, name)
def __init__(self, *args, **kwargs): raise AttributeError, "No constructor defined"
__repr__ = _swig_repr
def set_parent_pool(self, parent_pool=None):
"""Create a new proxy object for svn_info_receiver_t"""
import libsvn.core, weakref
self.__dict__["_parent_pool"] = \
parent_pool or libsvn.core.application_pool;
if self.__dict__["_parent_pool"]:
self.__dict__["_is_valid"] = weakref.ref(
self.__dict__["_parent_pool"]._is_valid)
def assert_valid(self):
"""Assert that this object is using valid pool memory"""
if "_is_valid" in self.__dict__:
assert self.__dict__["_is_valid"](), "Variable has already been deleted"
def __getattr__(self, name):
"""Get an attribute from this object"""
self.assert_valid()
value = _swig_getattr(self, self.__class__, name)
members = self.__dict__.get("_members")
if members is not None:
old_value = members.get(name)
if (old_value is not None and value is not None and
value is not old_value):
try:
value.__dict__.update(old_value.__dict__)
except AttributeError:
pass
if hasattr(value, "assert_valid"):
value.assert_valid()
return value
def __setattr__(self, name, value):
"""Set an attribute on this object"""
self.assert_valid()
self.__dict__.setdefault("_members",{})[name] = value
return _swig_setattr(self, self.__class__, name, value)
def __call__(self, *args):
return svn_info_invoke_receiver(self, *args)
svn_info_receiver_t_swigregister = _client.svn_info_receiver_t_swigregister
svn_info_receiver_t_swigregister(svn_info_receiver_t)
SWIG_SVN_INFO_SIZE_UNKNOWN = _client.SWIG_SVN_INFO_SIZE_UNKNOWN
svn_swig_py_cancel_func = _client.svn_swig_py_cancel_func
svn_swig_py_get_commit_log_func = _client.svn_swig_py_get_commit_log_func
svn_swig_py_notify_func = _client.svn_swig_py_notify_func
| 39.600154
| 146
| 0.738431
| 14,850
| 102,604
| 4.452121
| 0.027407
| 0.115709
| 0.061938
| 0.032444
| 0.894667
| 0.848595
| 0.798878
| 0.771425
| 0.739405
| 0.70867
| 0
| 0.004779
| 0.180237
| 102,604
| 2,590
| 147
| 39.615444
| 0.781253
| 0.002017
| 0
| 0.508885
| 1
| 0
| 0.065873
| 0.001331
| 0
| 0
| 0
| 0
| 0.087237
| 0
| null | null | 0.015347
| 0.024233
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5c1a77127e714bbe04a40a8a0efe5d0cd884c2f5
| 113
|
py
|
Python
|
backend/config.py
|
mattj241/Trivia_API
|
9e7ab06997df7a3be4daffc3385116178bc78c86
|
[
"MIT"
] | null | null | null |
backend/config.py
|
mattj241/Trivia_API
|
9e7ab06997df7a3be4daffc3385116178bc78c86
|
[
"MIT"
] | null | null | null |
backend/config.py
|
mattj241/Trivia_API
|
9e7ab06997df7a3be4daffc3385116178bc78c86
|
[
"MIT"
] | null | null | null |
import os
def init_env_vars():
os.environ['DB_USER'] = 'postgres'
os.environ['DB_PASSWORD'] = 'marshall'
| 22.6
| 42
| 0.672566
| 16
| 113
| 4.5
| 0.75
| 0.25
| 0.305556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159292
| 113
| 5
| 42
| 22.6
| 0.757895
| 0
| 0
| 0
| 0
| 0
| 0.298246
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
a51b80710fc2a236f6a7065d6603b2abcdd9093f
| 8,744
|
py
|
Python
|
tests/integration/transformers/test_numerical.py
|
HDI-Project/RDT
|
f1648d10346f4e431957aca65e25a00879a5d419
|
[
"MIT"
] | 8
|
2018-06-20T22:59:07.000Z
|
2019-02-19T08:48:53.000Z
|
tests/integration/transformers/test_numerical.py
|
HDI-Project/RDT
|
f1648d10346f4e431957aca65e25a00879a5d419
|
[
"MIT"
] | 63
|
2018-06-20T22:08:37.000Z
|
2019-12-16T18:57:08.000Z
|
tests/integration/transformers/test_numerical.py
|
HDI-Project/RDT
|
f1648d10346f4e431957aca65e25a00879a5d419
|
[
"MIT"
] | 5
|
2018-11-06T16:45:48.000Z
|
2020-01-02T13:41:07.000Z
|
import numpy as np
import pandas as pd
from rdt.transformers.numerical import ClusterBasedNormalizer, FloatFormatter, GaussianNormalizer
class TestFloatFormatter:
def test_model_missing_values(self):
data = pd.DataFrame([1, 2, 1, 2, np.nan, 1], columns=['a'])
column = 'a'
nt = FloatFormatter(
missing_value_replacement='mean',
model_missing_values=True,
)
nt.fit(data, column)
transformed = nt.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (6, 2)
assert list(transformed.iloc[:, 1]) == [0, 0, 0, 0, 1, 0]
reverse = nt.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=2)
def test_not_model_missing_values(self):
data = pd.DataFrame([1, 2, 1, 2, np.nan, 1], columns=['a'])
column = 'a'
nt = FloatFormatter(model_missing_values=False)
nt.fit(data, column)
transformed = nt.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (6, 1)
reverse = nt.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=2)
def test_int(self):
data = pd.DataFrame([1, 2, 1, 2, 1], columns=['a'])
column = 'a'
nt = FloatFormatter()
nt.fit(data, column)
transformed = nt.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (5, 1)
reverse = nt.reverse_transform(transformed)
assert list(reverse['a']) == [1, 2, 1, 2, 1]
def test_int_nan_not_model_missing_values(self):
data = pd.DataFrame([1, 2, 1, 2, 1, np.nan], columns=['a'])
column = 'a'
nt = FloatFormatter()
nt.fit(data, column)
transformed = nt.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (6, 1)
reverse = nt.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=2)
class TestGaussianNormalizer:
def test_stats(self):
data = pd.DataFrame(np.random.normal(loc=4, scale=4, size=1000), columns=['a'])
column = 'a'
ct = GaussianNormalizer()
ct.fit(data, column)
transformed = ct.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (1000, 1)
np.testing.assert_almost_equal(transformed['a.value'].mean(), 0, decimal=1)
np.testing.assert_almost_equal(transformed['a.value'].std(), 1, decimal=1)
reverse = ct.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=1)
def test_model_missing_values(self):
data = pd.DataFrame([1, 2, 1, 2, np.nan, 1], columns=['a'])
column = 'a'
ct = GaussianNormalizer(model_missing_values=True)
ct.fit(data, column)
transformed = ct.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (6, 2)
assert list(transformed.iloc[:, 1]) == [0, 0, 0, 0, 1, 0]
reverse = ct.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=2)
def test_not_model_missing_values(self):
random_state = np.random.get_state()
np.random.set_state(np.random.RandomState(6).get_state())
data = pd.DataFrame([1, 2, 1, 2, np.nan, 1], columns=['a'])
column = 'a'
ct = GaussianNormalizer(model_missing_values=False)
ct.fit(data, column)
transformed = ct.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (6, 1)
reverse = ct.reverse_transform(transformed)
expected = pd.DataFrame([1, 2, 1, np.nan, np.nan, 1], columns=['a'])
pd.testing.assert_frame_equal(reverse, expected)
np.random.set_state(random_state)
def test_int(self):
data = pd.DataFrame([1, 2, 1, 2, 1], columns=['a'])
column = 'a'
ct = GaussianNormalizer()
ct.fit(data, column)
transformed = ct.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (5, 1)
reverse = ct.reverse_transform(transformed)
assert list(reverse['a']) == [1, 2, 1, 2, 1]
def test_int_nan(self):
data = pd.DataFrame([1, 2, 1, 2, 1, np.nan], columns=['a'])
column = 'a'
ct = GaussianNormalizer(model_missing_values=True)
ct.fit(data, column)
transformed = ct.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (6, 2)
reverse = ct.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=2)
class TestClusterBasedNormalizer:
def generate_data(self):
data1 = np.random.normal(loc=5, scale=1, size=100)
data2 = np.random.normal(loc=-5, scale=1, size=100)
data = np.concatenate([data1, data2])
return pd.DataFrame(data, columns=['col'])
def test_dataframe(self):
data = self.generate_data()
column = 'col'
bgmm_transformer = ClusterBasedNormalizer()
bgmm_transformer.fit(data, column)
transformed = bgmm_transformer.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (200, 2)
assert all(isinstance(x, float) for x in transformed['col.normalized'])
assert all(isinstance(x, float) for x in transformed['col.component'])
reverse = bgmm_transformer.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=1)
def test_some_nulls(self):
random_state = np.random.get_state()
np.random.set_state(np.random.RandomState(10).get_state())
data = self.generate_data()
mask = np.random.choice([1, 0], data.shape, p=[.1, .9]).astype(bool)
data[mask] = np.nan
column = 'col'
bgmm_transformer = ClusterBasedNormalizer(model_missing_values=True)
bgmm_transformer.fit(data, column)
transformed = bgmm_transformer.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert transformed.shape == (200, 3)
assert all(isinstance(x, float) for x in transformed['col.normalized'])
assert all(isinstance(x, float) for x in transformed['col.component'])
assert all(isinstance(x, float) for x in transformed['col.is_null'])
reverse = bgmm_transformer.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=1)
np.random.set_state(random_state)
def test_data_different_sizes(self):
data = np.concatenate([
np.random.normal(loc=5, scale=1, size=100),
np.random.normal(loc=100, scale=1, size=500),
])
data = pd.DataFrame(data, columns=['col'])
column = 'col'
bgmm_transformer = ClusterBasedNormalizer()
bgmm_transformer.fit(data, column)
transformed = bgmm_transformer.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert all(isinstance(x, float) for x in transformed['col.normalized'])
assert all(isinstance(x, float) for x in transformed['col.component'])
reverse = bgmm_transformer.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=1)
def test_multiple_components(self):
random_state = np.random.get_state()
np.random.set_state(np.random.RandomState(10).get_state())
data = np.concatenate([
np.random.normal(loc=5, scale=0.02, size=300),
np.random.normal(loc=-4, scale=0.1, size=1000),
np.random.normal(loc=-180, scale=3, size=1500),
np.random.normal(loc=100, scale=10, size=500),
])
data = pd.DataFrame(data, columns=['col'])
data = data.sample(frac=1).reset_index(drop=True)
column = 'col'
bgmm_transformer = ClusterBasedNormalizer()
bgmm_transformer.fit(data, column)
transformed = bgmm_transformer.transform(data)
assert isinstance(transformed, pd.DataFrame)
assert all(isinstance(x, float) for x in transformed['col.normalized'])
assert all(isinstance(x, float) for x in transformed['col.component'])
reverse = bgmm_transformer.reverse_transform(transformed)
np.testing.assert_array_almost_equal(reverse, data, decimal=1)
np.random.set_state(random_state)
| 35.689796
| 97
| 0.641926
| 1,078
| 8,744
| 5.081633
| 0.106679
| 0.052209
| 0.00931
| 0.056955
| 0.865462
| 0.846477
| 0.822746
| 0.822746
| 0.803395
| 0.760679
| 0
| 0.027346
| 0.234675
| 8,744
| 244
| 98
| 35.836066
| 0.791243
| 0
| 0
| 0.752809
| 0
| 0
| 0.020471
| 0
| 0
| 0
| 0
| 0
| 0.280899
| 1
| 0.078652
| false
| 0
| 0.016854
| 0
| 0.117978
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb9e8e28fc8b178cb17120bdf7f8c033665aa472
| 5,454
|
py
|
Python
|
Python 3 Programming/course 3/course_3_project.py
|
ElizaLo/Practice-Python
|
81cc82b4fbe68c13647f18ea659c7ef1025ec951
|
[
"MIT"
] | 5
|
2020-07-20T10:57:28.000Z
|
2021-12-09T01:54:59.000Z
|
Python 3 Programming/course 3/course_3_project.py
|
ElizaLo/Practice
|
81cc82b4fbe68c13647f18ea659c7ef1025ec951
|
[
"MIT"
] | 1
|
2020-10-02T15:26:57.000Z
|
2020-10-02T15:26:57.000Z
|
Python 3 Programming/course 3/course_3_project.py
|
ElizaLo/Practice
|
81cc82b4fbe68c13647f18ea659c7ef1025ec951
|
[
"MIT"
] | 5
|
2020-06-06T14:16:01.000Z
|
2021-01-27T17:38:32.000Z
|
import requests_with_caching
import json
def get_movies_from_tastedive(title):
url = 'https://tastedive.com/api/similar'
param = {}
param['q']= title
param['type']= 'movies'
param['limit']= 5
this_page_cache = requests_with_caching.get(url, params=param)
return json.loads(this_page_cache.text)
# some invocations that we use in the automated tests; uncomment these if you are getting errors and want better error messages
get_movies_from_tastedive("Bridesmaids")
get_movies_from_tastedive("Black Panther")
import requests_with_caching
import json
def get_movies_from_tastedive(title):
url = 'https://tastedive.com/api/similar'
param = {}
param['q']= title
param['type']= 'movies'
param['limit']= 5
this_page_cache = requests_with_caching.get(url, params=param)
return json.loads(this_page_cache.text)
def extract_movie_titles(d):
return [i['Name'] for i in d['Similar']['Results']]
# some invocations that we use in the automated tests; uncomment these if you are getting errors and want better error messages
#extract_movie_titles(get_movies_from_tastedive("Tony Bennett"))
#extract_movie_titles(get_movies_from_tastedive("Black Panther"))
import requests_with_caching
import json
def get_movies_from_tastedive(title):
url = 'https://tastedive.com/api/similar'
param = {}
param['q']= title
param['type']= 'movies'
param['limit']= 5
this_page_cache = requests_with_caching.get(url, params=param)
return json.loads(this_page_cache.text)
def extract_movie_titles(d):
return [i['Name'] for i in d['Similar']['Results']]
def get_related_titles(lst):
l = []
for title in lst:
movies = get_movies_from_tastedive(title)
titles = extract_movie_titles(movies)
l1 = [l.append(movie) for movie in titles if movie not in l]
return l
# some invocations that we use in the automated tests; uncomment these if you are getting errors and want better error messages
#extract_movie_titles(get_movies_from_tastedive("Tony Bennett"))
#extract_movie_titles(get_movies_from_tastedive("Black Panther"))
# some invocations that we use in the automated tests; uncomment these if you are getting errors and want better error messages
#get_related_titles(["Black Panther", "Captain Marvel"])
# get_related_titles([])
import requests_with_caching
import json
def get_movie_data(movie_title):
url = 'http://www.omdbapi.com/'
param = {'t': movie_title, 'r': 'json'}
this_page_cache = requests_with_caching.get(url, params=param)
return json.loads(this_page_cache.text)
# some invocations that we use in the automated tests; uncomment these if you are getting errors and want better error messages
# get_movie_data("Venom")
# get_movie_data("Baby Mama")
import requests_with_caching
import json
def get_movie_data(movie_title):
url = 'http://www.omdbapi.com/'
param = {'t': movie_title, 'r': 'json'}
this_page_cache = requests_with_caching.get(url, params=param)
#print(json.loads(this_page_cache.text))
return json.loads(this_page_cache.text)
def get_movie_rating(d):
movie_ratings = d['Ratings']
for rate in movie_ratings:
if rate['Source'] == 'Rotten Tomatoes':
print(rate['Value'])
return int(rate['Value'][:-1])
return 0
# some invocations that we use in the automated tests; uncomment these if you are getting errors and want better error messages
#get_movie_data("Black Panther")
#get_movie_data("Baby Mama")
#get_movie_data('Deadpool 2')
# some invocations that we use in the automated tests; uncomment these if you are getting errors and want better error messages
# get_movie_rating(get_movie_data("Deadpool 2"))
import requests_with_caching
import json
def get_movies_from_tastedive(title):
url = 'https://tastedive.com/api/similar'
param = {}
param['q']= title
param['type']= 'movies'
param['limit']= 5
this_page_cache = requests_with_caching.get(url, params=param)
return json.loads(this_page_cache.text)
def extract_movie_titles(d):
return [i['Name'] for i in d['Similar']['Results']]
def get_related_titles(lst):
l = []
for title in lst:
movies = get_movies_from_tastedive(title)
titles = extract_movie_titles(movies)
l1 = [l.append(movie) for movie in titles if movie not in l]
return l
def get_movie_data(movie_title):
url = 'http://www.omdbapi.com/'
param = {'t': movie_title, 'r': 'json'}
this_page_cache = requests_with_caching.get(url, params=param)
#print(json.loads(this_page_cache.text))
return json.loads(this_page_cache.text)
def get_movie_rating(d):
movie_ratings = d['Ratings']
for rate in movie_ratings:
if rate['Source'] == 'Rotten Tomatoes':
print(rate['Value'])
return int(rate['Value'][:-1])
return 0
def get_sorted_recommendations(lst_movie_titles):
new_lst = get_related_titles(lst_movie_titles)
new_d = {}
for title in new_lst:
data = get_movie_data(title)
rate = get_movie_rating(data)
new_d[title] = rate
#print(new_d)
return [i[0] for i in sorted(new_d.items(), key=lambda item: (item[1], item[0]), reverse=True)]
# some invocations that we use in the automated tests; uncomment these if you are getting errors and want better error messages
# get_sorted_recommendations(["Bridesmaids", "Sherlock Holmes"])
| 30.640449
| 127
| 0.715438
| 801
| 5,454
| 4.654182
| 0.128589
| 0.034335
| 0.055794
| 0.070815
| 0.898605
| 0.876609
| 0.876609
| 0.876609
| 0.876609
| 0.876609
| 0
| 0.003337
| 0.175834
| 5,454
| 177
| 128
| 30.813559
| 0.826029
| 0.307481
| 0
| 0.902913
| 0
| 0
| 0.116533
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.145631
| false
| 0
| 0.116505
| 0.029126
| 0.427184
| 0.019417
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eba4c6f7c4f94f626457879b7e1885eea47df875
| 6,209
|
py
|
Python
|
eparhiapp/models.py
|
varkon/eparhiaodua
|
f2a2d6fd3c89748712e38f4035a720c6a7ad7fe3
|
[
"Apache-2.0"
] | 2
|
2018-04-24T10:41:11.000Z
|
2018-04-24T10:41:15.000Z
|
eparhiapp/models.py
|
varkon/eparhia
|
f2a2d6fd3c89748712e38f4035a720c6a7ad7fe3
|
[
"Apache-2.0"
] | null | null | null |
eparhiapp/models.py
|
varkon/eparhia
|
f2a2d6fd3c89748712e38f4035a720c6a7ad7fe3
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
from django.utils import timezone
from tinymce.models import HTMLField
from filebrowser.fields import FileBrowseField
from eparhiapp.apps import transliterate
# Create your models here.
class Patriarch(models.Model):
    """Single-page article about the Patriarch.

    ``link`` is the URL slug; when left empty it is auto-generated from
    ``title`` via :func:`transliterate` on first save (see ``createlink``).
    """
    title = models.CharField(max_length=255, verbose_name='Заголовок')
    annonce = HTMLField(verbose_name='Анонс (не обовьязково - зараз не використовується)', null=True, blank=True)
    body = HTMLField(verbose_name='Повний текст')
    link = models.CharField(max_length=255, verbose_name='Посилання', unique=True, blank=True, default="patriarch")
    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    created_date = models.DateTimeField(
        default=timezone.now)
    published_date = models.DateTimeField(
        blank=True, null=True)

    class Meta:
        verbose_name = 'Патриарх'
        verbose_name_plural = 'Патриарх'

    def save(self, *args, **kwargs):
        """Ensure ``link`` is populated before persisting."""
        self.createlink()
        super(Patriarch, self).save(*args, **kwargs)

    def publish(self):
        """Mark the article as published now and save it."""
        self.published_date = timezone.now()
        # Delegate to createlink() instead of duplicating its logic inline.
        self.createlink()
        self.save()

    def createlink(self):
        """Derive ``link`` from ``title`` when it has not been set."""
        if self.link == "":
            self.link = transliterate(self.title)

    def __str__(self):
        return self.title
class Archbishop(models.Model):
    """Single-page article about the Archbishop.

    ``link`` is the URL slug; when left empty it is auto-generated from
    ``title`` via :func:`transliterate` on first save (see ``createlink``).
    """
    title = models.CharField(max_length=255, verbose_name='Заголовок')
    annonce = HTMLField(verbose_name='Анонс (не обовьязково - зараз не використовується)', null=True, blank=True)
    body = HTMLField(verbose_name='Повний текст')
    link = models.CharField(max_length=255, verbose_name='Посилання', unique=True, blank=True, default="archbishop")
    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    created_date = models.DateTimeField(
        default=timezone.now)
    published_date = models.DateTimeField(
        blank=True, null=True)

    class Meta:
        verbose_name = 'Єпископ'
        verbose_name_plural = 'Єпископи'

    def save(self, *args, **kwargs):
        """Ensure ``link`` is populated before persisting."""
        self.createlink()
        super(Archbishop, self).save(*args, **kwargs)

    def publish(self):
        """Mark the article as published now and save it."""
        self.published_date = timezone.now()
        # Delegate to createlink() instead of duplicating its logic inline.
        self.createlink()
        self.save()

    def createlink(self):
        """Derive ``link`` from ``title`` when it has not been set."""
        if self.link == "":
            self.link = transliterate(self.title)

    def __str__(self):
        return self.title
class Primat(models.Model):
    """Single-page article about the Primate.

    ``link`` is the URL slug; when left empty it is auto-generated from
    ``title`` via :func:`transliterate` on first save (see ``createlink``).
    """
    title = models.CharField(max_length=255, verbose_name='Заголовок')
    annonce = HTMLField(verbose_name='Анонс (не обовьязково - зараз не використовується)', null=True, blank=True)
    body = HTMLField(verbose_name='Повний текст')
    link = models.CharField(max_length=255, verbose_name='Посилання', unique=True, blank=True, default="primat")
    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    created_date = models.DateTimeField(
        default=timezone.now)
    published_date = models.DateTimeField(
        blank=True, null=True)

    class Meta:
        verbose_name = 'Предстоятель'
        verbose_name_plural = 'Предстоятелі'

    def save(self, *args, **kwargs):
        """Ensure ``link`` is populated before persisting."""
        self.createlink()
        super(Primat, self).save(*args, **kwargs)

    def publish(self):
        """Mark the article as published now and save it."""
        self.published_date = timezone.now()
        # Delegate to createlink() instead of duplicating its logic inline.
        self.createlink()
        self.save()

    def createlink(self):
        """Derive ``link`` from ``title`` when it has not been set."""
        if self.link == "":
            self.link = transliterate(self.title)

    def __str__(self):
        return self.title
class About(models.Model):
    """Single-page contact/about article.

    ``link`` is the URL slug; when left empty it is auto-generated from
    ``title`` via :func:`transliterate` on first save (see ``createlink``).
    """
    title = models.CharField(max_length=255, verbose_name='Заголовок')
    annonce = HTMLField(verbose_name='Анонс (не обовьязково - зараз не використовується)', null=True, blank=True)
    body = HTMLField(verbose_name='Повний текст')
    link = models.CharField(max_length=255, verbose_name='Посилання', unique=True, blank=True, default="about")
    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    created_date = models.DateTimeField(
        default=timezone.now)
    published_date = models.DateTimeField(
        blank=True, null=True)

    class Meta:
        verbose_name = 'Контакт'
        verbose_name_plural = 'Контакти'

    def save(self, *args, **kwargs):
        """Ensure ``link`` is populated before persisting."""
        self.createlink()
        super(About, self).save(*args, **kwargs)

    def publish(self):
        """Mark the article as published now and save it."""
        self.published_date = timezone.now()
        # Delegate to createlink() instead of duplicating its logic inline.
        self.createlink()
        self.save()

    def createlink(self):
        """Derive ``link`` from ``title`` when it has not been set."""
        if self.link == "":
            self.link = transliterate(self.title)

    def __str__(self):
        return self.title
class Benefactor(models.Model):
    """Single-page article about benefactors.

    ``link`` is the URL slug; when left empty it is auto-generated from
    ``title`` via :func:`transliterate` on first save (see ``createlink``).
    """
    title = models.CharField(max_length=255, verbose_name='Заголовок')
    annonce = HTMLField(verbose_name='Анонс (не обовьязково - зараз не використовується)', null=True, blank=True)
    body = HTMLField(verbose_name='Повний текст')
    link = models.CharField(max_length=255, verbose_name='Посилання', unique=True, blank=True, default="benefactors")
    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    created_date = models.DateTimeField(
        default=timezone.now)
    published_date = models.DateTimeField(
        blank=True, null=True)

    class Meta:
        verbose_name = 'Благодійник'
        verbose_name_plural = 'Благодійники'

    def save(self, *args, **kwargs):
        """Ensure ``link`` is populated before persisting."""
        self.createlink()
        super(Benefactor, self).save(*args, **kwargs)

    def publish(self):
        """Mark the article as published now and save it."""
        self.published_date = timezone.now()
        # Delegate to createlink() instead of duplicating its logic inline.
        self.createlink()
        self.save()

    def createlink(self):
        """Derive ``link`` from ``title`` when it has not been set."""
        if self.link == "":
            self.link = transliterate(self.title)

    def __str__(self):
        return self.title
| 35.890173
| 121
| 0.647769
| 709
| 6,209
| 5.552891
| 0.125529
| 0.08382
| 0.04572
| 0.06096
| 0.876302
| 0.876302
| 0.876302
| 0.876302
| 0.876302
| 0.876302
| 0
| 0.00631
| 0.234337
| 6,209
| 173
| 122
| 35.890173
| 0.821834
| 0.03205
| 0
| 0.777778
| 0
| 0
| 0.096436
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.148148
| false
| 0
| 0.037037
| 0.037037
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
ebda3c319179615be181d793f3d044d74518c9f0
| 1,200
|
py
|
Python
|
appCore/apps/replica/uploads.py
|
jadedgamer/alifewellplayed.com
|
b7b3dee8d3b9526c7cfe77078570a29394ef7e76
|
[
"MIT"
] | 4
|
2017-04-22T11:03:01.000Z
|
2018-01-16T22:28:15.000Z
|
appCore/apps/replica/uploads.py
|
alifewellplayed/alifewellplayed.com
|
b7b3dee8d3b9526c7cfe77078570a29394ef7e76
|
[
"MIT"
] | 10
|
2017-04-06T19:54:42.000Z
|
2017-11-07T06:53:10.000Z
|
appCore/apps/replica/uploads.py
|
alifewellplayed/alifewellplayed.com
|
b7b3dee8d3b9526c7cfe77078570a29394ef7e76
|
[
"MIT"
] | 1
|
2017-12-14T12:49:40.000Z
|
2017-12-14T12:49:40.000Z
|
import datetime
def upload_css(instance, filename):
    """Build the storage path for a CSS upload: ``css/<id>/<slug>.<ext>``."""
    extension = filename.rsplit('.', 1)[-1]
    renamed = "%s.%s" % (instance.slug, extension)
    return 'css/%s/%s' % (instance.id, renamed)
def upload_media(instance, filename):
    """Build the storage path for a media upload.

    The file is renamed to ``<slug>.<ext>`` and placed under
    ``media/<YYYY/MM/DD>/<id>/`` where the date component is *today's* date.

    NOTE(review): the original assigned ``instance.date_created`` to an
    unused local (removed here); the path deliberately(?) uses today's date
    instead — confirm that uploads should not be grouped by creation date.
    """
    ext = filename.split('.')[-1]
    filename = "%s.%s" % (instance.slug, ext)
    date_path = datetime.date.today().strftime("%Y/%m/%d")
    return 'media/%s/%s/%s' % (date_path, instance.id, filename)
def upload_media_md(instance, filename):
    """Build the storage path for a medium-size media upload.

    Same as :func:`upload_media` but the file is renamed to
    ``<slug>_md.<ext>``.

    NOTE(review): the original assigned ``instance.date_created`` to an
    unused local (removed here); the path uses today's date instead —
    confirm this is intended.
    """
    ext = filename.split('.')[-1]
    filename = "%s_md.%s" % (instance.slug, ext)
    date_path = datetime.date.today().strftime("%Y/%m/%d")
    return 'media/%s/%s/%s' % (date_path, instance.id, filename)
def upload_media_sm(instance, filename):
    """Build the storage path for a small-size media upload.

    Same as :func:`upload_media` but the file is renamed to
    ``<slug>_sm.<ext>``.

    NOTE(review): the original assigned ``instance.date_created`` to an
    unused local (removed here); the path uses today's date instead —
    confirm this is intended.
    """
    ext = filename.split('.')[-1]
    filename = "%s_sm.%s" % (instance.slug, ext)
    date_path = datetime.date.today().strftime("%Y/%m/%d")
    return 'media/%s/%s/%s' % (date_path, instance.id, filename)
| 34.285714
| 68
| 0.644167
| 159
| 1,200
| 4.735849
| 0.176101
| 0.023904
| 0.10093
| 0.143426
| 0.918991
| 0.887118
| 0.887118
| 0.887118
| 0.775564
| 0.775564
| 0
| 0.00404
| 0.175
| 1,200
| 34
| 69
| 35.294118
| 0.756566
| 0.06
| 0
| 0.703704
| 0
| 0
| 0.093333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.148148
| false
| 0
| 0.037037
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ccfaf51f2e72691e9b679a536ac64db983269d8c
| 12,175
|
py
|
Python
|
An Empirical Comparison of Supervised Learning Algorithms/utils.py
|
syeehyn/Machine-Learning-in-Practice
|
97ce9f767fa1d291a03b3b39899965382b5e88cf
|
[
"MIT"
] | 1
|
2019-08-07T02:34:22.000Z
|
2019-08-07T02:34:22.000Z
|
An Empirical Comparison of Supervised Learning Algorithms/utils.py
|
syeehyn/Machine-Learning-in-Practice
|
97ce9f767fa1d291a03b3b39899965382b5e88cf
|
[
"MIT"
] | null | null | null |
An Empirical Comparison of Supervised Learning Algorithms/utils.py
|
syeehyn/Machine-Learning-in-Practice
|
97ce9f767fa1d291a03b3b39899965382b5e88cf
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
from sklearn.preprocessing import OneHotEncoder
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings('ignore')
import seaborn as sns
from sklearn.metrics import accuracy_score
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn import preprocessing
def classifier_compare(X, y, clf_1, clf_2, clf_3, split_size):
    """Tune and compare three classifiers across several train/test partitions.

    For each test-set fraction in ``split_size`` the data is split three
    times (random_state 0, 1, 2).  Each classifier is tuned with a 5-fold
    GridSearchCV; train/validation/test accuracies and the best parameters
    of every run are collected, and heatmaps of the grid-search scores are
    displayed for each fit.

    :param X: feature matrix
    :param y: target vector
    :param clf_1: (estimator, param_grid) pair for the first classifier
    :param clf_2: (estimator, param_grid) pair for the second classifier
    :param clf_3: (estimator, param_grid) pair for the third classifier
    :param split_size: iterable of test-set fractions, e.g. [0.2, 0.5]
    :returns: nested dict
        {classifier class name:
            {partition label ('20/80', ...):
                {'train' | 'validation' | 'test' | 'best_params': list}}}
    """
    def draw_heatmap_linear(acc, acc_desc, C_list, parameter):
        # One-column heatmap of accuracies over the (single) tuned
        # hyper-parameter axis — assumes param_grid has one parameter.
        plt.figure(figsize = (2,4))
        ax = sns.heatmap(acc, annot=True, fmt='.3f', yticklabels = C_list, xticklabels=[])
        ax.collections[0].colorbar.set_label("accuracy")
        ax.set(ylabel=parameter)
        plt.title(acc_desc + ' w.r.t '+ parameter)
        sns.set_style("whitegrid", {'axes.grid' : False})
        plt.show()
    # NOTE(review): _df is built but never used afterwards.
    _df = pd.DataFrame(index = [type(clf_1[0]).__name__, type(clf_2[0]).__name__, type(clf_3[0]).__name__])
    # Human-readable partition labels, e.g. test_size 0.2 -> '20/80'.
    partitions = [str(int(i * 100)) + '/' + str(int(100 - i * 100)) for i in split_size]
    clf_1_final = {i : {} for i in partitions}
    clf_2_final = {i : {} for i in partitions}
    clf_3_final = {i : {} for i in partitions}
    for i in split_size:
        key = str(int(i * 100)) + '/' + str(int(100 - i * 100))
        clf_1_result = {'train': [], 'validation': [],
                        'test' : [],'best_params': []}
        clf_2_result = {'train': [], 'validation': [],
                        'test' : [],'best_params': []}
        clf_3_result = {'train': [], 'validation': [],
                        'test' : [],'best_params': []}
        training_acc_clf_1, validation_acc_clf_1, testing_acc_clf_1 = [], [], []
        training_acc_clf_2, validation_acc_clf_2, testing_acc_clf_2 = [], [], []
        training_acc_clf_3, validation_acc_clf_3, testing_acc_clf_3 = [], [], []
        best_param_1, best_param_2, best_param_3= [], [], []
        # Three repetitions with different random splits.
        for rs in range(3):
            # split data
            X_train, X_test, Y_train, Y_test = train_test_split(X, y, test_size = i, random_state = rs)
            # cv on the data (classifier 1)
            clf = GridSearchCV(clf_1[0], clf_1[1], return_train_score= True, cv = 5, n_jobs= -1)
            clf.fit(X_train, Y_train)
            train_scores = clf.cv_results_['mean_train_score']
            val_scores = clf.cv_results_['mean_test_score']
            # record training/validation accuracy at the best grid point
            training_acc_clf_1.append(train_scores[clf.best_index_])
            validation_acc_clf_1.append(val_scores[clf.best_index_])
            best_param_1.append(clf.best_params_)
            Y_pred = clf.predict(X_test)
            testing_acc_clf_1.append(accuracy_score(Y_test, Y_pred))
            # draw heatmap
            draw_heatmap_linear(train_scores.reshape(-1,1), \
                                type(clf_1[0]).__name__ + "train accuracy", \
                                list(clf_1[1].values())[0],\
                                list(clf_1[1].keys())[0])
            draw_heatmap_linear(val_scores.reshape(-1,1), \
                                type(clf_1[0]).__name__ + "val accuracy",\
                                list(clf_1[1].values())[0],\
                                list(clf_1[1].keys())[0])
            # same procedure for classifier 2
            clf = GridSearchCV(clf_2[0], clf_2[1], return_train_score= True, cv = 5, n_jobs= -1)
            clf.fit(X_train, Y_train)
            train_scores = clf.cv_results_['mean_train_score']
            val_scores = clf.cv_results_['mean_test_score']
            # record training/validation accuracy at the best grid point
            training_acc_clf_2.append(train_scores[clf.best_index_])
            validation_acc_clf_2.append(val_scores[clf.best_index_])
            best_param_2.append(clf.best_params_)
            Y_pred = clf.predict(X_test)
            testing_acc_clf_2.append(accuracy_score(Y_test, Y_pred))
            # draw heatmap
            draw_heatmap_linear(train_scores.reshape(-1,1), \
                                type(clf_2[0]).__name__ + "train accuracy", \
                                list(clf_2[1].values())[0], \
                                list(clf_2[1].keys())[0])
            draw_heatmap_linear(val_scores.reshape(-1,1), \
                                type(clf_2[0]).__name__ + "val accuracy", \
                                list(clf_2[1].values())[0], \
                                list(clf_2[1].keys())[0])
            # decision tree part
            # cv on the data
            clf = GridSearchCV(clf_3[0], clf_3[1], return_train_score= True, cv = 5, n_jobs= -1)
            clf.fit(X_train, Y_train)
            train_scores = clf.cv_results_['mean_train_score']
            val_scores = clf.cv_results_['mean_test_score']
            # record training/validation accuracy at the best grid point
            training_acc_clf_3.append(train_scores[clf.best_index_])
            validation_acc_clf_3.append(val_scores[clf.best_index_])
            best_param_3.append(clf.best_params_)
            Y_pred = clf.predict(X_test)
            testing_acc_clf_3.append(accuracy_score(Y_test, Y_pred))
            # draw heatmap
            draw_heatmap_linear(train_scores.reshape(-1,1), \
                                type(clf_3[0]).__name__ + "train accuracy", \
                                list(clf_3[1].values())[0], \
                                list(clf_3[1].keys())[0])
            draw_heatmap_linear(val_scores.reshape(-1,1), \
                                type(clf_3[0]).__name__ + "val accuracy", \
                                list(clf_3[1].values())[0], \
                                list(clf_3[1].keys())[0])
        # Collect the three repetitions for this partition.
        clf_1_result['train'] = training_acc_clf_1
        clf_1_result['validation'] = validation_acc_clf_1
        clf_1_result['test'] = testing_acc_clf_1
        clf_1_result['best_params'] = best_param_1
        clf_2_result['train'] = training_acc_clf_2
        clf_2_result['validation'] = validation_acc_clf_2
        clf_2_result['test'] = testing_acc_clf_2
        clf_2_result['best_params'] = best_param_2
        clf_3_result['train'] = training_acc_clf_3
        clf_3_result['validation'] = validation_acc_clf_3
        clf_3_result['test'] = testing_acc_clf_3
        clf_3_result['best_params'] = best_param_3
        clf_1_final[key] = clf_1_result
        clf_2_final[key] = clf_2_result
        clf_3_final[key] = clf_3_result
    return {type(clf_1[0]).__name__: clf_1_final,
            type(clf_2[0]).__name__: clf_2_final,
            type(clf_3[0]).__name__: clf_3_final}
def generate_results(output):
    """Flatten the dict from ``classifier_compare`` into a summary DataFrame.

    :param output: nested dict {classifier name: {partition label:
        {'train'|'validation'|'test'|'best_params': list}}} with exactly
        three classifiers and three partitions each (the index-suffix list
        below assumes 9 rows).
    :returns: DataFrame indexed by '<partition> <classifier tag>' with mean
        accuracies, accuracy variances, and the unique best parameters.
    """
    df = pd.Series(output).apply(lambda x: pd.DataFrame(x).T)
    # DataFrame.append was removed in pandas 2.0; concat is the supported
    # equivalent of the original chained .append() calls.
    out = pd.concat([df.iloc[0], df.iloc[1], df.iloc[2]])
    # Tag each partition row with its classifier; order matches the dict
    # insertion order of `output` (RF, LOGREG, BST-DT).
    new_ind = pd.Series(list(out.index)) + pd.Series([
        ' RF', ' RF', ' RF',
        ' LOGREG ', ' LOGREG ', ' LOGREG ',
        ' BST-DT ', ' BST-DT ', ' BST-DT '
    ])
    out.index = new_ind
    # Collapse the per-run best_params dicts into {param: unique values}.
    out['best_params'] = out['best_params'].apply(
        lambda x: {list(x[0].keys())[0]: pd.Series([list(i.values())[0] for i in x]).unique().tolist()})
    out['mean_train_acc'] = out['train'].apply(lambda x: round(np.mean(x), 4))
    out['mean_validation_acc'] = out['validation'].apply(lambda x: round(np.mean(x), 4))
    out['mean_test_acc'] = out['test'].apply(lambda x: round(np.mean(x), 4))
    out['train_acc_var'] = out['train'].apply(lambda x: round(np.std(x) ** 2, 4))
    out['test_acc_var'] = out['test'].apply(lambda x: round(np.std(x) ** 2, 4))
    return out[['mean_train_acc', 'mean_validation_acc', 'mean_test_acc',
                'train_acc_var', 'test_acc_var', 'best_params']]
def classifier_compare_var(X, y, clf_1, clf_2, clf_3, split_size):
    """Tune and compare three classifiers across partitions, without plotting.

    Identical to :func:`classifier_compare` except that no heatmaps are
    drawn: for each test-set fraction in ``split_size`` the data is split
    three times (random_state 0, 1, 2), each classifier is tuned with a
    5-fold GridSearchCV, and accuracies plus best parameters are collected.

    :param X: feature matrix
    :param y: target vector
    :param clf_1: (estimator, param_grid) pair for the first classifier
    :param clf_2: (estimator, param_grid) pair for the second classifier
    :param clf_3: (estimator, param_grid) pair for the third classifier
    :param split_size: iterable of test-set fractions, e.g. [0.2, 0.5]
    :returns: nested dict
        {classifier class name:
            {partition label ('20/80', ...):
                {'train' | 'validation' | 'test' | 'best_params': list}}}
    """
    # NOTE(review): _df is built but never used afterwards.
    _df = pd.DataFrame(index = [type(clf_1[0]).__name__, type(clf_2[0]).__name__, type(clf_3[0]).__name__])
    # Human-readable partition labels, e.g. test_size 0.2 -> '20/80'.
    partitions = [str(int(i * 100)) + '/' + str(int(100 - i * 100)) for i in split_size]
    clf_1_final = {i : {} for i in partitions}
    clf_2_final = {i : {} for i in partitions}
    clf_3_final = {i : {} for i in partitions}
    for i in split_size:
        key = str(int(i * 100)) + '/' + str(int(100 - i * 100))
        clf_1_result = {'train': [], 'validation': [],
                        'test' : [],'best_params': []}
        clf_2_result = {'train': [], 'validation': [],
                        'test' : [],'best_params': []}
        clf_3_result = {'train': [], 'validation': [],
                        'test' : [],'best_params': []}
        training_acc_clf_1, validation_acc_clf_1, testing_acc_clf_1 = [], [], []
        training_acc_clf_2, validation_acc_clf_2, testing_acc_clf_2 = [], [], []
        training_acc_clf_3, validation_acc_clf_3, testing_acc_clf_3 = [], [], []
        best_param_1, best_param_2, best_param_3= [], [], []
        # Three repetitions with different random splits.
        for rs in range(3):
            # split data
            X_train, X_test, Y_train, Y_test = train_test_split(X, y, test_size = i, random_state = rs)
            # cv on the data (classifier 1)
            clf = GridSearchCV(clf_1[0], clf_1[1], return_train_score= True, cv = 5, n_jobs= -1)
            clf.fit(X_train, Y_train)
            train_scores = clf.cv_results_['mean_train_score']
            val_scores = clf.cv_results_['mean_test_score']
            # record training/validation accuracy at the best grid point
            training_acc_clf_1.append(train_scores[clf.best_index_])
            validation_acc_clf_1.append(val_scores[clf.best_index_])
            best_param_1.append(clf.best_params_)
            Y_pred = clf.predict(X_test)
            testing_acc_clf_1.append(accuracy_score(Y_test, Y_pred))
            # same procedure for classifier 2
            clf = GridSearchCV(clf_2[0], clf_2[1], return_train_score= True, cv = 5, n_jobs= -1)
            clf.fit(X_train, Y_train)
            train_scores = clf.cv_results_['mean_train_score']
            val_scores = clf.cv_results_['mean_test_score']
            # record training/validation accuracy at the best grid point
            training_acc_clf_2.append(train_scores[clf.best_index_])
            validation_acc_clf_2.append(val_scores[clf.best_index_])
            best_param_2.append(clf.best_params_)
            Y_pred = clf.predict(X_test)
            testing_acc_clf_2.append(accuracy_score(Y_test, Y_pred))
            # decision tree part
            # cv on the data
            clf = GridSearchCV(clf_3[0], clf_3[1], return_train_score= True, cv = 5, n_jobs= -1)
            clf.fit(X_train, Y_train)
            train_scores = clf.cv_results_['mean_train_score']
            val_scores = clf.cv_results_['mean_test_score']
            # record training/validation accuracy at the best grid point
            training_acc_clf_3.append(train_scores[clf.best_index_])
            validation_acc_clf_3.append(val_scores[clf.best_index_])
            best_param_3.append(clf.best_params_)
            Y_pred = clf.predict(X_test)
            testing_acc_clf_3.append(accuracy_score(Y_test, Y_pred))
        # Collect the three repetitions for this partition.
        clf_1_result['train'] = training_acc_clf_1
        clf_1_result['validation'] = validation_acc_clf_1
        clf_1_result['test'] = testing_acc_clf_1
        clf_1_result['best_params'] = best_param_1
        clf_2_result['train'] = training_acc_clf_2
        clf_2_result['validation'] = validation_acc_clf_2
        clf_2_result['test'] = testing_acc_clf_2
        clf_2_result['best_params'] = best_param_2
        clf_3_result['train'] = training_acc_clf_3
        clf_3_result['validation'] = validation_acc_clf_3
        clf_3_result['test'] = testing_acc_clf_3
        clf_3_result['best_params'] = best_param_3
        clf_1_final[key] = clf_1_result
        clf_2_final[key] = clf_2_result
        clf_3_final[key] = clf_3_result
    return {type(clf_1[0]).__name__: clf_1_final,
            type(clf_2[0]).__name__: clf_2_final,
            type(clf_3[0]).__name__: clf_3_final}
| 50.729167
| 108
| 0.589815
| 1,672
| 12,175
| 3.881579
| 0.086124
| 0.049923
| 0.038829
| 0.033282
| 0.82265
| 0.813097
| 0.809399
| 0.804777
| 0.804777
| 0.794915
| 0
| 0.03519
| 0.28345
| 12,175
| 240
| 109
| 50.729167
| 0.708735
| 0.036632
| 0
| 0.744898
| 0
| 0
| 0.079406
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020408
| false
| 0
| 0.071429
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
69296eef6fbfd45d3870738b8f837c229c8917ba
| 71
|
py
|
Python
|
py/lukai/test_lukai.py
|
d4l3k/lukai
|
d9bce91a98367cb4daa8d99421885aa2816499ad
|
[
"MIT"
] | 23
|
2017-09-11T21:04:47.000Z
|
2022-03-22T14:36:19.000Z
|
py/lukai/test_lukai.py
|
d4l3k/lukai
|
d9bce91a98367cb4daa8d99421885aa2816499ad
|
[
"MIT"
] | 10
|
2019-12-16T20:59:48.000Z
|
2022-02-09T23:44:30.000Z
|
py/lukai/test_lukai.py
|
d4l3k/lukai
|
d9bce91a98367cb4daa8d99421885aa2816499ad
|
[
"MIT"
] | 4
|
2018-05-31T20:29:22.000Z
|
2019-03-15T02:45:14.000Z
|
import lukai
from lukai import tfjs, saver
def test_lukai():
    """Placeholder smoke test: passes as long as the lukai imports succeed."""
    return None
| 11.833333
| 29
| 0.732394
| 11
| 71
| 4.636364
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211268
| 71
| 5
| 30
| 14.2
| 0.910714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
694ce35005ca2c17552ba55642f5fd40323a6b69
| 81,163
|
py
|
Python
|
GenerateComparisonPresentation.py
|
atranel/resqdb
|
76b8a5089732ae63c867b734c5053908687122bc
|
[
"MIT"
] | null | null | null |
GenerateComparisonPresentation.py
|
atranel/resqdb
|
76b8a5089732ae63c867b734c5053908687122bc
|
[
"MIT"
] | null | null | null |
GenerateComparisonPresentation.py
|
atranel/resqdb
|
76b8a5089732ae63c867b734c5053908687122bc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
File name: GenerateComparisonPresentation.py
Package: resq
Written by: Marie Jankujova - jankujova.marie@fnusa.cz on 11-2018
Version: v1.0
Version comment: The first version of script which generate comaparison into presentation in PPTX format.
Description: This script is used to generate comparison charts into presentation.
"""
import pandas as pd
import sys
import os
import sqlite3
from datetime import datetime, date
import math
import pytz
import xlsxwriter
import csv
from pptx import Presentation
from pptx.chart.data import CategoryChartData, ChartData
from pptx.enum.shapes import MSO_SHAPE
from pptx.enum.chart import XL_CHART_TYPE, XL_TICK_MARK, XL_TICK_LABEL_POSITION, XL_LEGEND_POSITION, XL_LABEL_POSITION
from pptx.enum.text import MSO_AUTO_SIZE
from pptx.util import Cm, Pt, Inches
from pptx.dml.color import RGBColor
from pptx.enum.dml import MSO_LINE
from pptx.oxml.xmlchemy import OxmlElement
class GeneratePeriodCompPresentation:
    """Generate comparison graphs for national samples between two periods.

    :param ndf1: the dataframe containing calculated statistics for the first period
    :type ndf1: pandas dataframe
    :param ndf2: the dataframe containing calculated statistics for the second period
    :type ndf2: pandas dataframe
    :param name1: the name of the 1st period, eg. 2017
    :type name1: str
    :param name2: the name of the 2nd period, eg. 2018
    :type name2: str
    """
    def __init__(self, ndf1, ndf2, name1, name2):
        self.ndf1 = ndf1
        self.ndf2 = ndf2
        self.name1 = name1
        self.name2 = name2
        # Absolute paths to the bundled database and template presentation.
        script_dir = os.path.dirname(__file__)
        filename = "resq.db"
        self.abs_db_path = os.path.join(script_dir, "database", filename)
        master_pptx = "countries_comparison.pptx"
        self.master = os.path.normpath(os.path.join(script_dir, "backgrounds", master_pptx))
        self._generate_graphs()

    def _add_comparison_slide(self, prs, main_col, column_name, title, subtitle=""):
        """Merge one statistic from both periods and append a comparison chart slide.

        :param prs: the open Presentation the slide is added to
        :param main_col: the column identifying a site ('Site Name')
        :param column_name: the statistic to compare
        :param title: the chart title
        :param subtitle: optional chart subtitle ("" means no subtitle)
        """
        # Per-period statistics, sorted by the compared value.
        tmp_ndf1 = self.ndf1[[main_col, column_name]].sort_values([column_name], ascending=True)
        tmp_ndf2 = self.ndf2[[main_col, column_name]].sort_values([column_name], ascending=True)
        # Right merge keeps every site of the 2nd period; missing values become 0.
        tmp_df = pd.merge(tmp_ndf1, tmp_ndf2, how='right', on=['Site Name'])
        tmp_df.fillna(0, inplace=True)
        tmp_df = tmp_df.sort_values([main_col], ascending=True)
        legend = [self.name1, self.name2]
        GeneratePeriodCompGraph(
            df=tmp_df, presentation=prs, column_name=column_name, title=title,
            subtitle=subtitle, number_of_series=len(legend), legend=legend)

    def _generate_graphs(self):
        """Generate all graphs; the result is saved as comparison_two_periods.pptx."""
        prs = Presentation(self.master)  # Read template presentation
        # Title slide text.
        first_slide = prs.slides[0]
        shape = first_slide.shapes[5]
        text_frame = shape.text_frame
        first_slide_text = "Data Comparison"  # Set title
        p = text_frame.paragraphs[0]
        run = p.add_run()
        run.text = first_slide_text
        font = run.font
        font.name = 'Calibri Light'
        font.size = Pt(26)
        font.color.rgb = RGBColor(250,250,250)
        # if (self.country_name in ['Ukraine', 'Poland'] and len(df) > 2):
        #     main_col = 'Site ID'
        # else:
        main_col = 'Site Name'
        # One slide per compared statistic (previously four copy-pasted blocks).
        self._add_comparison_slide(prs, main_col, 'Total Patients',
                                   "Total patients enrolled")
        self._add_comparison_slide(prs, main_col, '% patients recanalized',
                                   "% patients receiving recanalization procedures",
                                   subtitle="Calculated out of number of IS")
        self._add_comparison_slide(prs, main_col, 'Median DTN (minutes)',
                                   "Median Door-to-Needle Time (DTN), in minutes")
        # BUG FIX: this slide was titled "Door-to-Needle"; DTG is Door-to-Groin.
        self._add_comparison_slide(prs, main_col, 'Median DTG (minutes)',
                                   "Median Door-to-Groin Time (DTG), in minutes")
        # Save the presentation into the current working directory.
        working_dir = os.getcwd()
        pptx = "comparison_two_periods.pptx"
        presentation_path = os.path.normpath(os.path.join(working_dir, pptx))
        prs.save(presentation_path)
class GeneratePeriodCompGraph:
    """Add one clustered-bar comparison chart slide to the given presentation.

    :param df: merged dataframe: the categories column followed by one value
        column per series (built in :class:`GeneratePeriodCompPresentation`)
    :type df: pandas dataframe
    :param presentation: the presentation the slide is appended to
    :type presentation: Presentation object
    :param column_name: the name of the column whose data are shown in the graph
    :type column_name: str
    :param title: the title of the graph
    :type title: str
    :param subtitle: the subtitle of the graph ("" hides the subtitle placeholder)
    :type subtitle: str
    :param number_of_series: the number of value columns to plot
    :type number_of_series: int
    :param legend: the legend labels, one per series
    :type legend: list of strings
    """
    def __init__(self, df, presentation, column_name, title, subtitle="", number_of_series=0, legend=None):
        self.df = df
        self.presentation = presentation
        self.title = title
        self.column_name = column_name
        self.number_of_series = number_of_series
        self.font_name = 'Roboto'
        self.legend = legend
        self.subtitle = subtitle
        self.category_font_size = Pt(10)
        self.data_label_font_size = Pt(11)
        self.categories_column = 'Site Name'
        self._create_column_clustered_barplot()

    def _get_length_of_legend(self, legend):
        """Return the total number of characters across all legend labels.

        Used to guess how many columns the legend needs.

        :param legend: the legend labels
        :type legend: list
        :returns: the summed label length
        """
        count = 0
        for i in legend:
            count = count + len(i)
        return count

    def _create_column_clustered_barplot(self):
        """Create the clustered barplot slide."""
        column_names = self.df.columns.tolist()
        index = column_names.index(self.categories_column)
        # Layout 11 is a custom layout with only the title placeholder configured
        # ('Agency FB', color RGBColor(43, 88, 173), size 24).
        slide = self.presentation.slides.add_slide(self.presentation.slide_layouts[11])
        title_placeholders = slide.shapes.title
        title_placeholders.text = self.title
        # If no subtitle is set, remove its placeholder from the page.
        if self.subtitle == "":
            subtitle = slide.placeholders[1]
            sp = subtitle.element
            sp.getparent().remove(sp)
        else:
            subtitle = slide.placeholders[1]
            subtitle.text = self.subtitle
        # Chart data: one category per site, one series per period.
        chart_data = ChartData()
        chart_data.categories = self.df[self.categories_column].tolist()
        chart_data.add_series(self.legend[0], self.df[column_names[index+1]].tolist())
        if (self.number_of_series >= 2):
            chart_data.add_series(self.legend[1], self.df[column_names[index+2]].tolist())
        # Chart placement on the slide.
        specs = {
            'height': Cm(16.5),
            'width': Cm(32),
            'left': Cm(0.7),
            'top': Cm(2)
        }
        chart = slide.shapes.add_chart(
            XL_CHART_TYPE.COLUMN_CLUSTERED, specs['left'], specs['top'], specs['width'], specs['height'], chart_data).chart
        # NOTE: removed unused locals from the original (`maximum = 0` and
        # `series = chart.series[0]`), which were never read.
        plot = chart.plots[0]
        plot.gap_width = 220
        plot.overlap = -25
        # Use the same color for every bar within a series.
        plot.vary_by_categories = False
        # Value (y) axis: tick-label font and light-grey gridlines.
        value_axis = chart.value_axis
        tick_labels = value_axis.tick_labels
        tick_labels.font.size = self.category_font_size
        tick_labels.font.name = self.font_name
        # NOTE(review): the original comment said "Don't show major gridlines",
        # but the code enables them (styled light grey); the code wins.
        value_axis.has_major_gridlines = True
        value_axis.major_gridlines.format.line.color.rgb = RGBColor(217, 217, 217)
        value_axis.major_gridlines.format.line.width = Pt(0.5)
        value_axis.major_tick_mark = XL_TICK_MARK.NONE
        value_axis.format.line.color.rgb = RGBColor(217, 217, 217)
        # Axis starts at zero; no explicit maximum.
        #value_axis.maximum_scale = ndf_maximum
        value_axis.minimum_scale = 0
        # Category (x) axis: no tick marks, same fonts.
        category_axis = chart.category_axis
        category_axis.format.line.color.rgb = RGBColor(217, 217, 217)
        category_axis.major_tick_mark = XL_TICK_MARK.NONE
        #category_axis.major_unit = 1
        category_labels = category_axis.tick_labels
        category_labels.font.size = self.category_font_size
        category_labels.font.name = self.font_name
        # Legend below the chart.
        chart.has_legend = True
        chart.legend.position = XL_LEGEND_POSITION.BOTTOM
        chart.legend.include_in_layout = False
        chart.legend.font.name = self.font_name
class GenerateCountriesCompPresentation:
    """ The class creating presentation with the comparison between national samples and site samples in one period.

    :param ndf: the calculated statistics for the national samples
    :type ndf: pandas dataframe
    :param sldf: the calculated statistics for the site samples
    :type sldf: pandas dataframe
    :param name: the name of the period, eg. 2017
    :type name: str
    :param samples: the country codes which should be colored by a different color in the main graphs (``None`` or empty list means no highlighting)
    :type samples: list of str or None
    """
    def __init__(self, ndf, sldf, name="", samples=None):
        self.ndf = ndf
        self.sldf = sldf
        self.name = name
        # Get absolute path to the database.
        script_dir = os.path.dirname(__file__)  # absolute dir the script is in
        self.abs_db_path = os.path.join(script_dir, "database", "resq.db")
        self.master = os.path.normpath(os.path.join(script_dir, "backgrounds", "countries_comparison.pptx"))
        # BUG FIX: the old signature used a mutable default (samples=[]) and only
        # assigned self.nationally_countries when samples was non-empty, although
        # _generate_graphs() always reads the attribute — an empty/omitted samples
        # list raised AttributeError. Always initialize it.
        self.nationally_countries = [self._select_country(code) for code in (samples or [])]
        self._generate_graphs()

    @staticmethod
    def _select_country(value):
        """ Return the country name from the package pytz based on the country code.

        :param value: the country code
        :type value: str
        :returns: the country name
        """
        if value == "UZB":
            # pytz indexes Uzbekistan by the alpha-2 code
            value = 'UZ'
        return pytz.country_names[value]

    @staticmethod
    def _sorted_subset(df, columns, sort_column, ascending=True):
        """ Return ``df`` restricted to ``columns`` and sorted by ``sort_column``.

        :param df: the source dataframe
        :type df: pandas dataframe
        :param columns: the columns to keep (site/name column first)
        :type columns: list of str
        :param sort_column: the column to sort by
        :type sort_column: str
        :param ascending: sort direction
        :type ascending: bool
        :returns: the sorted sub-dataframe
        """
        return df[columns].sort_values([sort_column], ascending=ascending)

    @staticmethod
    def _fill_title_slide(prs):
        """ Write the styled 'Data Comparison' heading onto the first (title) slide. """
        first_slide = prs.slides[0]
        text_frame = first_slide.shapes[5].text_frame
        run = text_frame.paragraphs[0].add_run()
        run.text = "Data Comparison"
        font = run.font
        font.name = 'Century Gothic'
        font.size = Pt(20)
        font.color.rgb = RGBColor(250, 250, 250)

    def _generate_graphs(self):
        """ The function generating graphs into the presentation! """
        prs = Presentation(self.master)  # Read the template presentation
        self._fill_title_slide(prs)
        # if (self.country_name in ['Ukraine', 'Poland'] and len(df) > 2):
        #     main_col = 'Site ID'
        # else:
        main_col = 'Site Name'
        ########################
        #### TOTAL PATIENTS ####
        ########################
        column_name = 'Total Patients'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        # Combine national and site-level rows into one dataframe.
        # NOTE(review): DataFrame.append was removed in pandas 2.0 — switch to
        # pandas.concat once the project's pandas version is upgraded.
        tmp_df = tmp_ndf.append(tmp_sldf).sort_values([column_name], ascending=True)
        title = 'Total number of cases - admission date in {}'.format(self.name)
        # Two slides are produced from the same data: a plain one, and one with
        # the configured countries highlighted by a different color.
        GenerateCountriesCompGraphs(ndf=tmp_df, sldf=None, presentation=prs, title=title, column_name=column_name)
        GenerateCountriesCompGraphs(ndf=tmp_df, sldf=None, presentation=prs, title=title, column_name=column_name, samples=self.nationally_countries)
        ############################
        #### MEDIAN PATIENT AGE ####
        ############################
        column_name = 'Median patient age'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title='Median patient age', column_name=column_name)
        ###############
        # STROKE TYPE #
        ###############
        column_name = '% stroke type - ischemic stroke'
        columns = [main_col, '% stroke type - ischemic stroke', '% stroke type - transient ischemic attack', '% stroke type - intracerebral hemorrhage', '% stroke type - subarrachnoid hemorrhage', '% stroke type - cerebral venous thrombosis', '% stroke type - undetermined stroke']
        legend = ['ischemic', 'transient ischemic attack', 'intracerebral hemorrhage', 'subarrachnoid hemorrhage', 'cerebral venous thrombosis', 'undetermined']
        tmp_ndf = self._sorted_subset(self.ndf, columns, column_name)
        tmp_sldf = self._sorted_subset(self.sldf, columns, column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="Stroke type, ordered by % ischemic stroke", column_name=column_name, legend=legend, number_of_series=len(legend), graph_type='stacked')
        #######################
        ### DEPARTMENT TYPE ###
        #######################
        column_name = '% department type - neurology'
        columns = [main_col, '% department type - neurology', '% department type - neurosurgery', '% department type - anesthesiology/resuscitation/critical care', '% department type - internal medicine', '% department type - geriatrics', '% department type - Other']
        legend = ['neurology', 'neurosurgery', 'anesthesiology resuscitation critical care', 'internal medicine', 'geriatrics', 'other']
        tmp_ndf = self._sorted_subset(self.ndf, columns, column_name)
        tmp_sldf = self._sorted_subset(self.sldf, columns, column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="Department type, ordered by % neurology", column_name=column_name, legend=legend, number_of_series=len(legend), graph_type='stacked')
        ###################################
        ### HOSPITALIZATION DESTINATION ###
        ###################################
        column_name = '% patients hospitalized in stroke unit / ICU'
        columns = [main_col, '% patients hospitalized in stroke unit / ICU', '% patients hospitalized in monitored bed with telemetry', '% patients hospitalized in standard bed']
        legend = ['% stroke unit / ICU', '% monitored bed with telemetry', '% standard bed']
        tmp_ndf = self._sorted_subset(self.ndf, columns, column_name)
        tmp_sldf = self._sorted_subset(self.sldf, columns, column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="Hospitalization type, ordered by % stroke unit", column_name=column_name, legend=legend, number_of_series=len(legend), graph_type='stacked')
        ##########################
        ### CT / MRI performed ###
        ##########################
        column_name = '% CT/MRI - performed'
        columns = [main_col, '% CT/MRI - performed', '% CT/MRI - Performed within 1 hour after admission']
        legend = ['% performed', '% performed within 1 hour after admission']
        tmp_ndf = self._sorted_subset(self.ndf, columns, column_name)
        tmp_sldf = self._sorted_subset(self.sldf, columns, column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients receiving CT / MRI", column_name=column_name, legend=legend, number_of_series=len(legend), graph_type='grouped', subtitle="- Calculated out of number of IS + ICH + TIA + CVT -")
        ############################
        # RECANALIZATION TREATMENT #
        ############################
        # NOTE(review): a stacked recanalization-procedures slide used to live here
        # but was disabled; it was replaced by the three simple slides below.
        column_name = '% patients recanalized'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        # BUG FIX: the subtitles of the three recanalization slides were assigned
        # but never passed to GenerateCountriesCompGraphs; they are now forwarded.
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients receiving recanalization procedures", column_name=column_name, subtitle="- Calculated out of number of IS -")
        column_name = '% IV tPa'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients receiving thrombolysis", column_name=column_name, subtitle="- Calculated out of number of IS -")
        column_name = '% TBY'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients receiving mechanical thrombectomy", column_name=column_name, subtitle="- Calculated out of number of IS -")
        ################
        # % median DTN #
        ################
        column_name = 'Median DTN (minutes)'
        # Sorted descending, unlike the other metrics.
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name, ascending=False)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name, ascending=False)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="Median Door-to-Needle Time (DTN), in minutes", column_name=column_name)
        #######################
        # dysphagia screening #
        #######################
        column_name = '% dysphagia screening - Guss test'
        columns = [main_col, '% dysphagia screening - Guss test', '% dysphagia screening - Other test', '% dysphagia screening - Another centre', '% dysphagia screening - Unable to test']
        legend = ['% GUSS test', '% Other test', '% Another centre', '% Unable to test']
        tmp_ndf = self._sorted_subset(self.ndf, columns, column_name)
        tmp_sldf = self._sorted_subset(self.sldf, columns, column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients screened for dysphagia, ordered by % GUSS test", column_name=column_name, legend=legend, number_of_series=len(legend), graph_type='stacked', subtitle="- Calculated out of number of IS + ICH + CVT -")
        #################################
        # % ASSESSED FOR REHABILITATION #
        #################################
        column_name = '% patients assessed for rehabilitation - Yes'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients assessed for rehabilitation within 72 hrs after admission", column_name=column_name)
        ##################################
        # CAROTID ARTERIES IMAGING - YES #
        ##################################
        column_name = '% carotid arteries imaging - Yes'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients receiving carotid arteries imaging", column_name=column_name, subtitle="- Calculated out of number of IS + TIA -")
        ##################################
        # PRESCRIPTION OF ANTICOAGULANTS #
        ##################################
        # column_name = '% patients prescribed anticoagulants with aFib with CVT'
        column_name = '% patients prescribed anticoagulants with aFib'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="Prescription of anticoagulants for patients with atrial fibrillation", column_name=column_name, subtitle="- Calculated out of number of patients with atrial fibrillation discharge alive -")
        ###################################
        # PRESCRIPTION OF ANTITHROMBOTICS #
        ###################################
        column_name = '% patients prescribed antithrombotics with aFib'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="Prescription of antithrombotics for patients with atrial fibrillation", column_name=column_name, subtitle="- Calculated out of number of patients with atrial fibrillation discharge alive -")
        ##########################
        # DISCHARGE WITH STATINS #
        ##########################
        column_name = '% patients prescribed statins - Yes'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients discharged on statins", column_name=column_name, subtitle="- Calculated out of number of IS + TIA -")
        #############################################################
        # % ANTIHYPERTENSIVE MEDICATION PRESCRIBED out of all cases #
        #############################################################
        column_name = '% prescribed antihypertensives - Yes'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% patients prescribed antihypertensives at discharge", column_name=column_name, subtitle="- Calculated out of number of patients discharged alive -")
        ###########################################################
        # % RECOMMENDED TO A CEREBROVASCULAR EXPERT - RECOMMENDED #
        ###########################################################
        column_name = '% recommended to a cerebrovascular expert - Recommended'
        tmp_ndf = self._sorted_subset(self.ndf, [main_col, column_name], column_name)
        tmp_sldf = self._sorted_subset(self.sldf, [main_col, column_name], column_name)
        GenerateCountriesCompGraphs(ndf=tmp_ndf, sldf=tmp_sldf, presentation=prs, title="% recommended to a cerebrovascular expert", subtitle="- Calculated out of number of total patients -", column_name=column_name)
        # Save the presentation into the current working directory.
        presentation_path = os.path.normpath(os.path.join(os.getcwd(), "comparison.pptx"))
        prs.save(presentation_path)
class GenerateCountriesCompGraphs:
    """ The class generating comparison graphs in presentation for nationally samples vs. site samples.

    :param ndf: the calculated statistics for the national samples
    :type ndf: pandas dataframe
    :param presentation: the opened presentation document
    :type presentation: Presentation object
    :param column_name: the name of column to be included in the graph
    :type column_name: str
    :param title: the title of the slide
    :type title: str
    :param sldf: the calculated statistics for the site samples, can be `None` if Total Patients graph is generated
    :type sldf: pandas dataframe
    :param subtitle: the subtitle of the slide
    :type subtitle: str
    :param graph_type: the type of graph to be generated ('stacked', 'grouped', or None for a normal barplot)
    :type graph_type: str
    :param number_of_series: the number of columns included in the stacked barplot
    :type number_of_series: int
    :param legend: the list of values in the legend
    :type legend: list
    :param samples: the list of countries which should be displayed with different color
    :type samples: list of string
    """
    def __init__(self, ndf, presentation, column_name, title, sldf=None, subtitle="", graph_type=None, number_of_series=0, legend=None, samples=None):
        self.ndf = ndf
        self.sldf = sldf
        self.presentation = presentation
        self.ndf_title = "Nationally representative sample"
        self.sldf_title = "Site-level representative sample"
        self.title = title
        self.column_name = column_name
        self.number_of_series = number_of_series
        self.legend = legend
        self.subtitle = subtitle
        self.samples = samples
        self.font_name = 'Century Gothic'
        self.category_font_size = Pt(10)
        self.data_label_font_size = Pt(11)
        self.categories_column = 'Site Name'
        # Dispatch on the requested graph type.
        if graph_type == 'stacked':
            self._create_stacked_barplot()
        elif graph_type == 'grouped':
            self._create_grouped_barplot()
        else:
            self._create_barplot()

    def _get_length_of_legend(self, legend):
        """ Return the total number of characters across all legend entries (used to guess a suitable legend font size). """
        return sum(len(item) for item in legend)

    def _new_slide(self):
        """ Add a slide based on custom layout 11 (title style predefined), set its title,
        and either fill or remove the subtitle placeholder.

        :returns: the new slide
        """
        slide = self.presentation.slides.add_slide(self.presentation.slide_layouts[11])
        slide.shapes.title.text = self.title
        subtitle = slide.placeholders[1]
        if self.subtitle == "":
            # No subtitle: drop the placeholder so it does not render empty.
            sp = subtitle.element
            sp.getparent().remove(sp)
        else:
            subtitle.text = self.subtitle
        return slide

    def _chart_series_data(self, df):
        """ Build ChartData with categories and up to ``number_of_series`` consecutive
        value columns of ``df`` starting at ``column_name`` (capped at 8, as before). """
        column_names = df.columns.tolist()
        index = column_names.index(self.column_name)
        chart_data = ChartData()
        chart_data.categories = df[self.categories_column].tolist()
        chart_data.add_series(self.legend[0], df[column_names[index]].tolist())
        for i in range(1, min(self.number_of_series, 8)):
            chart_data.add_series(self.legend[i], df[column_names[index + i]].tolist())
        return chart_data

    def _color_series(self, chart):
        """ Color the first series blue and, when at least five series exist, the fifth a lighter blue. """
        fill = chart.series[0].format.fill
        fill.solid()
        fill.fore_color.rgb = RGBColor(43, 88, 173)
        if self.number_of_series >= 5:
            fill = chart.series[4].format.fill
            fill.solid()
            fill.fore_color.rgb = RGBColor(80, 137, 188)

    def _format_percent_axes(self, chart):
        """ Apply the shared axis formatting for the stacked/grouped percentage charts. """
        value_axis = chart.value_axis
        tick_labels = value_axis.tick_labels
        tick_labels.font.size = Pt(11)
        tick_labels.font.name = self.font_name
        value_axis.major_tick_mark = XL_TICK_MARK.OUTSIDE
        value_axis.has_major_gridlines = True
        value_axis.major_gridlines.format.line.dash_style = MSO_LINE.DASH
        value_axis.major_gridlines.format.line.width = Pt(0.5)
        value_axis.maximum_scale = 100  # percentage charts top out at 100
        category_axis = chart.category_axis
        category_axis.major_tick_mark = XL_TICK_MARK.NONE
        category_labels = category_axis.tick_labels
        category_labels.font.size = self.category_font_size
        category_labels.font.name = self.font_name
        # NOTE(review): TickLabels has no tickLblSkip member in python-pptx, so this
        # assignment appears to have no effect on the produced file — confirm and remove.
        category_labels.tickLblSkip = 1

    def _format_bar_chart(self, chart, maximum, chart_title=None):
        """ Apply the shared formatting for the simple (clustered) bar charts.

        :param maximum: upper bound for the value axis
        :param chart_title: gray chart title to set, or None to leave the title untouched
        """
        plot = chart.plots[0]
        plot.vary_by_categories = False  # same color for every bar
        plot.has_data_labels = True
        data_labels = plot.data_labels
        data_labels.font.size = self.data_label_font_size
        data_labels.font.bold = True
        data_labels.font.name = self.font_name
        if chart_title is not None:
            # Set the chart title and color it gray.
            chart_text = chart.chart_title.text_frame
            chart_text.text = chart_title
            chart_text.paragraphs[0].font.size = Pt(18)
            chart_text.paragraphs[0].font.color.rgb = RGBColor(89, 89, 89)
        value_axis = chart.value_axis
        tick_labels = value_axis.tick_labels
        tick_labels.font.size = self.category_font_size
        tick_labels.font.name = self.font_name
        # Don't show major gridlines.
        value_axis.major_tick_mark = XL_TICK_MARK.OUTSIDE
        value_axis.has_major_gridlines = False
        # Set range of axis.
        value_axis.maximum_scale = maximum
        value_axis.minimum_scale = 0
        category_axis = chart.category_axis
        # Delete tick marks.
        category_axis.major_tick_mark = XL_TICK_MARK.NONE
        category_axis.major_unit = 1
        category_labels = category_axis.tick_labels
        category_labels.font.size = self.category_font_size
        category_labels.font.name = self.font_name

    def _create_barplot(self):
        """ Generate the normal barplot slide: nationally sample on the left, site-level
        sample on the right; the 'total number of cases' slide gets one wide chart. """
        title_lower = self.title.lower()
        is_total = "total number of cases" in title_lower
        # If the graph shows percentages, fix the maximum to 100; otherwise scale to the
        # data. (The unused `maximum = 0` local from the original code was removed.)
        if '%' in title_lower:
            ndf_maximum = 100
            sldf_maximum = 100
        elif is_total:
            ndf_maximum = round(max(self.ndf[self.column_name].tolist()), 1)
            sldf_maximum = None  # the right-hand chart is not drawn on the totals slide
        else:
            ndf_maximum = round(max(self.ndf[self.column_name].tolist()), 1)
            sldf_maximum = round(max(self.sldf[self.column_name].tolist()), 1)
        slide = self._new_slide()
        # 1st chart (left side, or full width for the totals slide) - nationally sample
        chart_data = ChartData()
        chart_data.categories = self.ndf[self.categories_column].tolist()
        chart_data.add_series(self.column_name, self.ndf[self.column_name].tolist())
        if is_total:
            left, width = Cm(0.7), Cm(32)
        else:
            left, width = Cm(0.5), Cm(15.26)
        chart = slide.shapes.add_chart(
            XL_CHART_TYPE.BAR_CLUSTERED, left, Cm(2), width, Cm(16.5), chart_data).chart
        series = chart.series[0]
        if is_total and self.samples is not None:
            # Highlight the configured countries in dark red, the rest in blue
            # (same blue as the slide title).
            site_names = self.ndf[self.categories_column].tolist()
            for idx, point in enumerate(series.points):
                fill = point.format.fill
                fill.solid()
                if site_names[idx] in self.samples:
                    fill.fore_color.rgb = RGBColor(128, 0, 0)
                else:
                    fill.fore_color.rgb = RGBColor(43, 88, 173)
        else:
            fill = series.format.fill
            fill.solid()
            fill.fore_color.rgb = RGBColor(43, 88, 173)
        self._format_bar_chart(chart, ndf_maximum, chart_title=None if is_total else self.ndf_title)
        if not is_total:
            # 2nd chart (right side) - site-level samples
            chart_data = ChartData()
            chart_data.categories = self.sldf[self.categories_column].tolist()
            chart_data.add_series(self.column_name, self.sldf[self.column_name].tolist())
            chart = slide.shapes.add_chart(
                XL_CHART_TYPE.BAR_CLUSTERED, Cm(17.5), Cm(2), Cm(15.26), Cm(16.5), chart_data).chart
            fill = chart.series[0].format.fill
            fill.solid()
            fill.fore_color.rgb = RGBColor(43, 88, 173)
            self._format_bar_chart(chart, sldf_maximum, chart_title=self.sldf_title)

    def _create_stacked_barplot(self):
        """ Generate the stacked barplot slide: nationally sample on the left,
        site-level sample on the right, legend below the right-hand chart. """
        # Total legend length decides the legend font size below.
        count = self._get_length_of_legend(self.legend)
        slide = self._new_slide()
        chart = None
        for df, left in ((self.ndf, Cm(0.5)), (self.sldf, Cm(17.5))):
            chart_data = self._chart_series_data(df)
            chart = slide.shapes.add_chart(
                XL_CHART_TYPE.BAR_STACKED, left, Cm(2), Cm(15.26), Cm(16.5), chart_data).chart
            self._color_series(chart)
            self._format_percent_axes(chart)
        # As in the original code, the legend is attached to the right-hand chart only.
        chart.has_legend = True
        chart.legend.position = XL_LEGEND_POSITION.BOTTOM
        chart.legend.include_in_layout = False
        chart.legend.font.name = self.font_name
        if count > 180 or 'antithrombotics prescribed' in self.title.lower():
            chart.legend.font.size = Pt(11)
        else:
            chart.legend.font.size = Pt(12)

    def _create_grouped_barplot(self):
        """ Generate the grouped (clustered) barplot slide: nationally sample on the left,
        site-level sample on the right, legend below the right-hand chart.
        (The original computed the legend length twice without using it; removed.) """
        slide = self._new_slide()
        chart = None
        for df, left in ((self.ndf, Cm(0.5)), (self.sldf, Cm(17.5))):
            column_names = df.columns.tolist()
            index = column_names.index(self.column_name)
            chart_data = ChartData()
            chart_data.categories = df[self.categories_column].tolist()
            # Grouped charts always plot exactly two adjacent columns.
            chart_data.add_series(self.legend[0], df[column_names[index]].tolist())
            chart_data.add_series(self.legend[1], df[column_names[index + 1]].tolist())
            chart = slide.shapes.add_chart(
                XL_CHART_TYPE.BAR_CLUSTERED, left, Cm(2), Cm(15.26), Cm(16.5), chart_data).chart
            self._color_series(chart)
            self._format_percent_axes(chart)
        # Legend on the right-hand chart only (no size adjustment here, matching the original).
        chart.has_legend = True
        chart.legend.position = XL_LEGEND_POSITION.BOTTOM
        chart.legend.include_in_layout = False
        chart.legend.font.name = self.font_name
class GenerateYearsCompPresentation:
    """ The class creating presentation with graphs representing country comparison through all years in the dataset (eg. 2016, 2017, 2018, 2019).

    :param df: the dataframe with calculated statistics per years or period
    :type df: pandas dataframe
    :param name: the name of the presentation
    :type name: str
    """
    def __init__(self, df, name):
        # dataframe with one row per compared year/period
        self.df = df
        # output file name (without the ".pptx" suffix)
        self.name = name
        # Get absolute path to the master pptx template shipped next to this script.
        script_dir = os.path.dirname(__file__)
        master_pptx = "countries_comparison.pptx"
        self.master = os.path.normpath(os.path.join(script_dir, "backgrounds", master_pptx))
        # All slides are generated immediately on construction.
        self._generate_graphs()

    def _generate_graphs(self):
        """Generate graphs into presentation (pptx)."""
        prs = Presentation(self.master)
        # --- Cover slide: write the presentation title into shape #5 of slide 0 ---
        first_slide = prs.slides[0]
        shape = first_slide.shapes[5]
        text_frame = shape.text_frame
        first_slide_text = "Data Comparison"
        p = text_frame.paragraphs[0]
        run = p.add_run()
        run.text = first_slide_text
        font = run.font
        font.name = 'Calibri Light'
        font.size = Pt(26)
        font.color.rgb = RGBColor(250,250,250)
        # if (self.country_name in ['Ukraine', 'Poland'] and len(df) > 2):
        #     main_col = 'Site ID'
        # else:
        main_col = 'Site Name'
        # main_col = "Site Name"
        # Slide title lists the compared periods (the values of the main column).
        years = ', '.join(map(str, self.df[main_col].tolist()))
        title = "Temporal trends - {}".format(years)
        titles = []
        graph_types = []
        legend = []
        # --- Slide: total patients and median age graphs ---
        column_name = 'Total Patients'
        df = self.df[[main_col, column_name]]
        titles.append("Total Patients")
        graph_types.append("normal")
        column_name = 'Median patient age'
        df1 = self.df[[main_col, column_name]]
        titles.append("Median patient age")
        graph_types.append("normal")
        GenerateYearsCompGraphs(df=df, df1=df1, presentation=prs, title=title, titles=titles, graph_types=graph_types)
        legends = []
        titles = []
        graph_types = []
        ### DEPARTMENT TYPE ###
        legend = ['neurology', 'neurosurgery', 'anesthesiology resuscitation critical care', 'internal medicine', 'geriatrics', 'other']
        legends.append(legend)
        titles.append("Department type (%)")
        graph_types.append("stacked")
        df = self.df[[main_col, '% department type - neurology', '% department type - neurosurgery', '% department type - anesthesiology/resuscitation/critical care', '% department type - internal medicine', '% department type - geriatrics', '% department type - Other']]
        ### HOSPITALIZATION DESTINATION ###
        legend = ['stroke unit', 'monitored bed with telemetry', 'standard bed']
        legends.append(legend)
        titles.append("Hospitalization type (%)")
        graph_types.append("stacked")
        df1 = self.df[[main_col, '% patients hospitalized in stroke unit / ICU', '% patients hospitalized in monitored bed with telemetry', '% patients hospitalized in standard bed']]
        ### REHABILITATION ###
        column_name = '% patients assessed for rehabilitation - Yes'
        df2 = self.df[[main_col, column_name]]
        # "normal" graphs carry no legend, but an (empty) entry must still be
        # appended so legends stays index-aligned with titles/graph_types.
        legend = []
        legends.append(legend)
        titles.append("Assessed for rehabilitation (%)")
        graph_types.append("normal")
        GenerateYearsCompGraphs(df=df, df1=df1, df2=df2, presentation=prs, title=title, titles=titles, graph_types=graph_types, legends=legends)
        legends = []
        titles = []
        graph_types = []
        # STROKE TYPE #
        legend = ['ischemic', 'transient ischemic attack', 'intracerebral hemorrhage', 'subarrachnoid hemorrhage', 'cerebral venous thrombosis', 'undetermined']
        legends.append(legend)
        titles.append("Stroke type (%)")
        graph_types.append("stacked")
        df = self.df[[main_col, '% stroke type - ischemic stroke', '% stroke type - transient ischemic attack', '% stroke type - intracerebral hemorrhage', '% stroke type - subarrachnoid hemorrhage', '% stroke type - cerebral venous thrombosis', '% stroke type - undetermined stroke']]
        # CT/MRI performed #
        column_name = '% CT/MRI - performed'
        df1 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("CT/MRI (%)")
        graph_types.append("normal")
        # patients recanalized
        column_name = '% patients recanalized'
        df2 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("% patients recanalized")
        graph_types.append("normal")
        GenerateYearsCompGraphs(df=df, df1=df1, df2=df2, presentation=prs, title=title, titles=titles, graph_types=graph_types, legends=legends)
        legends = []
        titles = []
        graph_types = []
        # RECANALIZATION PROCEDURES #
        legend = ['IV tPa', 'IV tPa + endovascular treatment', 'endovascular treatment', 'IV tPa + another centre for endovascular treatment']
        legends.append(legend)
        titles.append("Recanalization procedures (%)")
        df = self.df[[main_col, '% recanalization procedures - IV tPa', '% recanalization procedures - IV tPa + endovascular treatment', '% recanalization procedures - Endovascular treatment alone', '% recanalization procedures - IV tPa + referred to another centre for endovascular treatment']]
        graph_types.append("stacked")
        # MEDIAN DTN #
        column_name = 'Median DTN (minutes)'
        df1 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("Median DTN (minutes)")
        graph_types.append("normal")
        # MEDIAN DTG #
        column_name = 'Median DTG (minutes)'
        df2 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("Median DTG (minutes)")
        graph_types.append("normal")
        GenerateYearsCompGraphs(df=df, df1=df1, df2=df2, presentation=prs, title=title, titles=titles, graph_types=graph_types, legends=legends)
        legends = []
        titles = []
        graph_types = []
        # DYSPHAGIA SCREENING #
        column_name = '% dysphagia screening - Guss test'
        legend = ['GUSS test', 'Other test', 'Another centre']
        legends.append(legend)
        titles.append("Dysphagia screening (%)")
        graph_types.append("stacked")
        df = self.df[[main_col, '% dysphagia screening - Guss test', '% dysphagia screening - Other test', '% dysphagia screening - Another centre']]
        # CAROTID ARTERIES IMAGING #
        column_name = '% carotid arteries imaging - Yes'
        df1 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("Carotid arteries imaging (%)")
        graph_types.append("normal")
        # % RECOMMENDED TO A CEREBROVASCULAR EXPERT - RECOMMENDED #
        column_name = '% recommended to a cerebrovascular expert - Recommended'
        df2 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("Recommended to a cerebrovascular \nexpert (%)")
        graph_types.append("normal")
        GenerateYearsCompGraphs(df=df, df1=df1, df2=df2, presentation=prs, title=title, titles=titles, graph_types=graph_types, legends=legends)
        legends = []
        titles = []
        graph_types = []
        # PRESCRIBED ANTIPLATELETS #
        column_name = '% patients prescribed antiplatelets without aFib'
        df = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("% patients prescribed antiplatelets without aFib")
        graph_types.append("normal")
        # % PATIENTS PRESCRIBED ANTICOAGULANTS WITH AFIB #
        column_name = '% patients prescribed anticoagulants with aFib'
        df1 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("% patients prescribed anticoagulants with aFib")
        graph_types.append("normal")
        # % PATIENTS PRESCRIBED ANTITHROMBOTICS WITH AFIB #
        column_name = '% patients prescribed antithrombotics with aFib'
        df2 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("% patients prescribed antithrombotics with aFib")
        graph_types.append("normal")
        # % PATIENTS PRESCRIBED ANTICOAGULANTS WITH AFIB (HOME) #
        column_name = '% afib patients discharged home with anticoagulants'
        df3 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("% aFib patients discharged home with anticoagulants")
        graph_types.append("normal")
        GenerateYearsCompGraphs(df=df, df1=df1, df2=df2, df3=df3, presentation=prs, title=title, titles=titles, graph_types=graph_types, legends=legends)
        legends = []
        titles = []
        graph_types = []
        # DISCHARGE WITH STATINS #
        column_name = '% patients prescribed statins - Yes'
        df = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("% patients prescribed statins")
        graph_types.append("normal")
        # % PRESCRIBED ANTIHYPERTENSIVES #
        column_name = '% prescribed antihypertensives - Yes'
        df1 = self.df[[main_col, column_name]]
        legend = []
        legends.append(legend)
        titles.append("% patients prescribed antihypertensives")
        graph_types.append("normal")
        GenerateYearsCompGraphs(df=df, df1=df1, presentation=prs, title=title, titles=titles, graph_types=graph_types, legends=legends)
        # set pptx output name (for cz it'll be presentation_CZ.pptx)
        working_dir = os.getcwd()
        pptx = self.name + ".pptx"
        presentation_path = os.path.normpath(os.path.join(working_dir, pptx))
        prs.save(presentation_path)
class GenerateYearsCompGraphs:
    """ The class generating graphs into presentation for country per years. If only one dataframe is provided then one graph is created on slide, two graphs if two dataframes are provided etc.

    :param presentation: the opened presentation document
    :type presentation: Presentation object
    :param df: dataframe containing calculated statistics
    :type df: pandas dataframe
    :param df1: 2nd dataframe containing calculated statistics
    :type df1: pandas dataframe
    :param df2: 3rd dataframe containing calculated statistics
    :type df2: pandas dataframe
    :param df3: 4th dataframe containing calculated statistics
    :type df3: pandas dataframe
    :param title: the title of the slide
    :type title: str
    :param titles: the titles of each graph
    :type titles: list
    :param graph_types: the list of types of graphs ("normal" or "stacked"), index-aligned with the dataframes
    :type graph_types: list
    :param legends: the nested list containing lists of legends, index-aligned with the dataframes
    :type legends: nested list
    :param outcome: `True` if outcome calculation should be included in the presentation
    :type outcome: bool
    """
    # BUG FIX: the original signature used a mutable default argument
    # (`legends=[]`) which is created once and shared across every call of the
    # constructor; replaced with the None-sentinel idiom. Callers are
    # unaffected - omitting `legends` still yields an empty list.
    def __init__(self, presentation, df, df1=None, df2=None, df3=None, title="", titles=None, graph_types=None, legends=None, outcome=False):
        self.df = df
        self.df1 = df1
        self.df2 = df2
        self.df3 = df3
        self.presentation = presentation
        self.title = title
        self.titles = titles
        self.legends = legends if legends is not None else []
        # num_graphs counts the ADDITIONAL dataframes (0 => a single graph on the slide)
        self.num_graphs = 0
        self.graph_types = graph_types
        self.font_name = 'Century Gothic'
        self.category_font_size = Pt(8)
        self.data_label_font_size = Pt(9)
        # Category (x) axis labels come from a different column for outcome slides.
        if outcome:
            self.categories_column = 'Patient Group'
        else:
            self.categories_column = 'Site Name'
        if df1 is not None:
            self.num_graphs += 1
        if df2 is not None:
            self.num_graphs += 1
        if df3 is not None:
            self.num_graphs += 1
        # Add slide to presentation (layout 11 is our custom layout where only title 'Agency FB', color: RGBColor(43, 88, 173) and size:24 is set)
        self.slide = self.presentation.slides.add_slide(self.presentation.slide_layouts[12])
        # Get title object and set slide title
        title_placeholders = self.slide.shapes.title
        title_placeholders.text = self.title.upper()
        # Drop the unused subtitle placeholder directly from the slide XML.
        subtitle = self.slide.placeholders[1]
        sp = subtitle.element
        sp.getparent().remove(sp)
        # Palette used for chart series, in the order series are added.
        self.colors = {
            'blue': RGBColor(43, 88, 173),
            'violet': RGBColor(76, 70, 127),
            'orange': RGBColor(237, 145, 49),
            'green': RGBColor(146, 208, 80),
            # 'dark_blue': RGBColor(37, 94, 145),
            'yellow': RGBColor(255, 192, 0),
            'grey': RGBColor(165, 165, 165)
        }
        # Choose chart placements based on how many graphs share the slide.
        if self.num_graphs == 0:
            # Single chart spanning the whole slide.
            if outcome:
                specs = {
                    'height': Cm(16.5),
                    'width': Cm(32),
                    'left': Cm(0.5),
                    'top': Cm(2)
                }
            else:
                specs = {
                    'height': Cm(16.5),
                    'width': Cm(32),
                    'left': Cm(0.7),
                    'top': Cm(2)
                }
            self._create_plot(df=df, title=titles[0], specs=specs, graph_type=graph_types[0], ix=0)
        # If number of graphs is equal to 1 (2 graphs on one slide) put one graph on each half of the slide
        if self.num_graphs == 1:
            specs = {
                'height': Cm(16.5),
                'width': Cm(15.26),
                'left': Cm(0.5),
                'top': Cm(2)
            }
            specs1 = {
                'height': Cm(16.5),
                'width': Cm(15.26),
                'left': Cm(17.5),
                'top': Cm(2)
            }
            self._create_plot(df=df, title=titles[0], specs=specs, graph_type=graph_types[0], ix=0)
            self._create_plot(df=df1, title=titles[1], specs=specs1, graph_type=graph_types[1], ix=1)
        # If number of graphs is equal to 2 (3 graphs on one slide): one graph on the left half, two stacked on the right half
        if self.num_graphs == 2:
            specs = {
                'height': Cm(16.5),
                'width': Cm(15.26),
                'left': Cm(0.5),
                'top': Cm(2)
            }
            specs1 = {
                'height': Cm(8.25),
                'width': Cm(15.26),
                'left': Cm(17.5),
                'top': Cm(2)
            }
            specs2 = {
                'height': Cm(8.25),
                'width': Cm(15.26),
                'left': Cm(17.5),
                'top': Cm(10.25)
            }
            self._create_plot(df=df, title=titles[0], specs=specs, graph_type=graph_types[0], ix=0)
            self._create_plot(df=df1, title=titles[1], specs=specs1, graph_type=graph_types[1], ix=1)
            self._create_plot(df=df2, title=titles[2], specs=specs2, graph_type=graph_types[2], ix=2)
        # If number of graphs is equal to 3 (4 graphs on one slide): one graph per quarter of the slide
        if self.num_graphs == 3:
            specs = {
                'height': Cm(8.25),
                'width': Cm(15.26),
                'left': Cm(0.5),
                'top': Cm(2)
            }
            specs1 = {
                'height': Cm(8.25),
                'width': Cm(15.26),
                'left': Cm(0.5),
                'top': Cm(10.25)
            }
            specs2 = {
                'height': Cm(8.25),
                'width': Cm(15.26),
                'left': Cm(17.5),
                'top': Cm(2)
            }
            specs3 = {
                'height': Cm(8.25),
                'width': Cm(15.26),
                'left': Cm(17.5),
                'top': Cm(10.25)
            }
            self._create_plot(df=df, title=titles[0], specs=specs, graph_type=graph_types[0], ix=0)
            self._create_plot(df=df1, title=titles[1], specs=specs1, graph_type=graph_types[1], ix=1)
            self._create_plot(df=df2, title=titles[2], specs=specs2, graph_type=graph_types[2], ix=2)
            self._create_plot(df=df3, title=titles[3], specs=specs3, graph_type=graph_types[3], ix=3)

    def _set_transparency(self, transparency, elm):
        """ The function sets the transparency of the row.

        :param transparency: the transparency in %
        :type transparency: int
        :param elm: the element which transparency should be changed
        :type elm: format.line.color._xFill
        """
        # DrawingML expects the <a:alpha> val in thousandths of a percent.
        # NOTE(review): the value is built by string concatenation with a
        # literal '196' suffix (e.g. transparency=100 -> '0196'); this looks
        # like a magic constant rather than a proper *1000 scaling - confirm
        # the intended alpha before changing it (kept as-is for behavior).
        a = str(100 - transparency) + '196'
        alpha = OxmlElement('a:alpha')
        alpha.set('val', a)
        elm.srgbClr.append(alpha)

    def _create_plot(self, df, title, specs, graph_type, legend=None, ix=0):
        """ The function creating the new graph into the presentation based on the graph type.

        :param df: the dataframe with data to be shown
        :type df: pandas dataframe
        :param title: the title of the graph
        :type title: str
        :param specs: the position settings
        :type specs: dictionary
        :param graph_type: the type of graph (normal or stacked)
        :type graph_type: str
        :param legend: the list of values in legend based on columns (only for stacked barplot; unused - the legend is read from self.legends[ix])
        :type legend: list
        :param ix: the index which legend should be used
        :type ix: int
        """
        if graph_type == "normal":
            # Get column names of dataframe; the data column follows the category column.
            column_names = df.columns.tolist()
            index = column_names.index(self.categories_column)
            # 1st chart (left side) - nationally sample
            chart_data = ChartData()
            chart_data.categories = df[self.categories_column].tolist()
            chart_data.add_series(column_names[index+1], df[column_names[index+1]].tolist())
            chart = self.slide.shapes.add_chart(
                XL_CHART_TYPE.COLUMN_CLUSTERED, specs['left'], specs['top'], specs['width'], specs['height'], chart_data).chart
            # Color the first three bars individually (blue, orange, green).
            series = chart.series[0]
            number_of_series = len(df[self.categories_column].tolist())
            series.points[0].format.fill.solid()
            series.points[0].format.fill.fore_color.rgb = self.colors['blue']
            if (number_of_series >= 2):
                series.points[1].format.fill.solid()
                series.points[1].format.fill.fore_color.rgb = self.colors['orange']
            if (number_of_series >= 3):
                series.points[2].format.fill.solid()
                series.points[2].format.fill.fore_color.rgb = self.colors['green']
            # Get plot and let each category vary in color
            plot = chart.plots[0]
            plot.vary_by_categories = True
            # Show data labels and set font
            plot.has_data_labels = True
            data_labels = plot.data_labels
            data_labels.font.size = self.data_label_font_size
            data_labels.font.bold = True
            data_labels.font.name = self.font_name
            # Chart title styling
            chart_text = chart.chart_title.text_frame
            chart_text.text = title
            chart_text.paragraphs[0].font.name = self.font_name
            chart_text.paragraphs[0].font.size = Pt(14)
            chart_text.paragraphs[0].font.color.rgb = RGBColor(89, 89, 89)
            # Value axis (change font size, name, and other things)
            value_axis = chart.value_axis
            tick_labels = value_axis.tick_labels
            tick_labels.font.size = self.category_font_size
            tick_labels.font.name = self.font_name
            #value_axis.major_tick_mark = XL_TICK_MARK.OUTSIDE
            value_axis.has_major_gridlines = True
            value_axis.major_gridlines.format.line.color.rgb = RGBColor(217, 217, 217)
            value_axis.major_gridlines.format.line.width = Pt(0.5)
            # Set range of axis: percentages near full scale snap to 100,
            # otherwise round the maximum up to the next multiple of 10.
            value_axis.minimum_scale = 0
            values = df[column_names[index+1]].tolist()
            max_value = max(values)
            if '%' in title and max_value >= 90:
                value_axis.maximum_scale = 100
            else:
                value_axis.maximum_scale = math.ceil(max_value / 10.0) * 10
            value_axis.visible = False
            # Category axis (change font size, name, and other things)
            category_axis = chart.category_axis
            # Delete tick marks
            category_axis.major_tick_mark = XL_TICK_MARK.NONE
            category_axis.major_unit = 1
            category_labels = category_axis.tick_labels
            category_labels.font.size = self.category_font_size
            category_labels.font.name = self.font_name
        # Create stacked barplot
        else:
            # Get column names of dataframe; data columns start right after the category column.
            column_names = df.columns.tolist()
            index = column_names.index(self.categories_column) + 1
            legend = self.legends[ix]
            number_of_series = len(legend)
            # 1st dataframe (nationally sample)
            chart_data = ChartData()
            chart_data.categories = df[self.categories_column].tolist()
            # Add one series per legend entry, in column order.
            chart_data.add_series(legend[0], df[column_names[index]].tolist())
            if (number_of_series >= 2):
                chart_data.add_series(legend[1], df[column_names[index+1]].tolist())
            if (number_of_series >= 3):
                chart_data.add_series(legend[2], df[column_names[index+2]].tolist())
            if (number_of_series >= 4):
                chart_data.add_series(legend[3], df[column_names[index+3]].tolist())
            if (number_of_series >= 5):
                chart_data.add_series(legend[4], df[column_names[index+4]].tolist())
            if (number_of_series >= 6):
                chart_data.add_series(legend[5], df[column_names[index+5]].tolist())
            if (number_of_series >= 7):
                chart_data.add_series(legend[6], df[column_names[index+6]].tolist())
            if (number_of_series >= 8):
                chart_data.add_series(legend[7], df[column_names[index+7]].tolist())
            chart = self.slide.shapes.add_chart(
                XL_CHART_TYPE.COLUMN_STACKED, specs['left'],specs['top'], specs['width'],specs['height'], chart_data).chart
            # Color each series from the fixed palette (unused local
            # `series = chart.series[0]` from the original was removed).
            chart.series[0].format.fill.solid()
            chart.series[0].format.fill.fore_color.rgb = self.colors['blue']
            if (number_of_series >= 2):
                chart.series[1].format.fill.solid()
                chart.series[1].format.fill.fore_color.rgb = self.colors['orange']
            if (number_of_series >= 3):
                chart.series[2].format.fill.solid()
                chart.series[2].format.fill.fore_color.rgb = self.colors['green']
            if (number_of_series >= 4):
                chart.series[3].format.fill.solid()
                chart.series[3].format.fill.fore_color.rgb = self.colors['grey']
            if (number_of_series >= 5):
                chart.series[4].format.fill.solid()
                chart.series[4].format.fill.fore_color.rgb = self.colors['violet']
            if (number_of_series >= 6):
                chart.series[5].format.fill.solid()
                chart.series[5].format.fill.fore_color.rgb = self.colors['yellow']
            # Value axis (change font size, name, and other things)
            value_axis = chart.value_axis
            tick_labels = value_axis.tick_labels
            tick_labels.font.size = Pt(11)
            tick_labels.font.name = self.font_name
            value_axis.major_tick_mark = XL_TICK_MARK.OUTSIDE
            #value_axis.major_tick_mark = XL_TICK_MARK.OUTSIDE
            value_axis.has_major_gridlines = True
            value_axis.major_gridlines.format.line.color.rgb = RGBColor(217, 217, 217)
            value_axis.major_gridlines.format.line.width = Pt(0.5)
            # Stacked percentage charts always run 0-100.
            value_axis.minimum_scale = 0
            value_axis.maximum_scale = 100
            value_axis.visible = True
            # Set 100% transparency to value axis line (keeps the axis but hides the line)
            value_axis.format.line.color.rgb = RGBColor(0, 0, 0)
            solidFill = value_axis.format.line.color._xFill
            self._set_transparency(100, solidFill)
            category_axis = chart.category_axis
            category_axis.format.line.color.rgb = RGBColor(0, 0, 0)
            solidFill = category_axis.format.line.color._xFill
            self._set_transparency(100, solidFill)
            category_axis.major_tick_mark = XL_TICK_MARK.NONE
            category_labels = category_axis.tick_labels
            category_labels.font.size = self.category_font_size
            category_labels.font.name = self.font_name
            category_labels.tickLblSkip = 1
            # Set legend below the chart
            chart.has_legend = True
            chart.legend.position = XL_LEGEND_POSITION.BOTTOM
            chart.legend.include_in_layout = False
            chart.legend.font.name = self.font_name
            chart.legend.font.size = Pt(14)
            # Chart title styling
            chart_text = chart.chart_title.text_frame
            chart_text.text = title
            chart_text.paragraphs[0].font.size = Pt(18)
            chart_text.paragraphs[0].font.color.rgb = RGBColor(89, 89, 89)
| 42.582896
| 329
| 0.616032
| 9,859
| 81,163
| 4.896338
| 0.056294
| 0.04371
| 0.015951
| 0.01796
| 0.851304
| 0.811903
| 0.790918
| 0.762207
| 0.738487
| 0.720423
| 0
| 0.017273
| 0.268866
| 81,163
| 1,905
| 330
| 42.605249
| 0.796212
| 0.167034
| 0
| 0.741228
| 1
| 0
| 0.128649
| 0.003332
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015789
| false
| 0
| 0.015789
| 0
| 0.039474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
15c66488040901a663574cbbc6754f80ac664ce9
| 266
|
py
|
Python
|
events/templatetags/events_tags.py
|
Engerrs/ckan.org
|
a5a9b63b0ca16cb5aa4f709f7a264b8f6c265158
|
[
"BSD-3-Clause"
] | 1
|
2022-03-18T03:20:00.000Z
|
2022-03-18T03:20:00.000Z
|
events/templatetags/events_tags.py
|
Engerrs/ckan.org
|
a5a9b63b0ca16cb5aa4f709f7a264b8f6c265158
|
[
"BSD-3-Clause"
] | 26
|
2021-07-07T08:42:42.000Z
|
2022-03-29T14:34:59.000Z
|
events/templatetags/events_tags.py
|
Engerrs/ckan.org
|
a5a9b63b0ca16cb5aa4f709f7a264b8f6c265158
|
[
"BSD-3-Clause"
] | 3
|
2021-07-07T22:11:03.000Z
|
2021-09-15T18:19:10.000Z
|
from django import template
# Module-level tag registry; the tag functions below attach themselves to it
# via the @register.simple_tag() decorator.
register = template.Library()
@register.simple_tag()
def event_start(start_date):
    """Render an event's start datetime, e.g. '05 March 2022 - 14:30'."""
    return format(start_date, "%d %B %Y - %H:%M")
@register.simple_tag()
def event_start_date(start_date):
    """Render an event's start date only, e.g. '05 March 2022'."""
    date_pattern = "%d %B %Y"
    return start_date.strftime(date_pattern)
| 19
| 50
| 0.721805
| 40
| 266
| 4.575
| 0.475
| 0.245902
| 0.185792
| 0.218579
| 0.710383
| 0.710383
| 0.382514
| 0.382514
| 0.382514
| 0
| 0
| 0
| 0.135338
| 266
| 13
| 51
| 20.461538
| 0.795652
| 0
| 0
| 0.25
| 0
| 0
| 0.090566
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.125
| 0.25
| 0.625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c614d702722e2565dd668a847c81682c44a0e825
| 38,846
|
py
|
Python
|
pysnmp/HUAWEI-L2VPN-OAM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/HUAWEI-L2VPN-OAM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/HUAWEI-L2VPN-OAM-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module HUAWEI-L2VPN-OAM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-L2VPN-OAM-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:33:57 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# NOTE: machine-generated by pysmi. `mibBuilder` is injected into this module's
# namespace by the pysnmp MIB loader at import time - do not edit by hand.
# --- Symbol imports from prerequisite MIB modules ---
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
HWL2VpnVcEncapsType, = mibBuilder.importSymbols("HUAWEI-VPLS-EXT-MIB", "HWL2VpnVcEncapsType")
ifName, InterfaceIndexOrZero = mibBuilder.importSymbols("IF-MIB", "ifName", "InterfaceIndexOrZero")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
ModuleIdentity, MibIdentifier, Gauge32, ObjectIdentity, Bits, TimeTicks, IpAddress, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, iso, Counter64, Integer32, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "MibIdentifier", "Gauge32", "ObjectIdentity", "Bits", "TimeTicks", "IpAddress", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "iso", "Counter64", "Integer32", "Unsigned32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# --- Module identity (OID 1.3.6.1.4.1.2011.5.25.119.9) ---
hwL2vpnOamTrap = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9))
hwL2vpnOamTrap.setRevisions(('2013-09-05 14:00', '2013-05-13 13:30', '2013-03-25 14:52',))
if mibBuilder.loadTexts: hwL2vpnOamTrap.setLastUpdated('201309051400Z')
if mibBuilder.loadTexts: hwL2vpnOamTrap.setOrganization('Huawei Technologies Co.,Ltd.')
hwL2Vpn = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119))
hwL2vpnOamTrapMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1))
# --- Scalar objects carried as varbinds in the notifications below ---
hwL2vpnServiceType = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("vpwsPw", 0), ("vplsPw", 1)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnServiceType.setStatus('current')
hwL2vpnProtocolType = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("static", 0), ("ldp", 1), ("bgp", 2)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnProtocolType.setStatus('current')
hwL2vpnVcID = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 3), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnVcID.setStatus('current')
hwL2vpnVcType = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 4), HWL2VpnVcEncapsType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnVcType.setStatus('current')
hwL2vpnPeerAddr = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 5), IpAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnPeerAddr.setStatus('current')
hwL2vpnIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 6), InterfaceIndexOrZero()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnIfIndex.setStatus('current')
hwL2vpnPwMaster = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("primary", 1), ("second", 2)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnPwMaster.setStatus('current')
hwL2vpnRmtSiteID = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 8), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnRmtSiteID.setStatus('current')
hwL2vpnInLabel = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 9), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnInLabel.setStatus('current')
hwL2vpnOutLabel = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 10), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnOutLabel.setStatus('current')
hwL2vpnIfName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 11), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnIfName.setStatus('current')
hwL2vpnVsiName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 1, 12), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwL2vpnVsiName.setStatus('current')
# --- Notification (trap) definitions; each carries the same 12 varbinds ---
hwL2vpnOamMIBTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2))
hwL2vpnOamDloc = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 1)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamDloc.setStatus('current')
hwL2vpnOamDlocClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 2)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamDlocClear.setStatus('current')
hwL2vpnOamSd1Near = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 3)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamSd1Near.setStatus('current')
hwL2vpnOamSd1NearClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 4)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamSd1NearClear.setStatus('current')
hwL2vpnOamRdi = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 5)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamRdi.setStatus('current')
hwL2vpnOamRdiClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 6)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamRdiClear.setStatus('current')
hwL2vpnOamMeg = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 7)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamMeg.setStatus('current')
hwL2vpnOamMegClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 8)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamMegClear.setStatus('current')
hwL2vpnOamMep = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 9)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamMep.setStatus('current')
hwL2vpnOamMepClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 10)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamMepClear.setStatus('current')
hwL2vpnOamPeriod = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 11)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamPeriod.setStatus('current')
hwL2vpnOamPeriodClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 12)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamPeriodClear.setStatus('current')
hwL2vpnOamAis = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 13)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamAis.setStatus('current')
hwL2vpnOamAisClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 14)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamAisClear.setStatus('current')
hwL2vpnOamSd2Near = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 15)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamSd2Near.setStatus('current')
hwL2vpnOamSd2NearClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 16)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamSd2NearClear.setStatus('current')
hwL2vpnOamLck = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 17)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamLck.setStatus('current')
hwL2vpnOamLckClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 18)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamLckClear.setStatus('current')
hwL2vpnOamCsf = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 19)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamCsf.setStatus('current')
hwL2vpnOamCsfClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 20)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamCsfClear.setStatus('current')
hwL2vpnOamExcess = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 21)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamExcess.setStatus('current')
hwL2vpnOamExcessClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 22)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamExcessClear.setStatus('current')
hwL2vpnOamMismatch = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 23)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamMismatch.setStatus('current')
hwL2vpnOamMismatchClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 24)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamMismatchClear.setStatus('current')
hwL2vpnOamMismerge = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 25)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamMismerge.setStatus('current')
hwL2vpnOamMismergeClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 26)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamMismergeClear.setStatus('current')
hwL2vpnOamFail = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 27)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamFail.setStatus('current')
hwL2vpnOamFailClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 28)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamFailClear.setStatus('current')
hwL2vpnOamDbdi = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 29)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamDbdi.setStatus('current')
hwL2vpnOamDbdiClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 30)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamDbdiClear.setStatus('current')
hwL2vpnOamUnknown = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 31)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamUnknown.setStatus('current')
hwL2vpnOamUnknownClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 32)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamUnknownClear.setStatus('current')
hwL2vpnOamLocalLock = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 33)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamLocalLock.setStatus('current')
hwL2vpnOamLocalLockClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 34)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamLocalLockClear.setStatus('current')
hwL2vpnOamSd1Far = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 35)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamSd1Far.setStatus('current')
hwL2vpnOamSd1FarClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 36)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamSd1FarClear.setStatus('current')
hwL2vpnOamSd2Far = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 37)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamSd2Far.setStatus('current')
hwL2vpnOamSd2FarClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 38)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamSd2FarClear.setStatus('current')
hwL2vpnOamFdi = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 39)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamFdi.setStatus('current')
hwL2vpnOamFdiClear = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 2, 40)).setObjects(("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnServiceType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnProtocolType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVcType"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPeerAddr"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfIndex"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnPwMaster"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnRmtSiteID"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnInLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOutLabel"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnIfName"), ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnVsiName"))
if mibBuilder.loadTexts: hwL2vpnOamFdiClear.setStatus('current')
# Conformance section: the compliance statement plus the object and
# notification groups it references (same OIDs and member lists as the
# generated originals, reformatted for readability).
hwL2vpnOamConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 3))
hwL2vpnOamMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 3, 1))
hwL2vpnOamMIBCompliance = ModuleCompliance(
    (1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 3, 1, 1)
).setObjects(
    ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOamGroup"),
    ("HUAWEI-L2VPN-OAM-MIB", "hwL2vpnOamNotificationGroup"),
)
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwL2vpnOamMIBCompliance = hwL2vpnOamMIBCompliance.setStatus('current')
hwL2vpnOamGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 3, 2))
# Object group: the twelve scalars that every OAM trap carries as varbinds.
hwL2vpnOamGroup = ObjectGroup(
    (1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 3, 2, 1)
).setObjects(*(
    ("HUAWEI-L2VPN-OAM-MIB", _member) for _member in (
        "hwL2vpnServiceType", "hwL2vpnProtocolType", "hwL2vpnVcID",
        "hwL2vpnVcType", "hwL2vpnPeerAddr", "hwL2vpnIfIndex",
        "hwL2vpnPwMaster", "hwL2vpnRmtSiteID", "hwL2vpnInLabel",
        "hwL2vpnOutLabel", "hwL2vpnIfName", "hwL2vpnVsiName",
    )
))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwL2vpnOamGroup = hwL2vpnOamGroup.setStatus('current')
# Notification group: every defect trap and its matching "...Clear" trap.
hwL2vpnOamNotificationGroup = NotificationGroup(
    (1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 9, 3, 2, 2)
).setObjects(*(
    ("HUAWEI-L2VPN-OAM-MIB", _member) for _member in (
        "hwL2vpnOamDloc", "hwL2vpnOamDlocClear",
        "hwL2vpnOamSd1Near", "hwL2vpnOamSd1NearClear",
        "hwL2vpnOamRdi", "hwL2vpnOamRdiClear",
        "hwL2vpnOamMeg", "hwL2vpnOamMegClear",
        "hwL2vpnOamMep", "hwL2vpnOamMepClear",
        "hwL2vpnOamPeriod", "hwL2vpnOamPeriodClear",
        "hwL2vpnOamAis", "hwL2vpnOamAisClear",
        "hwL2vpnOamSd2Near", "hwL2vpnOamSd2NearClear",
        "hwL2vpnOamLck", "hwL2vpnOamLckClear",
        "hwL2vpnOamCsf", "hwL2vpnOamCsfClear",
        "hwL2vpnOamExcess", "hwL2vpnOamExcessClear",
        "hwL2vpnOamMismatch", "hwL2vpnOamMismatchClear",
        "hwL2vpnOamMismerge", "hwL2vpnOamMismergeClear",
        "hwL2vpnOamFail", "hwL2vpnOamFailClear",
        "hwL2vpnOamDbdi", "hwL2vpnOamDbdiClear",
        "hwL2vpnOamUnknown", "hwL2vpnOamUnknownClear",
        "hwL2vpnOamLocalLock", "hwL2vpnOamLocalLockClear",
        "hwL2vpnOamSd1Far", "hwL2vpnOamSd1FarClear",
        "hwL2vpnOamSd2Far", "hwL2vpnOamSd2FarClear",
        "hwL2vpnOamFdi", "hwL2vpnOamFdiClear",
    )
))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwL2vpnOamNotificationGroup = hwL2vpnOamNotificationGroup.setStatus('current')
# Register every public symbol of this MIB module with the builder so other
# MIB modules (and pysnmp itself, via PYSNMP_MODULE_ID) can resolve them.
mibBuilder.exportSymbols("HUAWEI-L2VPN-OAM-MIB", hwL2vpnOamLocalLock=hwL2vpnOamLocalLock, hwL2vpnOutLabel=hwL2vpnOutLabel, hwL2vpnOamMismerge=hwL2vpnOamMismerge, hwL2vpnOamDbdi=hwL2vpnOamDbdi, hwL2vpnOamDlocClear=hwL2vpnOamDlocClear, hwL2vpnProtocolType=hwL2vpnProtocolType, hwL2vpnOamFailClear=hwL2vpnOamFailClear, hwL2Vpn=hwL2Vpn, hwL2vpnOamMIBTraps=hwL2vpnOamMIBTraps, hwL2vpnOamFail=hwL2vpnOamFail, hwL2vpnServiceType=hwL2vpnServiceType, hwL2vpnOamAisClear=hwL2vpnOamAisClear, hwL2vpnOamMIBCompliance=hwL2vpnOamMIBCompliance, hwL2vpnOamMep=hwL2vpnOamMep, hwL2vpnOamSd2Near=hwL2vpnOamSd2Near, hwL2vpnVsiName=hwL2vpnVsiName, hwL2vpnRmtSiteID=hwL2vpnRmtSiteID, hwL2vpnOamLck=hwL2vpnOamLck, hwL2vpnIfName=hwL2vpnIfName, hwL2vpnOamExcess=hwL2vpnOamExcess, hwL2vpnOamSd1NearClear=hwL2vpnOamSd1NearClear, hwL2vpnOamMeg=hwL2vpnOamMeg, hwL2vpnOamSd2NearClear=hwL2vpnOamSd2NearClear, hwL2vpnOamMIBCompliances=hwL2vpnOamMIBCompliances, hwL2vpnOamGroups=hwL2vpnOamGroups, hwL2vpnOamRdi=hwL2vpnOamRdi, hwL2vpnOamMismatchClear=hwL2vpnOamMismatchClear, hwL2vpnInLabel=hwL2vpnInLabel, hwL2vpnOamTrap=hwL2vpnOamTrap, hwL2vpnOamMegClear=hwL2vpnOamMegClear, hwL2vpnOamPeriod=hwL2vpnOamPeriod, hwL2vpnOamRdiClear=hwL2vpnOamRdiClear, hwL2vpnOamAis=hwL2vpnOamAis, hwL2vpnPeerAddr=hwL2vpnPeerAddr, hwL2vpnOamUnknown=hwL2vpnOamUnknown, hwL2vpnOamDloc=hwL2vpnOamDloc, hwL2vpnOamUnknownClear=hwL2vpnOamUnknownClear, hwL2vpnOamCsfClear=hwL2vpnOamCsfClear, hwL2vpnPwMaster=hwL2vpnPwMaster, hwL2vpnOamTrapMIBObjects=hwL2vpnOamTrapMIBObjects, PYSNMP_MODULE_ID=hwL2vpnOamTrap, hwL2vpnOamSd2Far=hwL2vpnOamSd2Far, hwL2vpnOamPeriodClear=hwL2vpnOamPeriodClear, hwL2vpnOamFdi=hwL2vpnOamFdi, hwL2vpnOamGroup=hwL2vpnOamGroup, hwL2vpnVcID=hwL2vpnVcID, hwL2vpnOamDbdiClear=hwL2vpnOamDbdiClear, hwL2vpnOamSd1Far=hwL2vpnOamSd1Far, hwL2vpnOamSd1FarClear=hwL2vpnOamSd1FarClear, hwL2vpnOamMepClear=hwL2vpnOamMepClear, hwL2vpnOamMismatch=hwL2vpnOamMismatch, hwL2vpnOamSd1Near=hwL2vpnOamSd1Near, hwL2vpnOamLckClear=hwL2vpnOamLckClear, 
hwL2vpnOamMismergeClear=hwL2vpnOamMismergeClear, hwL2vpnOamConformance=hwL2vpnOamConformance, hwL2vpnOamCsf=hwL2vpnOamCsf, hwL2vpnOamFdiClear=hwL2vpnOamFdiClear, hwL2vpnOamExcessClear=hwL2vpnOamExcessClear, hwL2vpnOamLocalLockClear=hwL2vpnOamLocalLockClear, hwL2vpnOamSd2FarClear=hwL2vpnOamSd2FarClear, hwL2vpnIfIndex=hwL2vpnIfIndex, hwL2vpnVcType=hwL2vpnVcType, hwL2vpnOamNotificationGroup=hwL2vpnOamNotificationGroup)
| 273.56338
| 2,411
| 0.731349
| 4,410
| 38,846
| 6.441723
| 0.049887
| 0.207934
| 0.264644
| 0.321353
| 0.722226
| 0.701141
| 0.683258
| 0.677591
| 0.677591
| 0.677591
| 0
| 0.07495
| 0.061654
| 38,846
| 141
| 2,412
| 275.503546
| 0.704398
| 0.008598
| 0
| 0.022556
| 0
| 0
| 0.519596
| 0.007558
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.067669
| 0
| 0.067669
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d66474144ba58d339f0033bfb94075f994b56334
| 24,323
|
py
|
Python
|
main.py
|
LiuCLab/multiphoton-selective-excitation
|
b93a61ec81cf216f71bcd68a94cdb192f4a401e7
|
[
"MIT"
] | null | null | null |
main.py
|
LiuCLab/multiphoton-selective-excitation
|
b93a61ec81cf216f71bcd68a94cdb192f4a401e7
|
[
"MIT"
] | null | null | null |
main.py
|
LiuCLab/multiphoton-selective-excitation
|
b93a61ec81cf216f71bcd68a94cdb192f4a401e7
|
[
"MIT"
] | null | null | null |
"""main
Main file to run to create figures 2-5 in the paper.
All values are in SI units (m, s, T, etc) unless otherwise noted.
Dependencies:
sigpy (https://sigpy.readthedocs.io/en/latest/mri_rf.html)
numpy
scipy
matplotlib
Author: Victor Han
Last Modified: 6/2/21
"""
import numpy as np
import sigpy as sp
import sigpy.mri as mr
import sigpy.mri.rf as rf
import sigpy.plot as pl
import scipy.signal as signal
import matplotlib.pyplot as plt
from scipy.special import *
from scipy.integrate import odeint
import matplotlib.gridspec as gridspec
import csv
from various_constants import *
from pulse_generator_functions import *
from simulation_functions import *
from plotting_functions import *
#################################################################
# Make Figure 2: equivalent single-photon, two-photon, and freq mod slice select
# Build the subplot grid and write the row / column labels.
fig = plt.figure(figsize=(20, 10))
cols = 2+SEQUENCE_PLOT_END  # number of grid columns used by the pulse sequence plot
outer = gridspec.GridSpec(4, cols, wspace=1, hspace=0.3)
# Row labels down the left edge, one per excitation scheme.
for slot, ypos, label in ((cols, 0.2, 'Single-Photon'),
                          (2*cols, 0.2, 'Two-Photon'),
                          (3*cols, -0.1, 'Frequency Modulation')):
    ax = plt.Subplot(fig, outer[slot])
    txt = ax.text(0.7, ypos, label, fontsize=CATEGORY_SIZE, rotation=90)
    txt.set_ha('center')
    ax.axis("off")
    fig.add_subplot(ax)
# Column headers along the top row.
for spec, label in ((outer[1:SEQUENCE_PLOT_END], 'Pulse Sequence'),
                    (outer[SEQUENCE_PLOT_END], 'Simulation Slice Profile'),
                    (outer[SEQUENCE_PLOT_END+1], 'Experimental Slice Profile')):
    ax = plt.Subplot(fig, spec)
    txt = ax.text(0.5, 0, label, fontsize=CATEGORY_SIZE)
    txt.set_ha('center')
    ax.axis("off")
    fig.add_subplot(ax)
# 2a) Make single photon version
pulse = slr_pulse(N, TB, FA, name='2a')
bz_pulse = np.zeros(N)
gz_pulse = np.zeros(N)
sim_duration = PULSE_DURATION*1.5
# Sample the RF, B1z, and gradient waveforms on a fine grid for plotting.
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, np.zeros(N))
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
plot_waveform(fig, outer[cols+1:cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz)
if PRINT_MAX_VALS:
    print('2a max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the transverse magnetization across the slice direction.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was deprecated in NumPy 1.20 and removed in 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[cols+SEQUENCE_PLOT_END+1], '2a.npy')
# 2b) Make two-photon version
# Division by j1(...) compensates the Bessel-function amplitude scaling of the
# two-photon resonance so the flip angle matches the single-photon case.
pulse = slr_pulse(N, TB, FA, freq=FZ, phase=g*B1Z_AMP/(2*np.pi*FZ)-np.pi/2, name='2b') / (j1(g/(2*np.pi*FZ) * B1Z_AMP))
bz_pulse = B1Z_AMP * np.sin(2e-6*np.arange(N)*2*np.pi*FZ)
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
plot_waveform(fig, outer[2*cols+1:2*cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz)
if PRINT_MAX_VALS:
    print('2b max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[2*cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[2*cols+SEQUENCE_PLOT_END+1], '2b.npy')
# 2c) Make frequency modulated version
pulse = fm_pulse(N, TB, FA, FZ, B1Z_AMP, phase=g*B1Z_AMP/(2*np.pi*FZ)-np.pi/2, name='2c')
bz_pulse = np.zeros(N)
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
plot_waveform(fig, outer[3*cols+1:3*cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz)
if PRINT_MAX_VALS:
    print('2c max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[3*cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[3*cols+SEQUENCE_PLOT_END+1], '2c.npy')
plt.savefig("figure2.pdf")
#################################################################
# Make Figure 3: slice shifting
# Build the subplot grid and write the row / column labels.
fig = plt.figure(figsize=(20, 10))
cols = 2+SEQUENCE_PLOT_END
outer = gridspec.GridSpec(4, cols, wspace=1, hspace=0.3)
# Row labels down the left edge, one per shifting method.
for slot, ypos, label in ((cols, 0.3, r'$\omega_{xy}$ Shift'),
                          (2*cols, 0.2, r'Constant $B_{1z}$'),
                          (3*cols, 0.3, r'$\omega_{z}$ Shift')):
    ax = plt.Subplot(fig, outer[slot])
    txt = ax.text(0.7, ypos, label, fontsize=CATEGORY_SIZE, rotation=90)
    txt.set_ha('center')
    ax.axis("off")
    fig.add_subplot(ax)
# Column headers along the top row.
for spec, label in ((outer[1:SEQUENCE_PLOT_END], 'Pulse Sequence'),
                    (outer[SEQUENCE_PLOT_END], 'Simulation Slice Profile'),
                    (outer[SEQUENCE_PLOT_END+1], 'Experimental Slice Profile')):
    ax = plt.Subplot(fig, spec)
    txt = ax.text(0.5, 0, label, fontsize=CATEGORY_SIZE)
    txt.set_ha('center')
    ax.axis("off")
    fig.add_subplot(ax)
# 3a) Shifting using wxy offset
f_offset = 2*TB/PULSE_DURATION  # slice shift expressed as an RF frequency offset; reused by 3b/3c
if PRINT_MAX_VALS:
    print('3 frequency offset: ' + str(f_offset))
pulse = slr_pulse(N, TB, FA, freq=(FZ-f_offset), phase=g*B1Z_AMP/(2*np.pi*FZ)-np.pi/2, name='3a') / (j1(g/(2*np.pi*FZ) * B1Z_AMP)) # this is actually increasing the frequency
bz_pulse = B1Z_AMP * np.sin(2e-6*np.arange(N)*2*np.pi*FZ)
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
plot_waveform(fig, outer[cols+1:cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz)
if PRINT_MAX_VALS:
    print('3a max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the (shifted) slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[cols+SEQUENCE_PLOT_END+1], '3a.npy')
# 3b) Shifting using constant B1z
pulse = slr_pulse(N, TB, FA, freq=FZ, phase=g*B1Z_AMP/(2*np.pi*FZ)-np.pi/2, name='3b') / (j1(g/(2*np.pi*FZ) * B1Z_AMP))
bz_pulse = B1Z_AMP * np.sin(2e-6*np.arange(N)*2*np.pi*FZ)
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, bz_pulse, dc_value=-2*np.pi*f_offset/g) # subtract to get the same direction as adding wxy
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
plot_waveform(fig, outer[2*cols+1:2*cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz)
if PRINT_MAX_VALS:
    print('3b max B1xy: ' + str(np.max(np.abs(pulse))))
    print('3b rise-time: ' + str(SLICE_PEAK/SLEW_LIMIT))
if WRITE_WAVEFORM_FILES:
    make_b1z_csv(bz_pulse, SLICE_PEAK, PULSE_DURATION, '3b.csv', dc_value=-2*np.pi*f_offset/g)
# Bloch-simulate; the trailing odeint args add the DC B1z offset used above.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION, 0, -2*np.pi*f_offset/g),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[2*cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[2*cols+SEQUENCE_PLOT_END+1], '3b.npy')
# 3c) Shifting using wz offset
pulse = slr_pulse(N, TB, FA, freq=FZ, phase=g*B1Z_AMP/(2*np.pi*FZ)-np.pi/2, name='3c') / (j1(g/(2*np.pi*(FZ+f_offset)) * B1Z_AMP)) # compensate for different wz freq
bz_pulse = B1Z_AMP * np.sin(2e-6*np.arange(N)*2*np.pi*(FZ+f_offset))
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
plot_waveform(fig, outer[3*cols+1:3*cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz)
if PRINT_MAX_VALS:
    print('3c max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[3*cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[3*cols+SEQUENCE_PLOT_END+1], '3c.npy')
plt.savefig("figure3.pdf")
#################################################################
# Make Figure 4: Modulation using B1z or B1xy
# Build the subplot grid and write the row / column labels.
fig = plt.figure(figsize=(20, 10))
cols = 2+SEQUENCE_PLOT_END
outer = gridspec.GridSpec(4, cols, wspace=1, hspace=0.3)
# Row labels down the left edge (slightly smaller font to fit the longer text).
for slot, label in ((cols, r'$B_{1xy}$ Mod Two-Photon'),
                    (2*cols, r'$B_{1z}$ Mod Two-Photon'),
                    (3*cols, 'Both Mod Two-Photon')):
    ax = plt.Subplot(fig, outer[slot])
    txt = ax.text(0.7, 0.1, label, fontsize=CATEGORY_SIZE-2, rotation=90)
    txt.set_ha('center')
    ax.axis("off")
    fig.add_subplot(ax)
# Column headers along the top row.
for spec, label in ((outer[1:SEQUENCE_PLOT_END], 'Pulse Sequence'),
                    (outer[SEQUENCE_PLOT_END], 'Simulation Slice Profile'),
                    (outer[SEQUENCE_PLOT_END+1], 'Experimental Slice Profile')):
    ax = plt.Subplot(fig, spec)
    txt = ax.text(0.5, 0, label, fontsize=CATEGORY_SIZE)
    txt.set_ha('center')
    ax.axis("off")
    fig.add_subplot(ax)
# 4a) Two-photon slice selection using B1xy modulation
pulse = slr_pulse(N_FIG4, TB_FIG4, FA, freq=FZ, name='4a') / (j1(g/(2*np.pi*FZ) * B1Z_AMP))
bz_pulse = -1*B1Z_AMP * np.cos(2e-6*np.arange(N_FIG4)*2*np.pi*FZ)
sim_duration_fig4 = PULSE_DURATION_FIG4*1.5
if WRITE_WAVEFORM_FILES:
    write_rf_pulse_for_heartvista(pulse, '4a')
t = np.linspace(0,sim_duration_fig4,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, gz_pulse)
plot_waveform(fig, outer[cols+1:cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz, zoom_time=[5.9, 6.1])
if PRINT_MAX_VALS:
    print('4a max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration_fig4, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK_FIG4, PULSE_DURATION_FIG4),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[cols+SEQUENCE_PLOT_END+1], '4a.npy')
# 4b) Two-photon slice selection using B1z modulation
# Largest usable j1 argument given B1Z_AMP; also bounds the inversion search below.
max_bessel_arg = g*B1Z_AMP/(2*np.pi*FZ)
pulse = slr_pulse(N_FIG4, TB_FIG4, FA, freq=0)
# 'scale' is the constant RF carrier amplitude used later in 4b and reused in 4c.
scale = np.max(np.abs(pulse)) / j1(max_bessel_arg)
# Rescale the SLR envelope so its peak equals j1(max_bessel_arg), keeping every
# sample inside the range j1 can produce for the inversion step that follows.
pulse = pulse / np.max(np.abs(pulse)) * j1(max_bessel_arg)
pulse_bz1 = np.zeros(len(pulse))
from scipy.optimize import minimize
def diff(x,a):
    """Squared residual between j1(x) and the target value *a*.

    Used as the objective for scipy.optimize.minimize when inverting the
    Bessel amplitude response sample-by-sample.
    """
    residual = j1(x) - a
    return residual * residual
# Invert the Bessel amplitude response sample-by-sample: find the j1 argument
# that reproduces each (real) envelope sample, bounded to +/- max_bessel_arg.
for i in range(len(pulse)):
    # Pass args as an explicit tuple (scipy tolerates a bare scalar, but the
    # documented interface is a tuple).
    res = minimize(diff, 0, args=(np.real(pulse[i]),), bounds=[(-max_bessel_arg, max_bessel_arg)])
    pulse_bz1[i] = res.x[0]
bz_pulse = -1*pulse_bz1 * 2*np.pi*FZ/g * np.cos(2e-6*np.arange(N_FIG4)*2*np.pi*FZ) # lets make this a cos to get rid of phase differences
if WRITE_WAVEFORM_FILES:
    make_b1z_csv(bz_pulse, SLICE_PEAK, PULSE_DURATION_FIG4, '4b.csv')
# Constant-amplitude RF carrier at FZ; the slice shaping now comes from B1z.
pulse = np.zeros(len(pulse), dtype=complex)
for i in range(len(pulse)):
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    pulse[i] = scale * complex(np.cos(2e-6*i*2*np.pi*FZ), np.sin(2e-6*i*2*np.pi*FZ))
if WRITE_WAVEFORM_FILES:
    write_rf_pulse_for_heartvista(pulse, '4b')
t = np.linspace(0,sim_duration_fig4,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, gz_pulse)
plot_waveform(fig, outer[2*cols+1:2*cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz, zoom_time=[5.9, 6.1])
if PRINT_MAX_VALS:
    print('4b max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration_fig4, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK_FIG4, PULSE_DURATION_FIG4),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[2*cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[2*cols+SEQUENCE_PLOT_END+1], '4b.npy')
# 4c) Two-photon slice selection using both B1xy and B1z modulation
pulse = slr_pulse(N_FIG4, TB_FIG4, FA, freq=0) / j1(g/(2*np.pi*FZ) * B1Z_AMP)
# First half keeps the B1z-modulation scheme (bz_pulse retains its 4b samples
# there); second half switches to B1xy modulation with a constant-amplitude
# cosine B1z. NOTE(review): this intentionally reuses bz_pulse and scale from 4b.
for i in range(len(pulse)):
    if i<N_FIG4/2:
        pulse[i] = scale
    else:
        bz_pulse[i] = -1*B1Z_AMP * np.cos(2e-6*i*2*np.pi*FZ)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    pulse[i] = pulse[i] * complex(np.cos(2e-6*i*2*np.pi*FZ), np.sin(2e-6*i*2*np.pi*FZ))
if WRITE_WAVEFORM_FILES:
    write_rf_pulse_for_heartvista(pulse, '4c')
    make_b1z_csv(bz_pulse, SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, '4c.csv')
t = np.linspace(0,sim_duration_fig4,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK_FIG4, PULSE_DURATION_FIG4, gz_pulse)
plot_waveform(fig, outer[3*cols+1:3*cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz, zoom_time=[5.9, 6.1])
if PRINT_MAX_VALS:
    print('4c max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration_fig4, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK_FIG4, PULSE_DURATION_FIG4),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[3*cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[3*cols+SEQUENCE_PLOT_END+1], '4c.npy')
plt.savefig("figure4.pdf")
#################################################################
# Make Figure 5: Multislice
# Build the subplot grid and write the row / column labels.
fig = plt.figure(figsize=(20, 10))
cols = 2+SEQUENCE_PLOT_END
outer = gridspec.GridSpec(4, cols, wspace=1, hspace=0.3)
# Row labels down the left edge, one per multislice experiment.
for slot, label in ((cols, 'Free Unequal Slices'),
                    (2*cols, r'Naive $\omega_{xy}$ Shifting'),
                    (3*cols, 'Shifted Multislice')):
    ax = plt.Subplot(fig, outer[slot])
    txt = ax.text(0.7, 0.0, label, fontsize=CATEGORY_SIZE, rotation=90)
    txt.set_ha('center')
    ax.axis("off")
    fig.add_subplot(ax)
# Column headers along the top row.
for spec, label in ((outer[1:SEQUENCE_PLOT_END], 'Pulse Sequence'),
                    (outer[SEQUENCE_PLOT_END], 'Simulation Slice Profile'),
                    (outer[SEQUENCE_PLOT_END+1], 'Experimental Slice Profile')):
    ax = plt.Subplot(fig, spec)
    txt = ax.text(0.5, 0, label, fontsize=CATEGORY_SIZE)
    txt.set_ha('center')
    ax.axis("off")
    fig.add_subplot(ax)
# 5a) Multiphoton multislice RF pulse using oscillating gradients
acdc_ratio = 1.5  # oscillating (AC) gradient amplitude relative to the slice-select (DC) amplitude
FZ = TB/PULSE_DURATION * 2  # NOTE: rebinds FZ for the multislice section
SLICE_PEAK = TB/PULSE_DURATION * 2*np.pi/g * 1/THICKNESS  # NOTE: rebinds SLICE_PEAK as well
pulse = slr_pulse(N, TB, FA, name="5a")
bz_pulse = np.zeros(N)
gz_pulse = SLICE_PEAK * acdc_ratio * np.sin(2e-6*np.arange(N)*2*np.pi*FZ)
sim_duration = 1.5 * PULSE_DURATION
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
# Now to save the gz waveform
if WRITE_WAVEFORM_FILES:
    write_gz_pulse_for_heartvista(gz_pulse, SLICE_PEAK, PULSE_DURATION, '5a_gz')
plot_waveform(fig, outer[cols+1:cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz, zoom_time=[2, 3.1])
if PRINT_MAX_VALS:
    print('5a max B1xy: ' + str(np.max(np.abs(pulse))))
    print('5a max gz: ' + str(np.max(np.abs(Gz))))
    print('5a time to RF start: ' + str(SLICE_PEAK/SLEW_LIMIT))
# Bloch-simulate; trailing odeint arg carries the oscillating-gradient amplitude term.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION, acdc_ratio*SLICE_PEAK/(2*np.pi*FZ)),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[cols+SEQUENCE_PLOT_END+1], '5a.npy', ylim=[0,10000]) # Max y-value is decreased because higher receiver attenuation was used to not saturate the receiver
# 5b) Shifting the slice via B1xy frequency alone does not work
shift_f = FZ  # attempted shift; reused by 5c
pulse = slr_pulse(N, TB, FA, freq=FZ, name="5b")
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
plot_waveform(fig, outer[2*cols+1:2*cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz, zoom_time=[2, 3.1])
if PRINT_MAX_VALS:
    print('5b max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION, acdc_ratio*SLICE_PEAK/(2*np.pi*FZ)),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[2*cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[2*cols+SEQUENCE_PLOT_END+1], '5b.npy', ylim=[0,10000]) # Max y-value is decreased because higher receiver attenuation was used to not saturate the receiver
# 5c) Shifting the slice via B1xy frequency and a B1z works
B1Z_AMP = 2*np.pi*shift_f/g * acdc_ratio  # NOTE: rebinds B1Z_AMP for this section
pulse = fm_pulse(N, TB, FA*j1(g*B1Z_AMP/(2*np.pi*FZ)), FZ, B1Z_AMP, phase=g*B1Z_AMP/(2*np.pi*FZ)-np.pi/2, name='5c')
t = np.linspace(0,sim_duration,WAVEFORM_RES)
RF = np.zeros(len(t), dtype=complex)
B1z = np.zeros(len(t))
Gz = np.zeros(len(t))
for i in range(len(t)):
    RF[i] = bxy_waveform(t[i], SLICE_PEAK, PULSE_DURATION, pulse)
    B1z[i] = bz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, bz_pulse)
    Gz[i] = gz_waveform(t[i], SLICE_PEAK, PULSE_DURATION, gz_pulse)
plot_waveform(fig, outer[3*cols+1:3*cols+SEQUENCE_PLOT_END], t, np.abs(RF), -1*np.angle(RF), B1z, Gz, zoom_time=[2, 3.1])
if PRINT_MAX_VALS:
    print('5c max B1xy: ' + str(np.max(np.abs(pulse))))
# Bloch-simulate the slice profile.
M = np.array([0, 0, M0])
t = np.linspace(0, sim_duration, 101)
final_m = np.zeros(XRES, dtype=complex)
x_vals = np.linspace(-XLIM,XLIM,XRES)
for i in range(XRES):
    x = x_vals[i]
    y = 0
    sol = odeint(bloch, M, t, args=(x, y, pulse, bz_pulse, gz_pulse, SLICE_PEAK, PULSE_DURATION, acdc_ratio*SLICE_PEAK/(2*np.pi*FZ)),atol=1e-7, rtol=1e-11, hmax=2e-6, mxstep=5000)
    # np.complex was removed in NumPy 1.24; builtin complex is equivalent.
    final_m[i] = complex(sol[-1,0], sol[-1,1])
plot_sim(fig, outer[3*cols+SEQUENCE_PLOT_END], x_vals, np.abs(final_m), -1*np.angle(final_m))
plot_experiment(fig, outer[3*cols+SEQUENCE_PLOT_END+1], '5c.npy', ylim=[0,10000]) # Max y-value is decreased because higher receiver attenuation was used to not saturate the receiver
plt.savefig("figure5.pdf")
plt.show()
| 38.243711
| 182
| 0.690458
| 4,635
| 24,323
| 3.463215
| 0.062352
| 0.029903
| 0.048592
| 0.053451
| 0.873723
| 0.858647
| 0.843509
| 0.833603
| 0.824632
| 0.817655
| 0
| 0.040831
| 0.12297
| 24,323
| 635
| 183
| 38.303937
| 0.711654
| 0.072812
| 0
| 0.755144
| 0
| 0
| 0.048845
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002058
| false
| 0
| 0.032922
| 0
| 0.037037
| 0.032922
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d69d3d8655a22218c8a71e8d7bbb8f0d0c00af07
| 61
|
py
|
Python
|
cscs-checks/libraries/boost/src/hello.py
|
mboisson/reframe
|
ebf0141596f19c7df60b59d8ad6211067f55b5e5
|
[
"BSD-3-Clause"
] | 167
|
2017-11-14T20:37:28.000Z
|
2022-03-31T11:19:18.000Z
|
cscs-checks/libraries/boost/src/hello.py
|
mboisson/reframe
|
ebf0141596f19c7df60b59d8ad6211067f55b5e5
|
[
"BSD-3-Clause"
] | 2,190
|
2017-06-14T12:48:13.000Z
|
2022-03-31T16:09:51.000Z
|
cscs-checks/libraries/boost/src/hello.py
|
victorusu/reframe
|
e98078a990e31a47604b06d674e4ee730c22cd44
|
[
"BSD-3-Clause"
] | 83
|
2017-05-29T19:12:16.000Z
|
2022-03-18T09:49:21.000Z
|
# Smoke test: call greet() from the hello_boost_python extension module
# (presumably a Boost.Python-built C++ extension — confirm against the build).
import hello_boost_python
print(hello_boost_python.greet())
| 15.25
| 33
| 0.852459
| 9
| 61
| 5.333333
| 0.666667
| 0.416667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 61
| 3
| 34
| 20.333333
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
ba505fb6e768b550d7dd051b4fa32758998f2285
| 84
|
py
|
Python
|
src/boofuzz_numbers/__init__.py
|
lucca-ruhland/boofuzz-numbers
|
76ecd5e7d1aa1bc02d5ba070078574968c33598f
|
[
"MIT"
] | null | null | null |
src/boofuzz_numbers/__init__.py
|
lucca-ruhland/boofuzz-numbers
|
76ecd5e7d1aa1bc02d5ba070078574968c33598f
|
[
"MIT"
] | null | null | null |
src/boofuzz_numbers/__init__.py
|
lucca-ruhland/boofuzz-numbers
|
76ecd5e7d1aa1bc02d5ba070078574968c33598f
|
[
"MIT"
] | null | null | null |
from boofuzz_numbers.integer import s_int
from boofuzz_numbers.float import s_float
| 28
| 41
| 0.880952
| 14
| 84
| 5
| 0.571429
| 0.314286
| 0.514286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 84
| 2
| 42
| 42
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ba52b2e8bbdb53a060492b6538bc96605c856c7d
| 155
|
py
|
Python
|
Python_proficiency_test/latex/codes/6.py
|
ALFA-group/neural_program_comprehension
|
0253911f376cf282af5a5627e38e0a591ad38860
|
[
"MIT"
] | 6
|
2020-04-24T08:16:51.000Z
|
2021-11-01T09:50:46.000Z
|
Python_proficiency_test/latex/codes/6.py
|
ALFA-group/neural_program_comprehension
|
0253911f376cf282af5a5627e38e0a591ad38860
|
[
"MIT"
] | null | null | null |
Python_proficiency_test/latex/codes/6.py
|
ALFA-group/neural_program_comprehension
|
0253911f376cf282af5a5627e38e0a591ad38860
|
[
"MIT"
] | 4
|
2021-02-17T20:21:31.000Z
|
2022-02-14T12:43:23.000Z
|
def foo(param1, *param2):
    """Demo of *args: print the fixed argument, then the tuple of extras."""
    for value in (param1, param2):
        print(value)
def bar(param1, **param2):
    """Demo of **kwargs: print the fixed argument, then the keyword dict."""
    for value in (param1, param2):
        print(value)
# foo packs the extra positional arguments 2..5 into a tuple.
foo(1,2,3,4,5)
# bar packs the keyword arguments a and b into a dict.
bar(6,a=7,b=8)
| 17.222222
| 26
| 0.619355
| 28
| 155
| 3.428571
| 0.571429
| 0.25
| 0.354167
| 0.479167
| 0.708333
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0.125984
| 0.180645
| 155
| 8
| 27
| 19.375
| 0.629921
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.