hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3a8963c6d5929966e5222276159a41595136bc09 | 114 | py | Python | src/errors.py | 8cylinder/boss | b57df6c1bb6064bfb5ad92313d88854281c0f18e | [
"MIT"
] | null | null | null | src/errors.py | 8cylinder/boss | b57df6c1bb6064bfb5ad92313d88854281c0f18e | [
"MIT"
] | null | null | null | src/errors.py | 8cylinder/boss | b57df6c1bb6064bfb5ad92313d88854281c0f18e | [
"MIT"
] | null | null | null |
class DependencyError(Exception): pass
class PlatformError(Exception): pass
class SecurityError(Exception): pass
| 22.8 | 38 | 0.833333 | 12 | 114 | 7.916667 | 0.5 | 0.410526 | 0.378947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.087719 | 114 | 4 | 39 | 28.5 | 0.913462 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
3a947b45df99a26108b37935aea45b08387b771d | 100 | py | Python | football_packing/__init__.py | samirak93/Football-packing | 82434aa7a704ca66c720e1bbac33f7982dabf2af | [
"MIT"
] | 17 | 2020-06-01T05:22:48.000Z | 2021-10-05T19:06:43.000Z | football_packing/__init__.py | samirak93/Football-packing | 82434aa7a704ca66c720e1bbac33f7982dabf2af | [
"MIT"
] | null | null | null | football_packing/__init__.py | samirak93/Football-packing | 82434aa7a704ca66c720e1bbac33f7982dabf2af | [
"MIT"
] | null | null | null | from football_packing.packing import packing
from football_packing.plot_packing import plot_packing
| 33.333333 | 54 | 0.9 | 14 | 100 | 6.142857 | 0.357143 | 0.27907 | 0.44186 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08 | 100 | 2 | 55 | 50 | 0.934783 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
3abd563c9f156a5c4f9fa4953526f93041da25fd | 5,601 | py | Python | Bruteforce.py | itz-danish/Bruteforce-of-4-8-digits-using-python3 | e9aeebfbdc236b7bbb2af0e2b42007a4d0897032 | [
"Apache-2.0"
] | 1 | 2021-06-28T20:41:29.000Z | 2021-06-28T20:41:29.000Z | Bruteforce.py | itz-danish/Bruteforce-of-4-8-digits-using-python3 | e9aeebfbdc236b7bbb2af0e2b42007a4d0897032 | [
"Apache-2.0"
] | null | null | null | Bruteforce.py | itz-danish/Bruteforce-of-4-8-digits-using-python3 | e9aeebfbdc236b7bbb2af0e2b42007a4d0897032 | [
"Apache-2.0"
] | null | null | null | in_put = input("Enter your password to bruteforce: ")
a1 = "a"
a2 = "a"
a3 = "a"
a4 = "a"
a5 = "a"
a6 = "a"
a7 = "a"
a8 = "a"
dictionary = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j','k','l','m','n','o','p','q','r','s','t','u','v','w', 'x', 'y', 'z',]
plc1 = -1
plc2 = -1
plc3 = -1
plc4 = -1
plc5 = -1
plc6 = -1
plc7 = -1
plc8 = -1
l = len(in_put)
while True:
if l == 4:
plc4 += 1
a4 = a4.replace(a4,(dictionary[plc4]))
print(a1+a2+a3+a4)
if a1+a2+a3+a4 == in_put:
print("Your password is ",a1+a2+a3+a4)
break;
if plc4 == 25:
plc4 = -1
plc3 += 1
a3 = a3.replace(a3,(dictionary[plc3]))
print(a1+a2+a3+a4)
if a1+a2+a3+a4 == in_put:
print("Your password is ",a1+a2+a3+a4)
break;
if plc3 == 25:
plc3 = -1
plc2 += 1
a2 = a2.replace(a2,(dictionary[plc2]))
print(a1+a2+a3+a4)
if a1+a2+a3+a4 == in_put:
print("Your password is ",a1+a2+a3+a4)
break;
if plc2 == 25:
plc2 = -1
plc1+= 1
a1 = a1.replace(a1,(dictionary[plc1]))
print(a1+a2+a3+a4)
if a1+a2+a3+a4 == in_put:
print("Your password is ",a1+a2+a3+a4)
break;
if l == 5:
plc5 += 1
a5 = a5.replace(a5,dictionary[plc5])
print(a1+a2+a3+a4+a5)
if a1+a2+a3+a4+a5 == in_put:
print("Your password is ",a1+a2+a3+a4+a5)
break;
if plc5 == 25:
plc5 = -1
plc4 += 1
a4 = a4.replace(a4,(dictionary[plc4]))
print(a1+a2+a3+a4+a5)
if a1+a2+a3+a4+a5 == in_put:
print("Your password is ",a1+a2+a3+a4+a5)
break;
if plc4 == 25:
plc4 = -1
plc3 += 1
a3 = a3.replace(a3,(dictionary[plc3]))
print(a1+a2+a3+a4+a5)
if a1+a2+a3+a4+a5 == in_put:
print("Your password is ",a1+a2+a3+a4+a5)
break;
if plc3 == 25:
plc3 = -1
plc2 += 1
a2 = a2.replace(a2,(dictionary[plc2]))
print(a1+a2+a3+a4+a5)
if a1+a2+a3+a4+a5 == in_put:
print("Your password is ",a1+a2+a3+a4+a5)
break;
if plc2 == 25:
plc2 = -1
plc1+= 1
a1 = a1.replace(a1,(dictionary[plc1]))
print(a1+a2+a3+a4+a5)
if a1+a2+a3+a4 == in_put:
print("Your password is ",a1+a2+a3+a4+a5)
break;
if l == 6:
plc6 += 1
a6 = a6.replace(a6,(dictionary[plc6]))
print(a1+a2+a3+a4+a5+a6)
if a1+a2+a3+a4+a5+a6 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6)
break;
if plc6 == 25:
plc6 = -1
plc5 += 1
a5 = a5.replace(a5,(dictionary[plc5]))
print(a1+a2+a3+a4+a5+a6)
if plc5 == 25:
plc5 = -1
plc4 += 1
a4 = a4.replace(a4,(dictionary[plc4]))
print(a1+a2+a3+a4+a5+a6)
if a1+a2+a3+a4+a5+a6 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6)
break;
if plc4 == 25:
plc4 = -1
plc3 += 1
a3 = a3.replace(a3,(dictionary[plc3]))
print(a1+a2+a3+a4+a5+a6)
if a1+a2+a3+a4+a5+a6 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6)
break;
if plc3 == 25:
plc3 = -1
plc2 += 1
a2 = a2.replace(a2,(dictionary[plc2]))
print(a1+a2+a3+a4+a5+a6)
if a1+a2+a3+a4+a5+a6 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6)
break;
if plc2 == 25:
plc2 = -1
plc1+= 1
a1 = a1.replace(a1,(dictionary[plc1]))
print(a1+a2+a3+a4+a5+a6)
if a1+a2+a3+a4+a5+a6 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6)
break;
if l == 7:
plc7 += 1
a7 = a7.replace(a7,(dictionary[plc7]))
print(a1+a2+a3+a4+a5+a6+a7)
if plc7 == 25:
plc7 = -1
plc6 += 1
a6 = a6.replace(a6,(dictionary[plc6]))
print(a1+a2+a3+a4+a5+a6+a7)
if a1+a2+a3+a4+a5+a6+a7 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6+a7)
break;
if plc6 == 25:
plc6 = -1
plc5 += 1
a5 = a5.replace(a5,(dictionary[plc5]))
print(a1+a2+a3+a4+a5+a6+a7)
if plc5 == 25:
plc5 = -1
plc4 += 1
a4 = a4.replace(a4,(dictionary[plc4]))
print(a1+a2+a3+a4+a5+a6+a7)
if a1+a2+a3+a4+a5+a6+a7 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6+a7)
break;
if plc4 == 25:
plc4 = -1
plc3 += 1
a3 = a3.replace(a3,(dictionary[plc3]))
print(a1+a2+a3+a4+a5+a6+a7)
if a1+a2+a3+a4+a5+a6+a7 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6+a7)
break;
if plc3 == 25:
plc3 = -1
plc2 += 1
a2 = a2.replace(a2,(dictionary[plc2]))
print(a1+a2+a3+a4+a5+a6+a7)
if a1+a2+a3+a4+a5+a6+a7 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6+a7)
break;
if plc2 == 25:
plc2 = -1
plc1+= 1
a1 = a1.replace(a1,(dictionary[plc1]))
print(a1+a2+a3+a4+a5+a6+a7)
if a1+a2+a3+a4+a5+a6+a7 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6+a7)
break;
if l == 8:
plc8 += 1
a8 = a8.replace(a8,(dictionary[plc8]))
print(a1+a2+a3+a4+a5+a6+a7+a8)
if plc8 == 25:
plc8 = -1
plc7 += 1
a7 = a7.replace(a7,(dictionary[plc7]))
print(a1+a2+a3+a4+a5+a6+a7+a8)
if plc7 == 25:
plc7 = -1
plc6 +=1
a6 = a6.replace(a6,(dictionary[plc6]))
print(a1+a2+a3+a4+a5+a6+a7+a8)
if plc6 == 25:
plc6 = -1
plc5 += 1
a5 = a5.replace(a5,(dictionary[plc5]))
print(a1+a2+a3+a4+a5+a6+a7+a8)
if plc5 == 25:
plc5 = -1
plc4 += 1
a4 = a4.replace(a4,(dictionary[plc4]))
print(a1+a2+a3+a4+a5+a6+a7+a8)
if plc4 == 25:
plc4 = -1
plc3 += 1
a3 = a3.replace(a3,(dictionary[plc3]))
print(a1+a2+a3+a4+a5+a6+a7+a8)
if plc3 == 25:
plc3 = -1
plc2 +=1
a2 = a2.replace(a2,(dictionary[plc2]))
print(a1+a2+a3+a4+a5+a6+a7+a8)
if plc2 == 25:
plc2 = -1
plc1 += 1
a1 = a1.replace(a1,(dictionary[plc1]))
print(a1+a2+a3+a4+a5+a6+a7+a8)
if a1+a2+a3+a4+a5+a6+a7+a8 == in_put:
print("Your password is ",a1+a2+a3+a4+a5+a6+a7+a8)
break;
| 21.135849 | 131 | 0.55133 | 1,060 | 5,601 | 2.892453 | 0.062264 | 0.091324 | 0.136986 | 0.182648 | 0.929224 | 0.929224 | 0.929224 | 0.929224 | 0.923353 | 0.923353 | 0 | 0.175714 | 0.236922 | 5,601 | 264 | 132 | 21.215909 | 0.541647 | 0 | 0 | 0.893333 | 0 | 0 | 0.073023 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.093333 | 0 | 0 | 0 | 0.222222 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
3ad62ea6db7b7a577b655ccface35c255b593b39 | 208 | py | Python | colour/optimal/dataset/__init__.py | canavandl/colour | a453cd37b6135a9092d5ea5b2aafb8d19134bdff | [
"BSD-3-Clause"
] | 1 | 2019-06-27T11:32:48.000Z | 2019-06-27T11:32:48.000Z | colour/optimal/dataset/__init__.py | canavandl/colour | a453cd37b6135a9092d5ea5b2aafb8d19134bdff | [
"BSD-3-Clause"
] | null | null | null | colour/optimal/dataset/__init__.py | canavandl/colour | a453cd37b6135a9092d5ea5b2aafb8d19134bdff | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .optimal_colour_stimuli import ILLUMINANTS_OPTIMAL_COLOUR_STIMULI
__all__ = ['ILLUMINANTS_OPTIMAL_COLOUR_STIMULI']
| 23.111111 | 70 | 0.802885 | 26 | 208 | 5.769231 | 0.615385 | 0.26 | 0.4 | 0.413333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005348 | 0.100962 | 208 | 8 | 71 | 26 | 0.796791 | 0.201923 | 0 | 0 | 0 | 0 | 0.207317 | 0.207317 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
aaf4863bfdf90161cef2c72a31261d967291f400 | 167 | py | Python | mtl/__init__.py | Gaudeval/py-fuzzy-temporal-logic | df9be99b24a7955bf4e0c5cf6647f521dd7a255f | [
"BSD-3-Clause"
] | 44 | 2019-02-01T02:33:01.000Z | 2022-03-09T08:31:08.000Z | mtl/__init__.py | Gaudeval/py-fuzzy-temporal-logic | df9be99b24a7955bf4e0c5cf6647f521dd7a255f | [
"BSD-3-Clause"
] | 195 | 2018-09-24T22:32:21.000Z | 2022-01-23T03:36:12.000Z | mtl/__init__.py | Gaudeval/py-fuzzy-temporal-logic | df9be99b24a7955bf4e0c5cf6647f521dd7a255f | [
"BSD-3-Clause"
] | 13 | 2018-10-06T12:53:16.000Z | 2022-02-17T04:43:03.000Z | # flake8: noqa
from mtl.ast import TOP, BOT
from mtl.ast import (Interval, And, G, Neg,
AtomicPred, WeakUntil, Next)
from mtl.parser import parse
| 27.833333 | 49 | 0.658683 | 24 | 167 | 4.583333 | 0.708333 | 0.190909 | 0.181818 | 0.290909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008065 | 0.257485 | 167 | 5 | 50 | 33.4 | 0.879032 | 0.071856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
aaf9baca6eace0f0c28844b6ce47e9893d6a98f0 | 41 | py | Python | speed_of_light_in_nanosecond.py | jjtoledo/Treinamento-Data-Science | 5117975109695b1de06ae43b416972e66a4b7773 | [
"MIT"
] | null | null | null | speed_of_light_in_nanosecond.py | jjtoledo/Treinamento-Data-Science | 5117975109695b1de06ae43b416972e66a4b7773 | [
"MIT"
] | null | null | null | speed_of_light_in_nanosecond.py | jjtoledo/Treinamento-Data-Science | 5117975109695b1de06ae43b416972e66a4b7773 | [
"MIT"
] | null | null | null | print 299792458 * 100 * 1.0 / 1000000000
| 20.5 | 40 | 0.707317 | 6 | 41 | 4.833333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.727273 | 0.195122 | 41 | 1 | 41 | 41 | 0.151515 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
c90e6bde57e77800ebd665dc0f2389b65f1f7620 | 97,087 | py | Python | gon/core/mix.py | lycantropos/gon | b3f811ece5989d1623b17d633a84071fbff6dd69 | [
"MIT"
] | 10 | 2020-07-18T12:55:52.000Z | 2022-03-20T07:09:10.000Z | gon/core/mix.py | lycantropos/gon | b3f811ece5989d1623b17d633a84071fbff6dd69 | [
"MIT"
] | 52 | 2019-07-11T16:59:01.000Z | 2022-03-29T19:41:59.000Z | gon/core/mix.py | lycantropos/gon | b3f811ece5989d1623b17d633a84071fbff6dd69 | [
"MIT"
] | 1 | 2020-03-22T12:56:07.000Z | 2020-03-22T12:56:07.000Z | from typing import Optional
from ground.hints import (Maybe,
Scalar)
from reprit.base import generate_repr
from .angle import Angle
from .compound import (Compound,
Indexable,
Linear,
Location,
Relation,
Shaped)
from .geometry import (Coordinate,
Geometry)
from .iterable import non_negative_min
from .multipoint import Multipoint
from .packing import (MIN_MIX_NON_EMPTY_COMPONENTS,
pack_mix)
from .point import Point
class Mix(Indexable[Coordinate]):
__slots__ = '_components', '_discrete', '_linear', '_shaped'
def __init__(self,
discrete: Maybe[Multipoint[Coordinate]],
linear: Maybe[Linear[Coordinate]],
shaped: Maybe[Shaped[Coordinate]]) -> None:
"""
Initializes mix.
Time complexity:
``O(1)``
Memory complexity:
``O(1)``
"""
self._components = self._discrete, self._linear, self._shaped = (
discrete, linear, shaped)
__repr__ = generate_repr(__init__)
def __and__(self, other: Compound[Coordinate]) -> Compound[Coordinate]:
"""
Returns intersection of the mix with the other geometry.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(elements_count)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix & mix == mix
True
"""
discrete_part = self.discrete & other
linear_part = self.linear & other
shaped_part = self.shaped & other
context = self._context
if isinstance(linear_part, Multipoint):
shaped_part |= linear_part
linear_part = context.empty
elif isinstance(linear_part, Mix):
shaped_part |= linear_part.discrete
linear_part = linear_part.linear
if isinstance(shaped_part, Multipoint):
linear_part |= shaped_part
if isinstance(linear_part, Mix):
discrete_part |= linear_part.discrete
linear_part = linear_part.linear
shaped_part = context.empty
elif isinstance(shaped_part, Linear):
linear_part |= shaped_part
shaped_part = context.empty
elif isinstance(shaped_part, Mix):
linear_part = (linear_part | shaped_part.linear
| shaped_part.discrete)
shaped_part = shaped_part.shaped
if isinstance(linear_part, Multipoint):
discrete_part |= linear_part
linear_part = context.empty
elif isinstance(linear_part, Mix):
discrete_part |= linear_part.discrete
linear_part = linear_part.linear
return pack_mix(discrete_part, linear_part, shaped_part, context.empty,
context.mix_cls)
__rand__ = __and__
def __contains__(self, point: Point[Coordinate]) -> bool:
"""
Checks if the mix contains the point.
Time complexity:
``O(log elements_count)`` expected after indexing,
``O(elements_count)`` worst after indexing or without it
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> Point(0, 0) in mix
True
>>> Point(1, 1) in mix
True
>>> Point(2, 2) in mix
True
>>> Point(3, 3) in mix
True
>>> Point(4, 3) in mix
True
>>> Point(5, 2) in mix
True
>>> Point(6, 1) in mix
True
>>> Point(7, 0) in mix
False
"""
return bool(self.locate(point))
def __eq__(self, other: 'Mix[Coordinate]') -> bool:
"""
Checks if mixes are equal.
Time complexity:
``O(elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix == mix
True
"""
return self is other or (self._components == other._components
if isinstance(other, Mix)
else NotImplemented)
def __ge__(self, other: Compound[Coordinate]) -> bool:
"""
Checks if the mix is a superset of the other geometry.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix >= mix
True
"""
return (other is self._context.empty
or self == other
or ((self.shaped is not self._context.empty
or not isinstance(other, Shaped)
and (not isinstance(other, Mix)
or other.shaped is self._context.empty))
and self.relate(other) in (Relation.EQUAL,
Relation.COMPONENT,
Relation.ENCLOSED,
Relation.WITHIN)
if isinstance(other, Compound)
else NotImplemented))
def __gt__(self, other: Compound[Coordinate]) -> bool:
"""
Checks if the mix is a strict superset of the other geometry.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix > mix
False
"""
return (other is self._context.empty
or self != other
and ((self.shaped is not self._context.empty
or not isinstance(other, Shaped)
and (not isinstance(other, Mix)
or other.shaped is self._context.empty))
and self.relate(other) in (Relation.COMPONENT,
Relation.ENCLOSED,
Relation.WITHIN)
if isinstance(other, Compound)
else NotImplemented))
def __hash__(self) -> int:
"""
Returns hash value of the mix.
Time complexity:
``O(components_size)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> hash(mix) == hash(mix)
True
"""
return hash(self._components)
def __le__(self, other: Compound[Coordinate]) -> bool:
"""
Checks if the mix is a subset of the other geometry.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix <= mix
True
"""
return (self == other
or (not isinstance(other, Multipoint)
and (self.shaped is self._context.empty
or not isinstance(other, Linear)
and (not isinstance(other, Mix)
or other.shaped is not self._context.empty))
and self.relate(other) in (Relation.COVER,
Relation.ENCLOSES,
Relation.COMPOSITE,
Relation.EQUAL)
if isinstance(other, Compound)
else NotImplemented))
def __lt__(self, other: Compound[Coordinate]) -> bool:
"""
Checks if the mix is a strict subset of the other geometry.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix < mix
False
"""
return (self != other
and (not isinstance(other, Multipoint)
and (self.shaped is self._context.empty
or not isinstance(other, Linear)
and (not isinstance(other, Mix)
or other.shaped is not self._context.empty))
and self.relate(other) in (Relation.COVER,
Relation.ENCLOSES,
Relation.COMPOSITE)
if isinstance(other, Compound)
else NotImplemented))
def __or__(self, other: Compound[Coordinate]) -> Compound[Coordinate]:
"""
Returns union of the mix with the other geometry.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(elements_count)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix | mix == mix
True
"""
context = self._context
if isinstance(other, Multipoint):
return context.mix_cls(self.discrete
| (other - self.shaped - self.linear),
self.linear, self.shaped)
elif isinstance(other, Linear):
discrete_part, linear_part = self.discrete, self.linear
shaped_part = self.shaped | other
if isinstance(shaped_part, Linear):
linear_part = linear_part | shaped_part | discrete_part
shaped_part = context.empty
elif isinstance(shaped_part, Mix):
linear_part = linear_part | shaped_part.linear | discrete_part
shaped_part = shaped_part.shaped
else:
# other is subset of the shaped component
return pack_mix(discrete_part, linear_part, shaped_part,
context.empty, context.mix_cls)
if isinstance(linear_part, Mix):
discrete_part, linear_part = (linear_part.discrete,
linear_part.linear)
else:
discrete_part = context.empty
return pack_mix(discrete_part, linear_part, shaped_part,
context.empty, context.mix_cls)
elif isinstance(other, (Shaped, Mix)):
return self.shaped | other | self.linear | self.discrete
else:
return NotImplemented
__ror__ = __or__
def __rsub__(self, other: Compound[Coordinate]) -> Compound[Coordinate]:
"""
Returns difference of the other geometry with the mix.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
"""
return ((other - self.discrete) & (other - self.linear)
& other - self.shaped)
def __sub__(self, other: Compound[Coordinate]) -> Compound[Coordinate]:
"""
Returns difference of the mix with the other geometry.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (EMPTY, Contour, Mix, Multipoint, Point,
... Polygon, Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix - mix is EMPTY
True
"""
return pack_mix(self.discrete - other, self.linear - other,
self.shaped - other, self._context.empty,
self._context.mix_cls)
    def __xor__(self, other: Compound[Coordinate]) -> Compound[Coordinate]:
        """
        Returns symmetric difference of the mix with the other geometry.

        Time complexity:
            ``O(elements_count * log elements_count)``
        Memory complexity:
            ``O(elements_count)``

        where

        .. code-block:: python

            elements_count = (discrete_size + linear_size
                              + shaped_vertices_count)
            discrete_size = len(points)
            linear_size = len(segments)
            shaped_vertices_count = sum(len(polygon.border.vertices)
                                        + sum(len(hole.vertices)
                                              for hole in polygon.holes)
                                        for polygon in polygons)
            points = [] if self.discrete is EMPTY else self.discrete.points
            segments = ([]
                        if self.linear is EMPTY
                        else ([self.linear]
                              if isinstance(self.linear, Segment)
                              else self.linear.segments))
            polygons = ([]
                        if self.shaped is EMPTY
                        else (self.shaped.polygons
                              if isinstance(self.shaped, Multipolygon)
                              else [self.shaped]))

        >>> from gon.base import (EMPTY, Contour, Mix, Multipoint, Point,
        ...                       Polygon, Segment)
        >>> mix = Mix(Multipoint([Point(3, 3)]),
        ...           Segment(Point(6, 6), Point(6, 8)),
        ...           Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
        ...                            Point(0, 6)]),
        ...                   [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
        ...                             Point(4, 2)])]))
        >>> mix ^ mix is EMPTY
        True
        """
        context = self._context
        if isinstance(other, Multipoint):
            # Points lying on the shaped/linear components are absorbed
            # by them; only the rest toggles the discrete component.
            rest_other = other - self.shaped - self.linear
            return pack_mix(self.discrete ^ rest_other, self.linear,
                            self.shaped, context.empty, context.mix_cls)
        elif isinstance(other, Linear):
            discrete_part, linear_part = self.discrete, self.linear
            shaped_part = self.shaped ^ other
            if isinstance(shaped_part, Linear):
                # Shaped component degenerated entirely into linear parts.
                linear_part = linear_part ^ shaped_part ^ discrete_part
                shaped_part = context.empty
            elif isinstance(shaped_part, Mix):
                # Linear leftovers of the shaped XOR join the linear part.
                linear_part = linear_part ^ shaped_part.linear ^ discrete_part
                shaped_part = shaped_part.shaped
            else:
                # other is subset of the shaped component
                return pack_mix(discrete_part, linear_part, shaped_part,
                                context.empty, context.mix_cls)
            if isinstance(linear_part, Mix):
                discrete_part, linear_part = (linear_part.discrete,
                                              linear_part.linear)
            else:
                discrete_part = context.empty
            return pack_mix(discrete_part, linear_part, shaped_part,
                            context.empty, context.mix_cls)
        elif isinstance(other, (Shaped, Mix)):
            # Fold components into the result one by one; XOR with the
            # shaped part dominates, then linear, then discrete.
            return self.shaped ^ other ^ self.linear ^ self.discrete
        else:
            return NotImplemented
__rxor__ = __xor__
@property
def centroid(self) -> Point[Coordinate]:
"""
Returns centroid of the mix.
Time complexity:
``O(elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix.centroid == Point(3, 3)
True
"""
return (self.linear
if self.shaped is self._context.empty
else self.shaped).centroid
    @property
    def discrete(self) -> Maybe[Multipoint[Coordinate]]:
        """
        Returns discrete component of the mix.

        The component is ``EMPTY`` when the mix has no multipoint part.

        Time complexity:
            ``O(1)``
        Memory complexity:
            ``O(1)``

        >>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
        ...                       Segment)
        >>> mix = Mix(Multipoint([Point(3, 3)]),
        ...           Segment(Point(6, 6), Point(6, 8)),
        ...           Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
        ...                            Point(0, 6)]),
        ...                   [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
        ...                             Point(4, 2)])]))
        >>> mix.discrete == Multipoint([Point(3, 3)])
        True
        """
        return self._discrete
    @property
    def shaped(self) -> Maybe[Shaped[Coordinate]]:
        """
        Returns shaped component of the mix.

        The component is ``EMPTY`` when the mix has no polygonal part.

        Time complexity:
            ``O(1)``
        Memory complexity:
            ``O(1)``

        >>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
        ...                       Segment)
        >>> mix = Mix(Multipoint([Point(3, 3)]),
        ...           Segment(Point(6, 6), Point(6, 8)),
        ...           Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
        ...                            Point(0, 6)]),
        ...                   [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
        ...                             Point(4, 2)])]))
        >>> mix.shaped == Polygon(Contour([Point(0, 0), Point(6, 0),
        ...                               Point(6, 6), Point(0, 6)]),
        ...                       [Contour([Point(2, 2), Point(2, 4),
        ...                                 Point(4, 4), Point(4, 2)])])
        True
        """
        return self._shaped
    @property
    def linear(self) -> Maybe[Linear[Coordinate]]:
        """
        Returns linear component of the mix.

        The component is ``EMPTY`` when the mix has no segment/contour part.

        Time complexity:
            ``O(1)``
        Memory complexity:
            ``O(1)``

        >>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
        ...                       Segment)
        >>> mix = Mix(Multipoint([Point(3, 3)]),
        ...           Segment(Point(6, 6), Point(6, 8)),
        ...           Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
        ...                            Point(0, 6)]),
        ...                   [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
        ...                             Point(4, 2)])]))
        >>> mix.linear == Segment(Point(6, 6), Point(6, 8))
        True
        """
        return self._linear
def distance_to(self, other: Geometry[Coordinate]) -> Scalar:
"""
Returns distance between the mix and the other geometry.
Time complexity:
``O(elements_count)``
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix.distance_to(mix) == 0
True
"""
return non_negative_min(component.distance_to(other)
for component in self._components
if component is not self._context.empty)
def index(self) -> None:
"""
Pre-processes the mix to potentially improve queries.
Time complexity:
``O(elements_count * log elements_count)`` expected,
``O(elements_count ** 2)`` worst
Memory complexity:
``O(elements_count)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix.index()
"""
if isinstance(self.discrete, Indexable):
self.discrete.index()
if isinstance(self.linear, Indexable):
self.linear.index()
if isinstance(self.shaped, Indexable):
self.shaped.index()
def locate(self, point: Point[Coordinate]) -> Location:
"""
Finds location of the point relative to the mix.
Time complexity:
``O(log elements_count)`` expected after indexing,
``O(elements_count)`` worst after indexing or without it
Memory complexity:
``O(1)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix.locate(Point(0, 0)) is Location.BOUNDARY
True
>>> mix.locate(Point(1, 1)) is Location.INTERIOR
True
>>> mix.locate(Point(2, 2)) is Location.BOUNDARY
True
>>> mix.locate(Point(3, 3)) is Location.BOUNDARY
True
>>> mix.locate(Point(4, 3)) is Location.BOUNDARY
True
>>> mix.locate(Point(5, 2)) is Location.INTERIOR
True
>>> mix.locate(Point(6, 1)) is Location.BOUNDARY
True
>>> mix.locate(Point(7, 0)) is Location.EXTERIOR
True
"""
for candidate in self._components:
location = candidate.locate(point)
if location is not Location.EXTERIOR:
return location
return Location.EXTERIOR
def relate(self, other: Compound[Coordinate]) -> Relation:
"""
Finds relation between the mix and the other geometry.
Time complexity:
``O(elements_count * log elements_count)``
Memory complexity:
``O(elements_count)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix.relate(mix) is Relation.EQUAL
True
"""
return (self._relate_discrete(other)
if isinstance(other, Multipoint)
else (self._relate_linear(other)
if isinstance(other, Linear)
else (self._relate_shaped(other)
if isinstance(other, Shaped)
else (self._relate_mix(other)
if isinstance(other, Mix)
else other.relate(self).complement))))
def rotate(self,
angle: Angle,
point: Optional[Point[Coordinate]] = None) -> 'Mix[Coordinate]':
"""
Rotates the mix by given angle around given point.
Time complexity:
``O(elements_count)``
Memory complexity:
``O(elements_count)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Angle, Contour, Mix, Multipoint, Point,
... Polygon, Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix.rotate(Angle(1, 0)) == mix
True
>>> (mix.rotate(Angle(0, 1), Point(1, 1))
... == Mix(Multipoint([Point(-1, 3)]),
... Segment(Point(-4, 6), Point(-6, 6)),
... Polygon(Contour([Point(2, 0), Point(2, 6), Point(-4, 6),
... Point(-4, 0)]),
... [Contour([Point(0, 2), Point(-2, 2), Point(-2, 4),
... Point(0, 4)])])))
True
"""
return self._context.mix_cls(self.discrete.rotate(angle, point),
self.linear.rotate(angle, point),
self.shaped.rotate(angle, point))
def scale(self,
factor_x: Coordinate,
factor_y: Optional[Coordinate] = None) -> Compound[Coordinate]:
"""
Scales the mix by given factor.
Time complexity:
``O(elements_count)``
Memory complexity:
``O(elements_count)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> mix.scale(1) == mix
True
>>> (mix.scale(1, 2)
... == Mix(Multipoint([Point(3, 6)]),
... Segment(Point(6, 12), Point(6, 16)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 12),
... Point(0, 12)]),
... [Contour([Point(2, 4), Point(2, 8), Point(4, 8),
... Point(4, 4)])])))
True
"""
if factor_y is None:
factor_y = factor_x
return (self._context.mix_cls(self.discrete.scale(factor_x, factor_y),
self.linear.scale(factor_x, factor_y),
self.shaped.scale(factor_x, factor_y))
if factor_x and factor_y
else ((self.discrete.scale(factor_x, factor_y)
| self.linear.scale(factor_x, factor_y)
| self.shaped.scale(factor_x, factor_y))
if factor_x or factor_y
else
self._context.multipoint_cls(
[self._context.point_cls(factor_x, factor_y)])))
def translate(self,
step_x: Coordinate,
step_y: Coordinate) -> 'Mix[Coordinate]':
"""
Translates the mix by given step.
Time complexity:
``O(elements_count)``
Memory complexity:
``O(elements_count)``
where
.. code-block:: python
elements_count = discrete_size + linear_size\
+ shaped_vertices_count
discrete_size = len(points)
linear_size = len(segments)
shaped_vertices_count = (sum(len(polygon.border.vertices)
+ sum(len(hole.vertices)
for hole in polygon.holes)
for polygon in polygons)
points = [] if self.discrete is EMPTY else self.discrete.points
segments = ([]
if self.linear is EMPTY
else ([self.linear]
if isinstance(self.linear, Segment)
else self.linear.segments))
polygons = ([]
if self.shaped is EMPTY
else (self.shaped.polygons
if isinstance(self.linear, Multipolygon)
else [self.shaped]))
>>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
... Segment)
>>> mix = Mix(Multipoint([Point(3, 3)]),
... Segment(Point(6, 6), Point(6, 8)),
... Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
... Point(0, 6)]),
... [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
... Point(4, 2)])]))
>>> (mix.translate(1, 2)
... == Mix(Multipoint([Point(4, 5)]),
... Segment(Point(7, 8), Point(7, 10)),
... Polygon(Contour([Point(1, 2), Point(7, 2), Point(7, 8),
... Point(1, 8)]),
... [Contour([Point(3, 4), Point(3, 6), Point(5, 6),
... Point(5, 4)])])))
True
"""
return self._context.mix_cls(self.discrete.translate(step_x, step_y),
self.linear.translate(step_x, step_y),
self.shaped.translate(step_x, step_y))
    def validate(self) -> None:
        """
        Checks if the mix is valid.

        Raises ``ValueError`` when fewer than the minimum number of
        components are non-empty, when the discrete component is not
        disjoint from the others, or when the linear component crosses,
        lies inside, or overlaps the border of the shaped component.

        Time complexity:
            ``O(elements_count * log elements_count)``
        Memory complexity:
            ``O(elements_count)``

        where ``elements_count`` is the total number of points, segments
        and polygon vertices of the mix's components.

        >>> from gon.base import (Contour, Mix, Multipoint, Point, Polygon,
        ...                       Segment)
        >>> mix = Mix(Multipoint([Point(3, 3)]),
        ...           Segment(Point(6, 6), Point(6, 8)),
        ...           Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6),
        ...                            Point(0, 6)]),
        ...                   [Contour([Point(2, 2), Point(2, 4), Point(4, 4),
        ...                             Point(4, 2)])]))
        >>> mix.validate()
        """
        # A mix must have at least ``MIN_MIX_NON_EMPTY_COMPONENTS``
        # non-empty components, otherwise a simpler geometry would do.
        if (sum(component is not self._context.empty for component in
                self._components)
                < MIN_MIX_NON_EMPTY_COMPONENTS):
            raise ValueError('At least {count} components should not be empty.'
                             .format(count=MIN_MIX_NON_EMPTY_COMPONENTS))
        # Each component must be valid on its own before cross-checks.
        for component in self._components:
            component.validate()
        # Points of the discrete component may not lie on the other
        # components.
        if (not self.discrete.disjoint(self.linear)
                or not self.discrete.disjoint(self.shaped)):
            raise ValueError('Discrete component should be disjoint '
                             'from other components.')
        shaped_linear_relation = self.shaped.relate(self.linear)
        # The linear component may only touch the shaped component:
        # crossing it or being contained in it is invalid.
        if shaped_linear_relation in (Relation.CROSS, Relation.COMPONENT,
                                      Relation.ENCLOSED, Relation.WITHIN):
            raise ValueError('Linear component should not {} shaped component.'
                             .format('cross'
                                     if (shaped_linear_relation
                                         is Relation.CROSS)
                                     else 'be subset of'))
        # Even a touch is invalid when the linear component overlaps or
        # coincides with part of a polygon border (outer or hole).
        elif (shaped_linear_relation is Relation.TOUCH
              and any(polygon.border.relate(self.linear)
                      in (Relation.OVERLAP, Relation.COMPOSITE)
                      or any(hole.relate(self.linear)
                             in (Relation.OVERLAP, Relation.COMPOSITE)
                             for hole in polygon.holes)
                      for polygon in (
                              self.shaped.polygons
                              if isinstance(self.shaped,
                                            self._context.multipolygon_cls)
                              else [self.shaped]))):
            raise ValueError('Linear component should not overlap '
                             'shaped component borders.')
    def _relate_linear(self, other: Linear[Coordinate]) -> Relation:
        """
        Finds relation between the mix and a linear geometry by combining
        the relations of the shaped, linear and discrete components.
        """
        if self.shaped is self._context.empty:
            # No shaped part: the linear component dominates the answer.
            linear_relation = self.linear.relate(other)
            if linear_relation is Relation.DISJOINT:
                discrete_relation = self.discrete.relate(other)
                # Discrete points on ``other`` downgrade to a touch.
                return (Relation.TOUCH
                        if discrete_relation is Relation.COMPOSITE
                        else discrete_relation)
            elif linear_relation is Relation.COMPOSITE:
                discrete_relation = self.discrete.relate(other)
                return (linear_relation
                        if discrete_relation is linear_relation
                        else Relation.OVERLAP)
            else:
                # ``other`` equal to the linear part is still only a
                # component of the whole mix.
                return (Relation.COMPONENT
                        if linear_relation is Relation.EQUAL
                        else linear_relation)
        else:
            shaped_relation = self.shaped.relate(other)
            if shaped_relation is Relation.DISJOINT:
                linear_relation = self.linear.relate(other)
                if linear_relation is Relation.DISJOINT:
                    discrete_relation = self.discrete.relate(other)
                    return (Relation.TOUCH
                            if discrete_relation is Relation.COMPOSITE
                            else discrete_relation)
                elif linear_relation in (Relation.TOUCH,
                                         Relation.CROSS,
                                         Relation.COMPONENT):
                    return linear_relation
                else:
                    return (Relation.COMPONENT
                            if linear_relation is Relation.EQUAL
                            else Relation.TOUCH)
            elif (shaped_relation is Relation.TOUCH
                  or shaped_relation is Relation.CROSS):
                # Check whether the part of ``other`` outside the shaped
                # component lies on the linear component.
                rest_other = other - self.shaped
                linear_relation = self.linear.relate(rest_other)
                return (Relation.COMPONENT
                        if (linear_relation is Relation.EQUAL
                            or linear_relation is Relation.COMPONENT)
                        else shaped_relation)
            else:
                return shaped_relation
    def _relate_mix(self, other: 'Mix[Coordinate]') -> Relation:
        """
        Finds relation between two mixes by case analysis on which shaped
        components exist and, when both exist, on how they relate;
        component relations are then combined into a single relation.
        """
        # Case 1: neither mix has a shaped component — compare the linear
        # components first, refine by the discrete ones.
        if self.shaped is other.shaped is self._context.empty:
            linear_components_relation = self.linear.relate(other.linear)
            if linear_components_relation is Relation.DISJOINT:
                return (linear_components_relation
                        if (self._relate_discrete(other.discrete)
                            is other._relate_discrete(self.discrete)
                            is linear_components_relation)
                        else Relation.TOUCH)
            elif linear_components_relation is Relation.COMPOSITE:
                discrete_relation = other._relate_discrete(self.discrete)
                return (linear_components_relation
                        if discrete_relation is Relation.COMPONENT
                        else Relation.OVERLAP)
            elif linear_components_relation is Relation.EQUAL:
                other_discrete_relation = self.discrete.relate(other.discrete)
                return (Relation.OVERLAP
                        if other_discrete_relation is Relation.DISJOINT
                        else other_discrete_relation)
            elif linear_components_relation is Relation.COMPONENT:
                other_discrete_relation = self._relate_discrete(other.discrete)
                return (linear_components_relation
                        if other_discrete_relation is Relation.COMPONENT
                        else Relation.OVERLAP)
            else:
                return linear_components_relation
        # Case 2: only ``other`` has a shaped component — relate our
        # components against it and complement where needed.
        elif self.shaped is self._context.empty:
            linear_relation = other._relate_linear(self.linear)
            if linear_relation is Relation.CROSS:
                return linear_relation
            discrete_relation = other._relate_discrete(self.discrete)
            if linear_relation is Relation.DISJOINT:
                return (discrete_relation
                        if discrete_relation in (Relation.DISJOINT,
                                                 Relation.TOUCH,
                                                 Relation.CROSS)
                        else (Relation.TOUCH
                              if discrete_relation is Relation.COMPONENT
                              else Relation.CROSS))
            elif linear_relation is Relation.TOUCH:
                return (Relation.CROSS
                        if discrete_relation in (Relation.CROSS,
                                                 Relation.ENCLOSED,
                                                 Relation.WITHIN)
                        else linear_relation)
            elif linear_relation is Relation.COMPONENT:
                return (Relation.TOUCH
                        if discrete_relation is Relation.DISJOINT
                        else (discrete_relation
                              if (discrete_relation is Relation.TOUCH
                                  or discrete_relation is Relation.CROSS)
                              else (Relation.COMPOSITE
                                    if discrete_relation is Relation.COMPONENT
                                    else Relation.ENCLOSES)))
            else:
                return (Relation.CROSS
                        if discrete_relation in (Relation.DISJOINT,
                                                 Relation.TOUCH,
                                                 Relation.CROSS)
                        else (Relation.COVER
                              if discrete_relation is Relation.WITHIN
                              else Relation.ENCLOSES))
        # Case 3: only ``self`` has a shaped component — mirror of case 2.
        elif other.shaped is self._context.empty:
            other_linear_relation = self._relate_linear(other.linear)
            if other_linear_relation is Relation.CROSS:
                return other_linear_relation
            other_discrete_relation = self._relate_discrete(other.discrete)
            if other_linear_relation is Relation.DISJOINT:
                return (other_discrete_relation
                        if other_discrete_relation in (Relation.DISJOINT,
                                                       Relation.TOUCH,
                                                       Relation.CROSS)
                        else (Relation.TOUCH
                              if other_discrete_relation is Relation.COMPONENT
                              else Relation.CROSS))
            elif other_linear_relation is Relation.TOUCH:
                return (Relation.CROSS
                        if other_discrete_relation in (Relation.CROSS,
                                                       Relation.ENCLOSED,
                                                       Relation.WITHIN)
                        else other_linear_relation)
            elif other_linear_relation is Relation.COMPONENT:
                return (Relation.TOUCH
                        if (other_discrete_relation is Relation.DISJOINT
                            or other_discrete_relation is Relation.TOUCH)
                        else (other_discrete_relation
                              if (other_discrete_relation is Relation.CROSS
                                  or (other_discrete_relation
                                      is Relation.COMPONENT))
                              else Relation.ENCLOSED))
            elif other_linear_relation is Relation.ENCLOSED:
                return (Relation.CROSS
                        if other_discrete_relation in (Relation.DISJOINT,
                                                       Relation.TOUCH,
                                                       Relation.CROSS)
                        else other_linear_relation)
            else:
                return (Relation.CROSS
                        if other_discrete_relation in (Relation.DISJOINT,
                                                       Relation.TOUCH,
                                                       Relation.CROSS)
                        else (Relation.ENCLOSED
                              if other_discrete_relation is Relation.COMPONENT
                              else other_linear_relation))
        # Case 4: both mixes have shaped components — branch on how the
        # shaped components relate to each other.
        shaped_components_relation = self.shaped.relate(other.shaped)
        if (shaped_components_relation is Relation.DISJOINT
                or shaped_components_relation is Relation.TOUCH):
            # Shaped parts do not overlap: any lower-dimensional component
            # falling inside the other's shaped part forces a cross.
            if self.linear is other.linear is self._context.empty:
                other_discrete_relation = self._relate_discrete(other.discrete)
                if other_discrete_relation is Relation.CROSS:
                    return other_discrete_relation
                elif (other_discrete_relation is Relation.ENCLOSED
                      or other_discrete_relation is Relation.WITHIN):
                    return Relation.CROSS
                else:
                    discrete_relation = other._relate_discrete(self.discrete)
                    if (discrete_relation
                            is other_discrete_relation
                            is Relation.DISJOINT):
                        return shaped_components_relation
                    elif discrete_relation is Relation.CROSS:
                        return discrete_relation
                    elif (discrete_relation is Relation.ENCLOSED
                          or discrete_relation is Relation.WITHIN):
                        return Relation.CROSS
                    else:
                        return Relation.TOUCH
            elif self.linear is self._context.empty:
                other_linear_relation = self._relate_linear(other.linear)
                if other_linear_relation is Relation.CROSS:
                    return other_linear_relation
                elif (other_linear_relation is Relation.ENCLOSED
                      or other_linear_relation is Relation.WITHIN):
                    return Relation.CROSS
                else:
                    discrete_relation = other._relate_discrete(self.discrete)
                    if discrete_relation is Relation.CROSS:
                        return discrete_relation
                    elif (discrete_relation is Relation.ENCLOSED
                          or discrete_relation is Relation.WITHIN):
                        return Relation.CROSS
                    elif other.discrete is self._context.empty:
                        return (shaped_components_relation
                                if (discrete_relation
                                    is other_linear_relation
                                    is Relation.DISJOINT)
                                else Relation.TOUCH)
                    else:
                        other_discrete_relation = self._relate_discrete(
                                other.discrete)
                        if other_discrete_relation is Relation.CROSS:
                            return other_discrete_relation
                        elif (other_discrete_relation is Relation.ENCLOSED
                              or other_discrete_relation is Relation.WITHIN):
                            return Relation.CROSS
                        elif (discrete_relation
                              is other_discrete_relation
                              is other_linear_relation
                              is Relation.DISJOINT):
                            return shaped_components_relation
                        else:
                            return Relation.TOUCH
            elif other.linear is self._context.empty:
                linear_relation = other._relate_linear(self.linear)
                if linear_relation is Relation.CROSS:
                    return linear_relation
                elif (linear_relation is Relation.ENCLOSED
                      or linear_relation is Relation.WITHIN):
                    return Relation.CROSS
                else:
                    other_discrete_relation = self._relate_discrete(
                            other.discrete)
                    if other_discrete_relation is Relation.CROSS:
                        return other_discrete_relation
                    elif (other_discrete_relation is Relation.ENCLOSED
                          or other_discrete_relation is Relation.WITHIN):
                        return Relation.CROSS
                    elif self.discrete is self._context.empty:
                        return (shaped_components_relation
                                if (linear_relation
                                    is other_discrete_relation
                                    is Relation.DISJOINT)
                                else Relation.TOUCH)
                    else:
                        discrete_relation = other._relate_discrete(
                                self.discrete)
                        if discrete_relation is Relation.CROSS:
                            return discrete_relation
                        elif (discrete_relation is Relation.ENCLOSED
                              or discrete_relation is Relation.WITHIN):
                            return Relation.CROSS
                        elif (discrete_relation
                              is linear_relation
                              is other_discrete_relation
                              is Relation.DISJOINT):
                            return shaped_components_relation
                        else:
                            return Relation.TOUCH
            else:
                other_linear_relation = self._relate_linear(other.linear)
                if other_linear_relation is Relation.CROSS:
                    return other_linear_relation
                elif (other_linear_relation is Relation.ENCLOSED
                      or other_linear_relation is Relation.WITHIN):
                    return Relation.CROSS
                else:
                    linear_relation = other._relate_linear(self.linear)
                    if linear_relation is Relation.CROSS:
                        return linear_relation
                    elif (linear_relation is Relation.ENCLOSED
                          or linear_relation is Relation.WITHIN):
                        return Relation.CROSS
                    elif self.discrete is self._context.empty:
                        other_discrete_relation = self._relate_discrete(
                                other.discrete)
                        return (other_discrete_relation
                                if other_discrete_relation is Relation.CROSS
                                else
                                (Relation.CROSS
                                 if (other_discrete_relation
                                     is Relation.ENCLOSED
                                     or other_discrete_relation
                                     is Relation.WITHIN)
                                 else (shaped_components_relation
                                       if (other_discrete_relation
                                           is linear_relation
                                           is other_linear_relation
                                           is Relation.DISJOINT)
                                       else Relation.TOUCH)))
                    elif other.discrete is self._context.empty:
                        discrete_relation = other._relate_discrete(
                                self.discrete)
                        return (discrete_relation
                                if discrete_relation is Relation.CROSS
                                else
                                (Relation.CROSS
                                 if (discrete_relation is Relation.ENCLOSED
                                     or discrete_relation is Relation.WITHIN)
                                 else (shaped_components_relation
                                       if (discrete_relation
                                           is linear_relation
                                           is other_linear_relation
                                           is Relation.DISJOINT)
                                       else Relation.TOUCH)))
                    else:
                        other_discrete_relation = self._relate_discrete(
                                other.discrete)
                        if other_discrete_relation is Relation.CROSS:
                            return other_discrete_relation
                        elif (other_discrete_relation is Relation.ENCLOSED
                              or other_discrete_relation is Relation.WITHIN):
                            return Relation.CROSS
                        else:
                            discrete_relation = other._relate_discrete(
                                    self.discrete)
                            return (discrete_relation
                                    if discrete_relation is Relation.CROSS
                                    else (Relation.CROSS
                                          if (discrete_relation
                                              is Relation.ENCLOSED
                                              or discrete_relation
                                              is Relation.WITHIN)
                                          else
                                          (shaped_components_relation
                                           if (discrete_relation
                                               is linear_relation
                                               is other_discrete_relation
                                               is other_linear_relation
                                               is Relation.DISJOINT)
                                           else Relation.TOUCH)))
        # Case 5: our shaped component contains the other's — check that
        # our remaining components are also covered by ``other``.
        elif shaped_components_relation in (Relation.COVER,
                                            Relation.ENCLOSES,
                                            Relation.COMPOSITE):
            if self.linear is self._context.empty:
                discrete_relation = (other._relate_discrete(self.discrete)
                                     .complement)
                return (shaped_components_relation
                        if discrete_relation is shaped_components_relation
                        else (Relation.ENCLOSES
                              if discrete_relation in (Relation.COVER,
                                                       Relation.ENCLOSES,
                                                       Relation.COMPOSITE)
                              else Relation.OVERLAP))
            else:
                linear_relation = other._relate_linear(self.linear).complement
                if linear_relation is shaped_components_relation:
                    if self.discrete is self._context.empty:
                        return shaped_components_relation
                    else:
                        discrete_relation = other._relate_discrete(
                                self.discrete).complement
                        return (shaped_components_relation
                                if (discrete_relation
                                    is shaped_components_relation)
                                else
                                (Relation.ENCLOSES
                                 if discrete_relation in (Relation.COVER,
                                                          Relation.ENCLOSES,
                                                          Relation.COMPOSITE)
                                 else Relation.OVERLAP))
                elif linear_relation in (Relation.COVER,
                                         Relation.ENCLOSES,
                                         Relation.COMPOSITE):
                    if self.discrete is self._context.empty:
                        return Relation.ENCLOSES
                    else:
                        discrete_relation = other._relate_discrete(
                                self.discrete).complement
                        return (Relation.ENCLOSES
                                if discrete_relation in (Relation.COVER,
                                                         Relation.ENCLOSES,
                                                         Relation.COMPOSITE)
                                else Relation.OVERLAP)
                else:
                    return Relation.OVERLAP
        # Case 6: shaped components are equal — the answer is decided by
        # the linear and discrete components alone.
        elif shaped_components_relation is Relation.EQUAL:
            linear_components_relation = self.linear.relate(other.linear)
            if self.linear is other.linear is self._context.empty:
                discrete_components_relation = self.discrete.relate(
                        other.discrete)
                return (
                    shaped_components_relation
                    if (self.discrete is other.discrete is self._context.empty
                        or discrete_components_relation is Relation.EQUAL)
                    else
                    (discrete_components_relation
                     if (discrete_components_relation is Relation.COMPOSITE
                         or discrete_components_relation is Relation.COMPONENT)
                     else Relation.OVERLAP))
            elif self.linear is self._context.empty:
                discrete_components_relation = other._relate_discrete(
                        self.discrete)
                return (
                    Relation.COMPOSITE
                    if (discrete_components_relation is Relation.EQUAL
                        or discrete_components_relation is Relation.COMPONENT)
                    else Relation.OVERLAP)
            elif other.linear is self._context.empty:
                discrete_components_relation = self._relate_discrete(
                        other.discrete)
                return (
                    Relation.COMPONENT
                    if (discrete_components_relation is Relation.EQUAL
                        or discrete_components_relation is Relation.COMPONENT)
                    else Relation.OVERLAP)
            elif linear_components_relation is Relation.COMPOSITE:
                discrete_components_relation = other._relate_discrete(
                        self.discrete)
                return (
                    linear_components_relation
                    if (self.discrete is self._context.empty
                        or discrete_components_relation is Relation.EQUAL
                        or discrete_components_relation is Relation.COMPONENT)
                    else Relation.OVERLAP)
            elif linear_components_relation is Relation.EQUAL:
                discrete_components_relation = self.discrete.relate(
                        other.discrete)
                return (
                    shaped_components_relation
                    if (self.discrete is other.discrete is self._context.empty
                        or discrete_components_relation is Relation.EQUAL)
                    else
                    (Relation.COMPOSITE
                     if self.discrete is self._context.empty
                     else
                     (Relation.COMPONENT
                      if other.discrete is self._context.empty
                      else
                      (discrete_components_relation
                       if
                       (discrete_components_relation is Relation.COMPONENT
                        or discrete_components_relation is Relation.COMPOSITE)
                       else Relation.OVERLAP))))
            elif linear_components_relation is Relation.COMPONENT:
                discrete_components_relation = self._relate_discrete(
                        other.discrete)
                return (
                    linear_components_relation
                    if (other.discrete is self._context.empty
                        or discrete_components_relation is Relation.EQUAL
                        or discrete_components_relation is Relation.COMPONENT)
                    else Relation.OVERLAP)
            else:
                return Relation.OVERLAP
        # Case 7: the other's shaped component contains ours — mirror of
        # case 5, checking that the other's extra components lie in us.
        elif shaped_components_relation in (Relation.COMPONENT,
                                            Relation.ENCLOSED,
                                            Relation.WITHIN):
            if other.linear is self._context.empty:
                discrete_relation = self._relate_discrete(other.discrete)
                return (shaped_components_relation
                        if discrete_relation is shaped_components_relation
                        else (Relation.ENCLOSED
                              if discrete_relation in (Relation.COMPONENT,
                                                       Relation.ENCLOSED,
                                                       Relation.WITHIN)
                              else Relation.OVERLAP))
            else:
                linear_relation = self._relate_linear(other.linear)
                if linear_relation is shaped_components_relation:
                    if other.discrete is self._context.empty:
                        return shaped_components_relation
                    else:
                        discrete_relation = self._relate_discrete(
                                other.discrete)
                        return (shaped_components_relation
                                if (discrete_relation
                                    is shaped_components_relation)
                                else
                                (Relation.ENCLOSED
                                 if discrete_relation in (Relation.COMPONENT,
                                                          Relation.ENCLOSED,
                                                          Relation.WITHIN)
                                 else Relation.OVERLAP))
                elif linear_relation in (Relation.COMPONENT,
                                         Relation.ENCLOSED,
                                         Relation.WITHIN):
                    if other.discrete is self._context.empty:
                        return Relation.ENCLOSED
                    else:
                        discrete_relation = self._relate_discrete(
                                other.discrete)
                        return (Relation.ENCLOSED
                                if discrete_relation in (Relation.COMPONENT,
                                                         Relation.ENCLOSED,
                                                         Relation.WITHIN)
                                else Relation.OVERLAP)
                else:
                    return Relation.OVERLAP
        # Case 8: shaped components overlap — the whole mixes overlap.
        else:
            return shaped_components_relation
def _relate_discrete(self, other: Multipoint[Coordinate]) -> Relation:
    """Relate this compound geometry (shaped/linear/discrete components)
    to a multipoint, combining per-component relations into one Relation.

    Strategy: classify against the highest-dimensional non-empty component
    first, then re-test the leftover points against the lower-dimensional
    components and refine the verdict.
    """
    if self.shaped is self._context.empty:
        # no areal component: decide from linear, refine with discrete
        linear_relation = self.linear.relate(other)
        if linear_relation is Relation.DISJOINT:
            discrete_relation = self.discrete.relate(other)
            return (discrete_relation
                    if discrete_relation is Relation.DISJOINT
                    else (Relation.COMPONENT
                          if (discrete_relation is Relation.COMPONENT
                              or discrete_relation is Relation.EQUAL)
                          else Relation.TOUCH))
        elif linear_relation is Relation.TOUCH:
            # points not lying on the linear part may still coincide with
            # the discrete component
            rest_other = other - self.linear
            discrete_relation = self.discrete.relate(rest_other)
            return (Relation.COMPONENT
                    if (discrete_relation is Relation.EQUAL
                        or discrete_relation is Relation.COMPONENT)
                    else linear_relation)
        else:
            return linear_relation
    else:
        shaped_relation = self.shaped.relate(other)
        if shaped_relation in (Relation.COMPONENT,
                               Relation.ENCLOSED,
                               Relation.WITHIN):
            # every point already interacts with the shaped component
            return shaped_relation
        elif (shaped_relation is Relation.TOUCH
              or shaped_relation is Relation.CROSS):
            # some points fall outside the shaped component; re-test them
            rest_other = other - self.shaped
            if self.linear is self._context.empty:
                discrete_relation = self.discrete.relate(rest_other)
                return (Relation.COMPONENT
                        if (discrete_relation is Relation.EQUAL
                            or discrete_relation is Relation.COMPONENT)
                        else shaped_relation)
            else:
                linear_relation = self.linear.relate(rest_other)
                if linear_relation is Relation.DISJOINT:
                    discrete_relation = self.discrete.relate(rest_other)
                    return ((Relation.COMPONENT
                             if shaped_relation is Relation.TOUCH
                             else Relation.ENCLOSED)
                            if (discrete_relation is Relation.COMPONENT
                                or discrete_relation is Relation.EQUAL)
                            else shaped_relation)
                elif linear_relation is Relation.TOUCH:
                    rest_other -= self.linear
                    discrete_relation = self.discrete.relate(rest_other)
                    return (Relation.COMPONENT
                            if (discrete_relation is Relation.COMPONENT
                                or discrete_relation is Relation.EQUAL)
                            else shaped_relation)
                else:
                    return (Relation.COMPONENT
                            if shaped_relation is Relation.TOUCH
                            else Relation.ENCLOSED)
        else:
            # shaped component disjoint from the points: fall back to the
            # linear component, refine with discrete
            linear_relation = self.linear.relate(other)
            if linear_relation is Relation.DISJOINT:
                discrete_relation = self.discrete.relate(other)
                return (shaped_relation
                        if discrete_relation is Relation.DISJOINT
                        else (Relation.COMPONENT
                              if (discrete_relation is Relation.COMPONENT
                                  or discrete_relation is Relation.EQUAL)
                              else Relation.TOUCH))
            elif linear_relation is Relation.TOUCH:
                rest_other = other - self.linear
                discrete_relation = self.discrete.relate(rest_other)
                return (shaped_relation
                        if discrete_relation is Relation.DISJOINT
                        else (Relation.COMPONENT
                              if (discrete_relation is Relation.COMPONENT
                                  or discrete_relation is Relation.EQUAL)
                              else Relation.TOUCH))
            else:
                return linear_relation
def _relate_shaped(self, other: Shaped[Coordinate]) -> Relation:
    """Relate this compound geometry (shaped/linear/discrete components)
    to a shaped geometry, merging the per-component relations.

    Empty components are skipped; the shaped component dominates when
    present, and the lower-dimensional components can only refine the
    verdict (e.g. COMPONENT/EQUAL of points/segments on the boundary).
    """
    if self.shaped is self._context.empty:
        # no areal component of our own: classify via the linear part,
        # refine with the discrete part
        linear_relation = self.linear.relate(other)
        if (linear_relation is Relation.DISJOINT
                or linear_relation is Relation.TOUCH):
            discrete_relation = self.discrete.relate(other)
            return (linear_relation
                    if discrete_relation is Relation.DISJOINT
                    else (discrete_relation
                          if (discrete_relation is Relation.TOUCH
                              or discrete_relation is Relation.CROSS)
                          else (Relation.TOUCH
                                if (discrete_relation
                                    is Relation.COMPOSITE)
                                else Relation.CROSS)))
        elif (linear_relation is Relation.COVER
              or linear_relation is Relation.ENCLOSES):
            discrete_relation = self.discrete.relate(other)
            return (Relation.CROSS
                    if (discrete_relation is Relation.DISJOINT
                        or discrete_relation is Relation.TOUCH)
                    else (discrete_relation
                          if (discrete_relation is linear_relation
                              or discrete_relation is Relation.CROSS)
                          else Relation.ENCLOSES))
        elif linear_relation is Relation.COMPOSITE:
            discrete_relation = self.discrete.relate(other)
            return (Relation.TOUCH
                    if discrete_relation is Relation.DISJOINT
                    else (discrete_relation
                          if (discrete_relation is Relation.TOUCH
                              or discrete_relation is Relation.CROSS)
                          else (linear_relation
                                if discrete_relation is linear_relation
                                else Relation.CROSS)))
        else:
            return linear_relation
    else:
        shaped_relation = self.shaped.relate(other)
        if shaped_relation is Relation.DISJOINT:
            # areal parts disjoint: linear/discrete parts can at most
            # touch or cross the other shape
            linear_relation = self.linear.relate(other)
            if linear_relation is Relation.DISJOINT:
                discrete_relation = self.discrete.relate(other)
                return (discrete_relation
                        if discrete_relation in (Relation.DISJOINT,
                                                 Relation.TOUCH,
                                                 Relation.CROSS)
                        else (Relation.TOUCH
                              if discrete_relation is Relation.COMPOSITE
                              else Relation.CROSS))
            elif (linear_relation is Relation.TOUCH
                  or linear_relation is Relation.COMPOSITE):
                discrete_relation = self.discrete.relate(other)
                return (Relation.TOUCH
                        if discrete_relation in (Relation.DISJOINT,
                                                 Relation.TOUCH,
                                                 Relation.COMPOSITE)
                        else Relation.CROSS)
            else:
                return Relation.CROSS
        elif shaped_relation is Relation.TOUCH:
            linear_relation = self.linear.relate(other)
            if linear_relation in (Relation.DISJOINT,
                                   Relation.TOUCH,
                                   Relation.COMPOSITE):
                discrete_relation = self.discrete.relate(other)
                return (shaped_relation
                        if discrete_relation in (Relation.DISJOINT,
                                                 Relation.TOUCH,
                                                 Relation.COMPOSITE)
                        else Relation.CROSS)
            else:
                return Relation.CROSS
        elif (shaped_relation is Relation.COVER
              or shaped_relation is Relation.COMPOSITE):
            # our areal part covers/composes the other shape; extra
            # linear/discrete parts can demote the relation
            if self.linear is self._context.empty:
                discrete_relation = self.discrete.relate(other)
                return (Relation.OVERLAP
                        if discrete_relation in (Relation.DISJOINT,
                                                 Relation.TOUCH,
                                                 Relation.CROSS)
                        else (shaped_relation
                              if discrete_relation is shaped_relation
                              else Relation.ENCLOSES))
            else:
                linear_relation = self.linear.relate(other)
                if linear_relation in (Relation.DISJOINT,
                                       Relation.TOUCH,
                                       Relation.CROSS):
                    return Relation.OVERLAP
                elif self.discrete is self._context.empty:
                    return (shaped_relation
                            if linear_relation is shaped_relation
                            else Relation.ENCLOSES)
                else:
                    discrete_relation = self.discrete.relate(other)
                    return (Relation.OVERLAP
                            if discrete_relation in (Relation.DISJOINT,
                                                     Relation.TOUCH,
                                                     Relation.CROSS)
                            else (shaped_relation
                                  if (discrete_relation
                                      is linear_relation
                                      is shaped_relation)
                                  else Relation.ENCLOSES))
        elif shaped_relation is Relation.ENCLOSES:
            if self.linear is self._context.empty:
                discrete_relation = self.discrete.relate(other)
                return (Relation.OVERLAP
                        if discrete_relation in (Relation.DISJOINT,
                                                 Relation.TOUCH,
                                                 Relation.CROSS)
                        else Relation.ENCLOSES)
            else:
                linear_relation = self.linear.relate(other)
                if linear_relation in (Relation.DISJOINT,
                                       Relation.TOUCH,
                                       Relation.CROSS):
                    return Relation.OVERLAP
                elif self.discrete is self._context.empty:
                    return shaped_relation
                else:
                    discrete_relation = self.discrete.relate(other)
                    return (Relation.OVERLAP
                            if discrete_relation in (Relation.DISJOINT,
                                                     Relation.TOUCH,
                                                     Relation.CROSS)
                            else Relation.ENCLOSES)
        else:
            # EQUAL of the areal parts degrades to COMPONENT because the
            # compound also has extra (linear/discrete) parts
            return (Relation.COMPONENT
                    if shaped_relation is Relation.EQUAL
                    else shaped_relation)
| 48.015331 | 79 | 0.453727 | 8,243 | 97,087 | 5.19835 | 0.02305 | 0.044574 | 0.068891 | 0.047328 | 0.900607 | 0.871529 | 0.853116 | 0.827515 | 0.792695 | 0.762614 | 0 | 0.012935 | 0.469661 | 97,087 | 2,021 | 80 | 48.03909 | 0.819282 | 0.372975 | 0 | 0.703093 | 1 | 0 | 0.006095 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029897 | false | 0 | 0.010309 | 0 | 0.187629 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c9250b46824585a5b2577d6247c6c70e9edd443f | 5,557 | py | Python | pyaz/synapse/role/assignment/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/synapse/role/assignment/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/synapse/role/assignment/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | 1 | 2022-02-03T09:12:01.000Z | 2022-02-03T09:12:01.000Z | '''
Manage Synapse's role assignments.
'''
from .... pyaz_utils import _call_az
def create(role, workspace_name, assignee=None, assignee_object_id=None, assignee_principal_type=None, assignment_id=None, item=None, item_type=None, scope=None):
    '''
    Create a role assignment.

    Required Parameters:
    - role -- The role name/id that is assigned to the principal.
    - workspace_name -- The workspace name.

    Optional Parameters:
    - assignee -- Represent a user or service principal. Supported format: object id, user sign-in name, or service principal name.
    - assignee_object_id -- Use this parameter instead of '--assignee' to bypass Graph API invocation in case of insufficient privileges. This parameter only works with object ids for users, groups, service principals, and managed identities. For managed identities use the principal id. For service principals, use the object id and not the app id.
    - assignee_principal_type -- use with --assignee-object-id to avoid errors caused by propagation latency in AAD Graph
    - assignment_id -- Custom role assignment id in guid format, if not specified, assignment id will be randomly generated.
    - item -- Item granted access in the workspace. Using with --item-type to combine the scope of assignment
    - item_type -- Item type granted access in the workspace. Using with --item to combine the scope of assignment.
    - scope -- A scope defines the resources or artifacts that the access applies to. Synapse supports hierarchical scopes. Permissions granted at a higher-level scope are inherited by objects at a lower level. In Synapse RBAC, the top-level scope is a workspace. Assigning a role with workspace scope grants permissions to all applicable objects in the workspace.
    '''
    # locals() forwards every argument by name to the CLI wrapper; do not
    # introduce additional local variables before this call.
    return _call_az("az synapse role assignment create", locals())
# NOTE: intentionally shadows the builtin `list` to mirror the `az ... list` verb.
def list(workspace_name, assignee=None, assignee_object_id=None, item=None, item_type=None, role=None, scope=None):
    '''
    List role assignments.

    Required Parameters:
    - workspace_name -- The workspace name.

    Optional Parameters:
    - assignee -- Represent a user or service principal. Supported format: object id, user sign-in name, or service principal name.
    - assignee_object_id -- Use this parameter instead of '--assignee' to bypass Graph API invocation in case of insufficient privileges. This parameter only works with object ids for users, groups, service principals, and managed identities. For managed identities use the principal id. For service principals, use the object id and not the app id.
    - item -- Item granted access in the workspace. Using with --item-type to combine the scope of assignment
    - item_type -- Item type granted access in the workspace. Using with --item to combine the scope of assignment.
    - role -- The role name/id that is assigned to the principal.
    - scope -- A scope defines the resources or artifacts that the access applies to. Synapse supports hierarchical scopes. Permissions granted at a higher-level scope are inherited by objects at a lower level. In Synapse RBAC, the top-level scope is a workspace. Assigning a role with workspace scope grants permissions to all applicable objects in the workspace.
    '''
    # locals() forwards every argument by name to the CLI wrapper; do not
    # introduce additional local variables before this call.
    return _call_az("az synapse role assignment list", locals())
def show(id, workspace_name):
    '''
    Get a role assignment by id.

    Required Parameters:
    - id -- Id of the role that is assigned to the principal.
    - workspace_name -- The workspace name.
    '''
    # locals() forwards every argument by name to the CLI wrapper; do not
    # introduce additional local variables before this call.
    return _call_az("az synapse role assignment show", locals())
def delete(workspace_name, assignee=None, assignee_object_id=None, ids=None, item=None, item_type=None, role=None, scope=None, yes=None):
    '''
    Delete role assignments of workspace.

    Required Parameters:
    - workspace_name -- The workspace name.

    Optional Parameters:
    - assignee -- Represent a user or service principal. Supported format: object id, user sign-in name, or service principal name.
    - assignee_object_id -- Use this parameter instead of '--assignee' to bypass Graph API invocation in case of insufficient privileges. This parameter only works with object ids for users, groups, service principals, and managed identities. For managed identities use the principal id. For service principals, use the object id and not the app id.
    - ids -- space-separated role assignment ids. You should not provide --role or --assignee when --ids is provided.
    - item -- Item granted access in the workspace. Using with --item-type to combine the scope of assignment.Using az role assignment with filter condition before executing delete operation to be clearly aware of which assignments will be deleted.
    - item_type -- Item type granted access in the workspace. Using with --item to combine the scope of assignment.Using az role assignment with filter condition before executing delete operation to be clearly aware of which assignments will be deleted.
    - role -- The role name/id that is assigned to the principal.
    - scope -- A scope defines the resources or artifacts that the access applies to. Synapse supports hierarchical scopes. Permissions granted at a higher-level scope are inherited by objects at a lower level. In Synapse RBAC, the top-level scope is a workspace. Using az role assignment with filter condition before executing delete operation to be clearly aware of which assignments will be deleted.
    - yes -- Do not prompt for confirmation.
    '''
    # locals() forwards every argument by name to the CLI wrapper; do not
    # introduce additional local variables before this call.
    return _call_az("az synapse role assignment delete", locals())
| 75.094595 | 402 | 0.756883 | 814 | 5,557 | 5.114251 | 0.162162 | 0.024982 | 0.026904 | 0.025943 | 0.844583 | 0.844583 | 0.844583 | 0.821523 | 0.790055 | 0.790055 | 0 | 0 | 0.181033 | 5,557 | 73 | 403 | 76.123288 | 0.914744 | 0.823466 | 0 | 0 | 0 | 0 | 0.164948 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.444444 | false | 0 | 0.111111 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 9 |
a35500c4e3a0c75d46efaf498aa4fa32d55b8d70 | 3,888 | py | Python | wrappers/python/tests/anoncreds/test_proof_predicates.py | sklump/indy-sdk | ee05a89ddf60b42f7483bebf2d89a936e12730df | [
"Apache-2.0"
] | 636 | 2017-05-25T07:45:43.000Z | 2022-03-23T22:30:34.000Z | wrappers/python/tests/anoncreds/test_proof_predicates.py | sklump/indy-sdk | ee05a89ddf60b42f7483bebf2d89a936e12730df | [
"Apache-2.0"
] | 731 | 2017-05-29T07:15:08.000Z | 2022-03-31T07:55:58.000Z | wrappers/python/tests/anoncreds/test_proof_predicates.py | sklump/indy-sdk | ee05a89ddf60b42f7483bebf2d89a936e12730df | [
"Apache-2.0"
] | 904 | 2017-05-25T07:45:49.000Z | 2022-03-31T07:43:31.000Z | import json
import pytest
from indy.anoncreds import generate_nonce, prover_create_proof, verifier_verify_proof
from indy import error
@pytest.mark.asyncio
async def test_proof_works_for_valid_predicates(
        wallet_handle,
        prepopulated_wallet,
        gvt_schema_id,
        gvt_schema,
        master_secret_id,
        id_credential_1,
        issuer_1_gvt_cred_def_id
):
    """Proofs using every supported predicate operator (<, <=, >=, >) verify."""
    # the fixture also yields offers/requests that this test does not need
    credential_def_json, _, _, _, _ = prepopulated_wallet

    requested_credentials = {
        "self_attested_attributes": {},
        "requested_attributes": {
            "attr1_referent": {"cred_id": id_credential_1, "revealed": True}
        },
        "requested_predicates": {
            "predicate1_referent": {"cred_id": id_credential_1}
        }
    }

    schemas = {
        gvt_schema_id: gvt_schema
    }
    credential_defs = {
        issuer_1_gvt_cred_def_id: json.loads(credential_def_json)
    }

    # operator -> threshold; NOTE(review): assumes the prepopulated
    # credential's "age" attribute lies strictly between 18 and 88 — confirm
    criteria = {
        '<': 88,
        '<=': 88,
        '>=': 18,
        '>': 18
    }

    for (pred, threshold) in criteria.items():
        proof_req = {
            "nonce": await generate_nonce(),
            "name": "proof_req_1[age {} {}]".format(pred, threshold),
            "version": "0.1",
            "requested_attributes": {
                "attr1_referent": {"name": "name"}
            },
            "requested_predicates": {
                "predicate1_referent": {
                    "name": "age",
                    "p_type": pred,
                    "p_value": threshold
                }
            }
        }

        proof_json = await prover_create_proof(
            wallet_handle,
            json.dumps(proof_req),
            json.dumps(requested_credentials),
            master_secret_id,
            json.dumps(schemas),
            json.dumps(credential_defs),
            "{}"
        )

        valid = await verifier_verify_proof(
            json.dumps(proof_req),
            proof_json,
            json.dumps(schemas),
            json.dumps(credential_defs),
            "{}",
            "{}"
        )
        assert valid
@pytest.mark.asyncio
async def test_proof_works_for_bad_predicate(
        wallet_handle,
        prepopulated_wallet,
        gvt_schema_id,
        gvt_schema,
        master_secret_id,
        id_credential_1,
        issuer_1_gvt_cred_def_id
):
    """Unsupported predicate operators are rejected with CommonInvalidStructure."""
    # the fixture also yields offers/requests that this test does not need
    credential_def_json, _, _, _, _ = prepopulated_wallet

    requested_credentials = {
        "self_attested_attributes": {},
        "requested_attributes": {
            "attr1_referent": {"cred_id": id_credential_1, "revealed": True}
        },
        "requested_predicates": {
            "predicate1_referent": {"cred_id": id_credential_1}
        }
    }

    schemas = {
        gvt_schema_id: gvt_schema
    }
    credential_defs = {
        issuer_1_gvt_cred_def_id: json.loads(credential_def_json)
    }

    # equality and word-form operators are not valid p_type values; each
    # must make proof creation fail
    criteria = {
        '==': 28,
        'EQ': 28,
        '!=': 48,
        'NE': 48,
        'LT': 88,
        'LE': 88,
        'GE': 18,
        'GT': 18
    }

    for (pred, threshold) in criteria.items():
        proof_req = {
            "nonce": await generate_nonce(),
            "name": "proof_req_1[age {} {}]".format(pred, threshold),
            "version": "0.1",
            "requested_attributes": {
                "attr1_referent": {"name": "name"}
            },
            "requested_predicates": {
                "predicate1_referent": {
                    "name": "age",
                    "p_type": pred,
                    "p_value": threshold
                }
            }
        }

        with pytest.raises(error.CommonInvalidStructure):
            await prover_create_proof(
                wallet_handle,
                json.dumps(proof_req),
                json.dumps(requested_credentials),
                master_secret_id,
                json.dumps(schemas),
                json.dumps(credential_defs),
                "{}"
            )
| 26.27027 | 85 | 0.522119 | 359 | 3,888 | 5.245125 | 0.222841 | 0.052576 | 0.04461 | 0.047796 | 0.84599 | 0.84599 | 0.84599 | 0.825279 | 0.825279 | 0.780669 | 0 | 0.019465 | 0.365741 | 3,888 | 147 | 86 | 26.44898 | 0.74412 | 0 | 0 | 0.636364 | 1 | 0 | 0.141975 | 0.012346 | 0 | 0 | 0 | 0 | 0.007576 | 1 | 0 | false | 0 | 0.030303 | 0 | 0.030303 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a38846aa9d7825860fe20a25b7efa8a4efe961ba | 40 | py | Python | preprocess/__init__.py | gvil-research/leaf-segmentation-unet | 591c9be5085ff179dc3cec8c224e46fed86dd09f | [
"MIT"
] | 1 | 2022-02-28T01:40:30.000Z | 2022-02-28T01:40:30.000Z | preprocess/__init__.py | gvil-research/leaf-segmentation-unet | 591c9be5085ff179dc3cec8c224e46fed86dd09f | [
"MIT"
] | null | null | null | preprocess/__init__.py | gvil-research/leaf-segmentation-unet | 591c9be5085ff179dc3cec8c224e46fed86dd09f | [
"MIT"
] | null | null | null | from .generate_dataset import check_dir
| 20 | 39 | 0.875 | 6 | 40 | 5.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 40 | 1 | 40 | 40 | 0.916667 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a3891e6a237dee49e8a016d4c4ba5424bc437d1a | 2,139 | py | Python | config/end_user_views.py | NumanIbnMazid/snippetshub.com | 61141b0cbc0d7707fbc2687204774079b036ff5e | [
"MIT"
] | null | null | null | config/end_user_views.py | NumanIbnMazid/snippetshub.com | 61141b0cbc0d7707fbc2687204774079b036ff5e | [
"MIT"
] | null | null | null | config/end_user_views.py | NumanIbnMazid/snippetshub.com | 61141b0cbc0d7707fbc2687204774079b036ff5e | [
"MIT"
] | null | null | null | from django.views.generic import View, TemplateView
from django.shortcuts import render
from django.contrib import messages
# # -------------------------------------------------------------------
# # Home Page
# # -------------------------------------------------------------------
# class HomePageView(TemplateView):
# template_name = 'user_panel/pages/index.html'
# def get_context_data(self, **kwargs):
# context = super(HomePageView, self).get_context_data(**kwargs)
# context['meta_description'] = "snippetshub.com: A sophisticated web application containing a number of useful snippets. Services we provide like youtube video downloader, video converter, pdf converter, doc file converter, powerpoint file converter, image/video compressor, file resizer, fake data api and so on."
# context['meta_keywords'] = "youtube video downloader, file converter, image converter, file resizer, image resizer, fake data, fake api, free api, file compressor, video downloader, python, django, seo, youtube, facebook, facebook video downloader, web snippets, snippetshub"
# context["meta_author"] = "Numan Ibn Mazid"
# return context
class HomePageView(View):
    """Landing-page view rendering the index template with SEO metadata.

    Bug fixes relative to the previous version:
    - ``get_context_data`` called ``super().get_context_data(**kwargs)``,
      which does not exist on plain ``View`` and raised ``AttributeError``.
    - ``get`` never passed the context to ``render``, so the meta tags
      were never available to the template.
    """

    template_name = "user_panel/pages/index.html"

    def get(self, request, *args, **kwargs):
        # Forward the SEO context to the template (previously omitted).
        return render(request, self.template_name, self.get_context_data(**kwargs))

    def get_context_data(self, **kwargs):
        # Plain ``View`` provides no get_context_data(); build the dict here.
        context = dict(kwargs)
        context['meta_description'] = "snippetshub.com: A sophisticated web application containing a number of useful snippets. Services we provide like youtube video downloader, video converter, pdf converter, doc file converter, powerpoint file converter, image/video compressor, file resizer, fake data api and so on."
        context['meta_keywords'] = "youtube video downloader, file converter, image converter, file resizer, image resizer, fake data, fake api, free api, file compressor, video downloader, python, django, seo, youtube, facebook, facebook video downloader, web snippets, snippetshub"
        context["meta_author"] = "Numan Ibn Mazid"
        return context
a396f09d35bb6b8bfc7a66945b4838b73795435b | 221 | py | Python | addons/stock_account/tests/__init__.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | addons/stock_account/tests/__init__.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | addons/stock_account/tests/__init__.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | from . import test_account_move
from . import test_anglo_saxon_valuation_reconciliation_common
from . import test_stockvaluation
from . import test_stockvaluationlayer
from . import test_stock_valuation_layer_revaluation
| 36.833333 | 62 | 0.886878 | 28 | 221 | 6.535714 | 0.535714 | 0.273224 | 0.382514 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090498 | 221 | 5 | 63 | 44.2 | 0.910448 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6e59b2e2fd626fd00f48c844ef72b24b049a64e6 | 59 | py | Python | src/py_to_mindustry/__init__.py | Vitmalok/PyToMindustry | 518e64b71836e417b8767074a140de5241d00d5f | [
"MIT"
] | null | null | null | src/py_to_mindustry/__init__.py | Vitmalok/PyToMindustry | 518e64b71836e417b8767074a140de5241d00d5f | [
"MIT"
] | null | null | null | src/py_to_mindustry/__init__.py | Vitmalok/PyToMindustry | 518e64b71836e417b8767074a140de5241d00d5f | [
"MIT"
] | null | null | null | from py_to_mindustry.py_to_mindustry import py_to_mindustry | 59 | 59 | 0.932203 | 11 | 59 | 4.454545 | 0.454545 | 0.244898 | 0.795918 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.050847 | 59 | 1 | 59 | 59 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6e8bbbd5416bb5476044ee12d228f770e883da41 | 181 | py | Python | dymopy/__init__.py | bill-ash/dymopy | fc805f087a11e2755e5136673daaaa732253accb | [
"MIT"
] | null | null | null | dymopy/__init__.py | bill-ash/dymopy | fc805f087a11e2755e5136673daaaa732253accb | [
"MIT"
] | null | null | null | dymopy/__init__.py | bill-ash/dymopy | fc805f087a11e2755e5136673daaaa732253accb | [
"MIT"
] | null | null | null | from .client import Dymo
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
| 36.2 | 71 | 0.883978 | 19 | 181 | 8.368421 | 0.578947 | 0.201258 | 0.289308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011834 | 0.066298 | 181 | 4 | 72 | 45.25 | 0.928994 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
42e7186705e5adddbbdcd98892967d76af2113d7 | 431 | py | Python | mesfonctions.py | astrowalker2013/tennis-ia | 0536c604e5235b328c5a5996689ef1a146aa2a2c | [
"MIT"
] | null | null | null | mesfonctions.py | astrowalker2013/tennis-ia | 0536c604e5235b328c5a5996689ef1a146aa2a2c | [
"MIT"
] | null | null | null | mesfonctions.py | astrowalker2013/tennis-ia | 0536c604e5235b328c5a5996689ef1a146aa2a2c | [
"MIT"
] | null | null | null | def plus(element=3, element2=2):
#
# calcule element 1 et element2
print(elemen t,"+",element2,"=",element+element2)
# Test de ma fonction
if __name__ == "__name__":
plus(15,10)
def mult(element=3, element2=2):
    """Print the product of *element* and *element2* as "a * b = c"."""
    print(element, "*", element2, "=", element * element2)


# Quick self-test of the function.
# Bug fix: the guard compared against "__name__" instead of "__main__",
# so the demo call never ran when the script was executed directly.
if __name__ == "__main__":
    mult(5, 10)
280a79c79bfc18d3341db0c8011fe8d1f28c3b2d | 10,714 | py | Python | VisualSti/GenerateVisualStimuli.py | CFP106022219/ReichardtFlow2 | f12a886006003b83b63dccf35f5319d9b601c6c5 | [
"MIT"
] | null | null | null | VisualSti/GenerateVisualStimuli.py | CFP106022219/ReichardtFlow2 | f12a886006003b83b63dccf35f5319d9b601c6c5 | [
"MIT"
] | null | null | null | VisualSti/GenerateVisualStimuli.py | CFP106022219/ReichardtFlow2 | f12a886006003b83b63dccf35f5319d9b601c6c5 | [
"MIT"
] | null | null | null | # coding: utf8
import numpy as np
import cv2 as cv
import matplotlib.pyplot as plt
# from VisualSti import borst as bs
import pandas as pd
import pickle
import gc
class sti:
def __init__(self):
    """Initialise default frame geometry, timing and grating parameters."""
    # frame geometry and timing
    self.width = 320        # frame width in pixels
    self.height = 240       # frame height in pixels
    self.fps = 120          # frames per second
    self.sec = 10           # movie duration in seconds
    self.contrast = 1.0     # Michelson contrast of the stimulus
    # self.maxt = self.fps*self.sec
    # self.movie = np.zeros((self.height, self.width, self.maxt))
    # sine grating parameter
    self.wlen = 80          # spatial wavelength in pixels
    self.degr = 0           # grating orientation in degrees
    self.angl = (self.degr/180)*np.pi
    self.V = 0              # drift speed in pixels per frame
    # self.V = self.wlen/self.fps
    # self.V_t = np.ones(self.maxt) * self.V
    # self.degr = 45
    # self.angl = (self.degr/180)*np.pi
    self.Vdgr = self.angl   # drift direction in radians
    # NOTE(review): cos/sin are swapped here relative to singrat(), which
    # uses Vx = V*cos and Vy = V*sin; harmless with the default V = 0 since
    # the stimulus methods recompute both — confirm intended convention.
    self.Vy = self.V*np.cos(self.Vdgr)
    self.Vx = self.V*np.sin(self.Vdgr)
    # genavi parameter
    # self.avifname = 'out.avi'
def singrat(self):
self.angl = (self.degr/180)*np.pi
self.Vdgr = self.angl
self.Vx = self.V*np.cos(self.Vdgr)
print ('Vx', self.Vx)
self.Vy = self.V*np.sin(self.Vdgr)
print ('Vy', self.Vy)
self.maxt = self.fps*self.sec
[xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
self.movie = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt))))
self.movie = self.movie*self.contrast*0.5+0.5
gc.collect()
return self.movie
def grid(self):
    """Product of a drifting grating and a static orthogonal grating (grid)."""
    self.angl = (self.degr/180)*np.pi
    self.Vdgr = self.angl
    self.Vx = self.V*np.cos(self.Vdgr)
    print ('Vx', self.Vx)
    self.Vy = self.V*np.sin(self.Vdgr)
    print ('Vy', self.Vy)
    ang2 = np.pi/2  # orientation of the second, stationary grating
    self.maxt = self.fps*self.sec
    [xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
    # drifting grating (orientation angl, speed V) multiplied by a static
    # grating at ang2 (its velocity terms are fixed to 0)
    self.movie = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt)))) * np.cos(2.0*np.pi*((np.sin(ang2)/self.wlen)*(yv-(0*tt)) + (np.cos(ang2)/(self.wlen))*(xv-(0*tt))))
    # rescale from [-1, 1] to a contrast-scaled band around mid-gray 0.5
    self.movie = self.movie*self.contrast*0.5+0.5
    gc.collect()
    return self.movie
# rightward only
def gradient(self):
    """Rightward-moving falling luminance edge (half-cosine ramp).

    Pixels left of the moving front are clamped to white (1); pixels more
    than half a wavelength ahead of the front are clamped to black (0),
    leaving a single cosine ramp in between.
    """
    self.angl = (self.degr/180)*np.pi
    self.Vdgr = self.angl
    self.Vx = self.V*np.cos(self.Vdgr)
    print ('Vx', self.Vx)
    self.Vy = self.V*np.sin(self.Vdgr)
    print ('Vy', self.Vy)
    self.maxt = self.fps*self.sec
    [xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
    self.movie = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt))))
    self.movie = self.movie*self.contrast*0.5+0.5
    # NOTE(review): the column-wise clipping below assumes horizontal
    # (rightward) motion, i.e. degr == 0 — confirm for other orientations
    for i in range(0,self.fps*self.sec):
        self.movie[:,0:int((self.V*i)),i] = 1
        self.movie[:,int((self.wlen)/2+(self.V*i)):,i] = 0
    gc.collect()
    return self.movie
def gradient_minus(self):
    """Rightward-moving rising luminance edge (inverse of gradient()).

    The cosine is phase-shifted by pi, pixels left of the front are
    clamped to black (0) and pixels more than half a wavelength ahead of
    the front to white (1).
    """
    self.angl = (self.degr/180)*np.pi
    self.Vdgr = self.angl
    self.Vx = self.V*np.cos(self.Vdgr)
    print ('Vx', self.Vx)
    self.Vy = self.V*np.sin(self.Vdgr)
    print ('Vy', self.Vy)
    self.maxt = self.fps*self.sec
    [xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
    self.movie = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt)))+np.pi)
    self.movie = self.movie*self.contrast*0.5+0.5
    # NOTE(review): the column-wise clipping assumes rightward motion (degr == 0)
    for i in range(0,self.fps*self.sec):
        self.movie[:,0:int((self.V*i)),i] = 0
        self.movie[:,int((self.wlen)/2+(self.V*i)):,i] = 1
    gc.collect()
    return self.movie
def peak(self):
    """Single bright cosine bump (one wavelength wide) moving rightward.

    Outside the moving one-wavelength window the frame is clamped to
    black (0); inside, the phase-shifted cosine rises from 0 to contrast
    and back, so the bar blends continuously into the background.
    """
    self.angl = (self.degr/180)*np.pi
    self.Vdgr = self.angl
    self.Vx = self.V*np.cos(self.Vdgr)
    print ('Vx', self.Vx)
    self.Vy = self.V*np.sin(self.Vdgr)
    print ('Vy', self.Vy)
    self.maxt = self.fps*self.sec
    [xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
    self.movie = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt)))+np.pi)
    # note: unlike singrat(), contrast scales the whole [0, 1] signal here
    self.movie = self.movie*0.5+0.5
    self.movie = self.movie*self.contrast
    # NOTE(review): the column-wise clipping assumes rightward motion (degr == 0)
    for i in range(0,self.fps*self.sec):
        self.movie[:,0:int((self.V*i)),i] = 0
        self.movie[:,int((self.wlen)+(self.V*i)):,i] = 0
    gc.collect()
    return self.movie
def peak_minus(self):
    """Single dark cosine notch (one wavelength wide) moving rightward.

    Inverse of peak(): outside the moving one-wavelength window the frame
    is clamped to white (1); inside, the cosine dips from 1 down and back.
    """
    self.angl = (self.degr/180)*np.pi
    self.Vdgr = self.angl
    self.Vx = self.V*np.cos(self.Vdgr)
    print ('Vx', self.Vx)
    self.Vy = self.V*np.sin(self.Vdgr)
    print ('Vy', self.Vy)
    self.maxt = self.fps*self.sec
    [xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
    self.movie = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt))))
    # note: unlike singrat(), contrast scales the whole [0, 1] signal here
    self.movie = self.movie*0.5+0.5
    self.movie = self.movie*self.contrast
    # NOTE(review): the column-wise clipping assumes rightward motion (degr == 0)
    for i in range(0,self.fps*self.sec):
        self.movie[:,0:int((self.V*i)),i] = 1
        self.movie[:,int((self.wlen)+(self.V*i)):,i] = 1
    gc.collect()
    return self.movie
def peak_avg_bg(self):
    """Moving bright bump with the background pinned to the cosine minimum.

    Like peak(), but the area outside the one-wavelength window is set to
    the raw cosine minimum (-1) *before* scaling, so the bar blends
    continuously into the background; the amplitude factor is 0.25
    (half that of peak()) around mid-gray 0.5.
    """
    self.angl = (self.degr/180)*np.pi
    self.Vdgr = self.angl
    self.Vx = self.V*np.cos(self.Vdgr)
    print ('Vx', self.Vx)
    self.Vy = self.V*np.sin(self.Vdgr)
    print ('Vy', self.Vy)
    self.maxt = self.fps*self.sec
    [xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
    self.movie = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt)))+np.pi)
    # NOTE(review): the column-wise clipping assumes rightward motion (degr == 0)
    for i in range(0,self.fps*self.sec):
        self.movie[:,0:int((self.V*i)),i] = -1
        self.movie[:,int((self.wlen)+(self.V*i)):,i] = -1
    self.movie = self.movie*self.contrast*0.25+0.5
    gc.collect()
    return self.movie
def peak_avg_bg_minus(self):
    """Moving dark notch with the background pinned to the cosine maximum.

    Inverse of peak_avg_bg(): outside the one-wavelength window the raw
    cosine is set to its maximum (+1) *before* scaling, so the notch
    blends continuously; amplitude factor 0.25 around mid-gray 0.5.
    """
    self.angl = (self.degr/180)*np.pi
    self.Vdgr = self.angl
    self.Vx = self.V*np.cos(self.Vdgr)
    print ('Vx', self.Vx)
    self.Vy = self.V*np.sin(self.Vdgr)
    print ('Vy', self.Vy)
    self.maxt = self.fps*self.sec
    [xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
    self.movie = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt))))
    # NOTE(review): the column-wise clipping assumes rightward motion (degr == 0)
    for i in range(0,self.fps*self.sec):
        self.movie[:,0:int((self.V*i)),i] = 1
        self.movie[:,int((self.wlen)+(self.V*i)):,i] = 1
    self.movie = self.movie*self.contrast*0.25+0.5
    gc.collect()
    return self.movie
def singrat_uint8(self):
    """Build a drifting sine-grating movie directly as uint8 frames.

    NOTE(review): unlike the other generators, Vx/Vy are derived from a
    pre-existing ``self.Vdgr`` (set before ``self.angl``) — confirm callers
    set Vdgr first.  Returns self.movieuint8 of shape
    (height, width, fps*sec), dtype uint8.
    """
    self.Vx = self.V * np.cos(self.Vdgr)
    self.Vy = self.V * np.sin(self.Vdgr)
    self.angl = (self.degr / 180) * np.pi
    self.maxt = self.fps * self.sec
    [xv, yv, tt] = np.meshgrid(range(0, self.width), range(0, self.height), range(0, self.maxt))
    # Fix: compute the grating volume ONCE.  The original evaluated this full
    # expression twice, discarding the first (float64) result — wasted time
    # and a doubled peak-memory footprint for large movies.
    grating = np.cos(2.0*np.pi*((np.sin(self.angl)/self.wlen)*(yv-(self.Vy*tt)) + (np.cos(self.angl)/(self.wlen))*(xv-(self.Vx*tt))))
    # Scale to 8-bit: (value*contrast + 0.5) maps into [0, 1], *255 + 0.5
    # rounds-by-truncation into uint8.
    self.movieuint8 = ((grating * self.contrast + 0.5) * 255 + 0.5).astype(np.uint8)
    return self.movieuint8
def genavi(self, avifname='out.avi'):
    """Encode self.movie ([0,1] floats) into an XVID AVI at self.fps.

    Fix: the VideoWriter is now released in a ``finally`` block — the
    original never called release(), so the AVI container was never
    finalized (truncated/corrupt output on some backends).
    """
    self.avifname = avifname
    out = cv.VideoWriter(self.avifname, cv.VideoWriter_fourcc(*'XVID'), self.fps, (self.width, self.height))
    try:
        for i in range(self.maxt):
            # Scale [0, 1] floats to 8-bit; +0.5 rounds via truncation.
            frame = (self.movie[:, :, i] * 255 + 0.5).astype(np.uint8)
            frame = cv.cvtColor(frame, cv.COLOR_GRAY2RGB)
            out.write(frame)
            # Allow aborting with 'q' when an OpenCV window is open
            # (presumably a no-op when running headless — verify).
            if cv.waitKey(1) & 0xFF == ord('q'):
                break
    finally:
        out.release()
    gc.collect()
def genavi_null(self, avifname='out.avi'):
    """Encode the time-reversed self.movie into an XVID AVI ("null" stimulus).

    Fixes: the frame-by-frame reversal copy is replaced by a reversed slice
    view (no extra movie-sized allocation), and the VideoWriter is released
    so the AVI container is finalized.
    """
    # Reversed-time view of the movie; frames are only read, so no copy needed.
    m_null = self.movie[:, :, ::-1]
    print(self.maxt)
    self.avifname = avifname
    out = cv.VideoWriter(self.avifname, cv.VideoWriter_fourcc(*'XVID'), self.fps, (self.width, self.height))
    try:
        for i in range(self.maxt):
            frame = (m_null[:, :, i] * 255 + 0.5).astype(np.uint8)
            frame = cv.cvtColor(frame, cv.COLOR_GRAY2RGB)
            out.write(frame)
            if cv.waitKey(1) & 0xFF == ord('q'):
                break
    finally:
        out.release()
    gc.collect()
def genavi_uint8(self, avifname='out.avi'):
    """Encode self.movieuint8 (pre-quantized uint8 frames) into an XVID AVI.

    Fix: the VideoWriter is now released so the AVI container is finalized;
    the original leaked the writer.
    """
    self.avifname = avifname
    out = cv.VideoWriter(self.avifname, cv.VideoWriter_fourcc(*'XVID'), self.fps, (self.width, self.height))
    try:
        for i in range(self.maxt):
            # astype is a cheap no-op copy if the movie is already uint8.
            frame = self.movieuint8[:, :, i].astype(np.uint8)
            frame = cv.cvtColor(frame, cv.COLOR_GRAY2RGB)
            out.write(frame)
            if cv.waitKey(1) & 0xFF == ord('q'):
                break
    finally:
        out.release()
def savpickle(self, clsfname='test.sti'):
    """Pickle this stimulus object to *clsfname* (stored on self.classfname)."""
    self.classfname = clsfname
    with open(self.classfname, 'wb') as fh:
        pickle.dump(self, fh, pickle.HIGHEST_PROTOCOL)
def main():
    """Placeholder entry point; intentionally does nothing (the __main__ block below drives the script directly)."""
    pass
if __name__ == '__main__':
    # Demo/driver: build a grating stimulus and pickle it to 'test.sti'.
    # NOTE(review): `sti` and `gradient_minus` are defined earlier in this
    # file (outside this excerpt) — presumably sti is the stimulus class
    # whose methods appear above.
    sw0 = sti()
    sw0.wlen = 32    # wavelength in pixels
    sw0.fps = 120    # frames per second
    sw0.sec = 4      # duration in seconds
    sw0.degr = 0     # orientation in degrees
    sw0.V = 0.5      # drift speed
    sw = sw0.gradient_minus()
    # sw0.genavi('out4.avi')
    sw0.savpickle()
    # plt.plot(sw0.movie[100,100,150:250])
    # plt.show()
    # with open('test.sti', 'rb') as input:
    # sw0 = pickle.load(input)
    # print (sw0.degr)
    # main()
| 38.818841 | 240 | 0.563842 | 1,663 | 10,714 | 3.609741 | 0.082983 | 0.083958 | 0.07996 | 0.053307 | 0.839913 | 0.839247 | 0.823588 | 0.819257 | 0.795935 | 0.793603 | 0 | 0.03191 | 0.248273 | 10,714 | 275 | 241 | 38.96 | 0.713434 | 0.075509 | 0 | 0.706731 | 0 | 0 | 0.008704 | 0 | 0 | 0 | 0.001214 | 0 | 0 | 1 | 0.072115 | false | 0.004808 | 0.028846 | 0 | 0.149038 | 0.081731 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2875fa6110692e9ca4ad761d1433a7a3f512cffc | 22,615 | py | Python | tests/unit/test_parser.py | Geeks-Trident-LLC/dlquery | 1f1b2fa0d25d0ecbdd68691f317cfee0d6b4c8f1 | [
"BSD-3-Clause"
] | null | null | null | tests/unit/test_parser.py | Geeks-Trident-LLC/dlquery | 1f1b2fa0d25d0ecbdd68691f317cfee0d6b4c8f1 | [
"BSD-3-Clause"
] | null | null | null | tests/unit/test_parser.py | Geeks-Trident-LLC/dlquery | 1f1b2fa0d25d0ecbdd68691f317cfee0d6b4c8f1 | [
"BSD-3-Clause"
] | null | null | null | import pytest
# from dlapp import DLQuery
from dlapp.parser import SelectParser
@pytest.fixture
def data():
    """Sample mapping fixture.

    NOTE(review): every test method below also declares a parametrize
    argument named ``data``, which shadows this fixture — it appears unused.
    """
    obj = {'a': 1.2, 'b': 3, 'c': 'abc xyz'}
    yield obj
class TestSelectParser:
@pytest.mark.parametrize(
"data,statement,expected_columns, predicate_result",
[
( # case: select None
{'a': 1, 'b': 2}, # data
'', # select statement
[None], # expected_columns
None # predicate_result
),
( # case: select ALL
{'a': 1, 'b': 2}, # data
'SELECT *', # select statement
[], # expected_columns
None # predicate_result
),
( # case: select ALL
{'a': 1, 'b': 2}, # data
'__ALL__', # select statement
[], # expected_columns
None # predicate_result
),
( # case: select b where a = 1
{'a': 1, 'b': 2}, # data
'SELECT b where a eq 1', # select statement
['b'], # expected_columns
True # predicate_result
),
( # case: a, b where a = 1 (short format)
{'a': 1, 'b': 2}, # data
'a, b where a eq 1', # select statement
['a', 'b'], # expected_columns
True # predicate_result
),
( # case: select a, c where a != 1 or_ c eq 3
{'a': 1, 'b': 2, 'c': 3}, # data
'select a, c where a ne 1 or_ c eq 3', # select statement
['a', 'c'], # expected_columns
True # predicate_result
),
( # case: select a, c where a != 1 || c eq 3
{'a': 1, 'b': 2, 'c': 3}, # data
'select a, c where a ne 1 || c eq 3', # select statement
['a', 'c'], # expected_columns
True # predicate_result
),
( # case: select a, c where a = 1 and_ c eq 3
{'a': 1, 'b': 2, 'c': 3}, # data
'select a, c where a eq 1 and_ c eq 3', # select statement
['a', 'c'], # expected_columns
True # predicate_result
),
( # case: select a, c where a = 1 and_ c eq 3
{'a': 1, 'b': 2, 'c': 3}, # data
'select a, c where a eq 1 && c eq 3', # select statement
['a', 'c'], # expected_columns
True # predicate_result
),
( # case: a, select key name having space where "key having space" == 2
{'a': 1, 'key name having space': 2}, # data
'''select a, key name having space where "key name having space" == 2''', # select statement
['a', 'key name having space'], # expected_columns
True # predicate_result
),
( # case: select a, key name having space where 'key having space' == 2
{'a': 1, 'key name having space': 2}, # data
'''select a, key name having space where 'key name having space' == 2''', # select statement
['a', 'key name having space'], # expected_columns
True # predicate_result
),
]
)
def test_parse_statement(self, data, statement,
                         expected_columns, predicate_result):
    """Parse *statement* and check selected columns plus the predicate outcome."""
    parser = SelectParser(statement)
    parser.parse_statement()
    assert parser.columns == expected_columns
    predicate = parser.predicate
    if predicate is not None:
        assert predicate(data) == predicate_result
@pytest.mark.parametrize(
"data,statement",
[
(
{'a': True, 'b': 2}, # data
'select b where a is true', # select statement
),
(
{'a': 'True', 'b': 2}, # data
'select b where a is true', # select statement
),
(
{'a': False, 'b': 2}, # data
'select b where a is_not true', # select statement
),
(
{'a': False, 'b': 2}, # data
'select b where a is false', # select statement
),
(
{'a': 'False', 'b': 2}, # data
'select b where a is false', # select statement
),
(
{'a': True, 'b': 2}, # data
'select b where a is_not false', # select statement
),
(
{'a': '', 'b': 2}, # data
'select b where a is empty', # select statement
),
(
{'a': 'abc', 'b': 2}, # data
'select b where a is_not empty', # select statement
),
(
{'a': 'abc', 'b': 2}, # data
'select b where a isnot empty', # select statement
),
(
{'a': ' \t\n', 'b': 2}, # data
'select b where a is optional_empty', # select statement
),
(
{'a': '', 'b': 2}, # data
'select b where a is_not optional_empty', # select statement
),
(
{'a': '192.168.1.1', 'b': 2}, # data
'select b where a is ipv4_address', # select statement
),
(
{'a': '192.168.1.300', 'b': 2}, # data
'select b where a is_not ipv4_address', # select statement
),
(
{'a': '2001::1', 'b': 2}, # data
'select b where a is ipv6_address', # select statement
),
(
{'a': '2001::1/150', 'b': 2}, # data
'select b where a is_not ipv6_address', # select statement
),
(
{'a': '192.168.1.1', 'b': 2}, # data
'select b where a is ip_address', # select statement
),
(
{'a': '2001::1', 'b': 2}, # data
'select b where a is ip_address', # select statement
),
(
{'a': '192.168.1.300', 'b': 2}, # data
'select b where a is_not ip_address', # select statement
),
(
{'a': '2001::1/150', 'b': 2}, # data
'select b where a is_not ip_address', # select statement
),
(
{'a': '11:22:33:aa:bb:cc', 'b': 2}, # data
'select b where a is mac_address', # select statement
),
(
{'a': '11-22-33-aa-bb-cc', 'b': 2}, # data
'select b where a is mac_address', # select statement
),
(
{'a': '11 22 33 aa bb cc', 'b': 2}, # data
'select b where a is mac_address', # select statement
),
(
{'a': '11-22-21 12-30-21', 'b': 2}, # data
'select b where a is_not mac_address', # select statement
),
(
{'a': '11:22:33 12:59:55', 'b': 2}, # data
'select b where a is_not mac_address', # select statement
),
(
{'a': 'Loopback0', 'b': 2}, # data
'select b where a is loopback_interface', # select statement
),
(
{'a': 'lo0', 'b': 2}, # data
'select b where a is loopback_interface', # select statement
),
(
{'a': 'Bundle-Ether 1', 'b': 2}, # data
'select b where a is bundle_ethernet', # select statement
),
(
{'a': 'Bundle-Ether1.1', 'b': 2}, # data
'select b where a is bundle_ethernet', # select statement
),
(
{'a': 'BE1', 'b': 2}, # data
'select b where a is bundle_ethernet', # select statement
),
(
{'a': 'be 1.1', 'b': 2}, # data
'select b where a is bundle_ethernet', # select statement
),
(
{'a': 'Port-Channel 1', 'b': 2}, # data
'select b where a is port_channel', # select statement
),
(
{'a': 'po 1.1', 'b': 2}, # data
'select b where a is port_channel', # select statement
),
(
{'a': 'HundredGigE 0/0/0/0/1', 'b': 2}, # data
'select b where a is hundred_gigabit_ethernet', # select statement
),
(
{'a': 'Hu0/0/0/0/1.1', 'b': 2}, # data
'select b where a is hundred_gigabit_ethernet', # select statement
),
(
{'a': 'TenGigE 0/0/0/0/1', 'b': 2}, # data
'select b where a is ten_gigabit_ethernet', # select statement
),
(
{'a': 'Te0/0/0/0/1.1', 'b': 2}, # data
'select b where a is ten_gigabit_ethernet', # select statement
),
(
{'a': 'GigabitEthernet0/0/0/1', 'b': 2}, # data
'select b where a is gigabit_ethernet', # select statement
),
(
{'a': 'Gi0/0/0/1.1', 'b': 2}, # data
'select b where a is gigabit_ethernet', # select statement
),
(
{'a': 'FastEthernet0/13', 'b': 2}, # data
'select b where a is fast_ethernet', # select statement
),
(
{'a': 'Fa0/13.1', 'b': 2}, # data
'select b where a is fast_ethernet', # select statement
),
]
)
def test_parse_statement_validating_custom_keyword(self, data, statement):
    """Custom keyword predicates (is / is_not with named checks) must accept *data*."""
    parser = SelectParser(statement)
    parser.parse_statement()
    assert parser.predicate(data, on_exception=False) is True
@pytest.mark.parametrize(
"data,statement",
[
(
{'a': 'abc', 'b': 2}, # data
'select b where a match [a-z]+', # select statement
),
(
{'a': 'abc', 'b': 2}, # data
'select b where a match \\w+', # select statement
),
(
{'a': '+ - * ?', 'b': 2}, # data
'select b where a not_match [a-z]+', # select statement
),
(
{'a': '+ - * ?', 'b': 2}, # data
'select b where a not_match \\w+', # select statement
),
]
)
def test_parse_statement_validating_regular_expression(self, data, statement):
    """match / not_match predicates must accept *data*."""
    parser = SelectParser(statement)
    parser.parse_statement()
    assert parser.predicate(data, on_exception=False) is True
@pytest.mark.parametrize(
"data,statement",
[
#####################
# number comparison #
#####################
(
{'a': 20, 'b': 2}, # data
'select b where a > 10', # select statement
),
(
{'a': 20, 'b': 2}, # data
'select b where a gt 10', # select statement
),
(
{'a': '20', 'b': 2}, # data
'select b where a gt 10', # select statement
),
(
{'a': 20, 'b': 2}, # data
'select b where a >= 20.0', # select statement
),
(
{'a': 20, 'b': 2}, # data
'select b where a ge 20.0', # select statement
),
(
{'a': 5, 'b': 2}, # data
'select b where a < 10', # select statement
),
(
{'a': 5, 'b': 2}, # data
'select b where a lt 10', # select statement
),
(
{'a': 5.0, 'b': 2}, # data
'select b where a <= 5', # select statement
),
(
{'a': 5.0, 'b': 2}, # data
'select b where a le 5', # select statement
),
(
{'a': 5, 'b': 2}, # data
'select b where a == 5.0', # select statement
),
(
{'a': 5, 'b': 2}, # data
'select b where a eq 5.0', # select statement
),
(
{'a': 5, 'b': 2}, # data
'select b where a != 3', # select statement
),
(
{'a': 5, 'b': 2}, # data
'select b where a ne 3', # select statement
),
#####################
# string comparison #
#####################
(
{'a': 'abc', 'b': 2}, # data
'select b where a == abc', # select statement
),
(
{'a': 'abc', 'b': 2}, # data
'select b where a eq abc', # select statement
),
(
{'a': 'abc', 'b': 2}, # data
'select b where a != xyz', # select statement
),
(
{'a': 'abc', 'b': 2}, # data
'select b where a ne xyz', # select statement
),
(
{'a': 'first, last', 'b': 2}, # data
'select b where a contains first', # select statement
),
(
{'a': 'first, last', 'b': 2}, # data
'select b where a contain first', # select statement
),
(
{'a': 'first, last', 'b': 2}, # data
'select b where a not_contain middle', # select statement
),
(
{'a': 'first', 'b': 2}, # data
'select b where a belongs first, last', # select statement
),
(
{'a': 'first', 'b': 2}, # data
'select b where a belong first, last', # select statement
),
(
{'a': 'middle', 'b': 2}, # data
'select b where a not_belong first, last', # select statement
),
######################
# version comparison #
######################
(
{'a': 'b', 'b': '2'}, # data
'select a where a > version(a)', # select statement
),
(
{'a': 'b', 'b': '2'}, # data
'select a where a gt version(a)', # select statement
),
(
{'a': 'b', 'b': '2'}, # data
'select a where a gt version(a.b.c.d)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a >= version(6.3.0)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a ge version(6.3.0)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a < version(7.0.1)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a lt version(7.0.1)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a <= version(7.0.1-a)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a le version(7.0.1-a)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a == version(6.3.4)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a eq version(6.3.4)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a != version(6.3.5)', # select statement
),
(
{'a': '6.3.4', 'b': '2'}, # data
'select a where a ne version(6.3.5)', # select statement
),
###############################
# semantic version comparison #
###############################
(
{'a': '6.4.0', 'b': '2'}, # data
'select a where a gt semantic_version(6.3.9-a)', # select statement
),
(
{'a': '3.1.0', 'b': '2'}, # data
'select a where a ge semantic_version(2.9.9)', # select statement
),
(
{'a': '6.3.9', 'b': '2'}, # data
'select a where a lt semantic_version(6.4.0)', # select statement
),
(
{'a': '6.3.9', 'b': '2'}, # data
'select a where a le semantic_version(6.4.0)', # select statement
),
(
{'a': '1.0.1-a', 'b': '2'}, # data
'select a where a eq semantic_version(1.0.1-a)', # select statement
),
(
{'a': '6.3.9', 'b': '2'}, # data
'select a where a ne semantic_version(6.4.1)', # select statement
),
###############################
# datetime comparison #
###############################
(
{'a': '2021-06-05'}, # data
'select a where a == date(06/05/2021)', # select statement
),
(
{'a': '2021Jun05'}, # data
'select a where a == date(Jun 5, 2021)', # select statement
),
(
{'a': '03:30:50.000001 PM'}, # data
'select a where a > time(15:30:50)', # select statement
),
(
{'a': '03:30:50pm'}, # data
'select a where a == time(15:30:50)', # select statement
),
(
{'a': '06/06/2021'}, # data
'select a where a gt datetime(01/01/2021)', # select statement
),
(
{'a': '6-6-2021'}, # data
'select a where a gt datetime(Jan 1, 2021)', # select statement
),
(
{'a': '2021-06-14T08:30:00+00:00'}, # data
'select a where a > datetime(2021-06-14T07:30:00+00:00 iso=True)', # select statement
),
(
{'a': '2021Jun06 02:30:00 PM PDT'}, # data
'select a where a > datetime(Jan 1 10:30:00 AM PST 2021 timezone=PST: -28800, PDT: -25200)', # select statement
),
(
{'a': '2021Jun06 02:30:00 PM PDT'}, # data
'select a where a == datetime(Jun 6 14:30:00 AM PDT 2021 timezone=PST: -28800, PDT: -25200)', # select statement
),
]
)
def test_parse_statement_validating_operator(self, data, statement):
    """Comparison operators (number/string/version/datetime) must accept *data*."""
    parser = SelectParser(statement)
    parser.parse_statement()
    assert parser.predicate(data, on_exception=False) is True
| 43.490385 | 130 | 0.331108 | 2,034 | 22,615 | 3.624877 | 0.083579 | 0.087074 | 0.206158 | 0.144853 | 0.864913 | 0.840363 | 0.803743 | 0.776889 | 0.727655 | 0.709209 | 0 | 0.059374 | 0.536768 | 22,615 | 519 | 131 | 43.574181 | 0.644425 | 0.144462 | 0 | 0.497984 | 0 | 0.006048 | 0.240439 | 0.016208 | 0 | 0 | 0 | 0 | 0.010081 | 1 | 0.010081 | false | 0 | 0.004032 | 0 | 0.016129 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
955c76afb59e167f4a0030617435714714a894cc | 123 | py | Python | examples/__init__.py | bliepp/bottle-rest | f4cb3e5811a382a9ab95decab5b8d69d1e6f8491 | [
"MIT"
] | null | null | null | examples/__init__.py | bliepp/bottle-rest | f4cb3e5811a382a9ab95decab5b8d69d1e6f8491 | [
"MIT"
] | null | null | null | examples/__init__.py | bliepp/bottle-rest | f4cb3e5811a382a9ab95decab5b8d69d1e6f8491 | [
"MIT"
] | null | null | null | from .additional_api import app as additional
from .dynamic_api import app as dynamic
from .simple_api import app as simple | 41 | 45 | 0.837398 | 21 | 123 | 4.761905 | 0.380952 | 0.27 | 0.36 | 0.42 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.138211 | 123 | 3 | 46 | 41 | 0.943396 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
958da81388793a8c9390d1ad452779958f5094b1 | 28,541 | py | Python | nebula/tests/test_event_model.py | threathunterX/nebula_web | 2e32e6e7b225e0bd87ee8c847c22862f12c51bb1 | [
"Apache-2.0"
] | 2 | 2019-05-01T09:42:32.000Z | 2019-05-31T01:08:37.000Z | nebula/tests/test_event_model.py | threathunterX/nebula_web | 2e32e6e7b225e0bd87ee8c847c22862f12c51bb1 | [
"Apache-2.0"
] | 1 | 2021-06-01T23:30:04.000Z | 2021-06-01T23:30:04.000Z | nebula/tests/test_event_model.py | threathunterX/nebula_web | 2e32e6e7b225e0bd87ee8c847c22862f12c51bb1 | [
"Apache-2.0"
] | 5 | 2019-05-14T09:30:12.000Z | 2020-09-29T04:57:26.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from nebula.dao.event_model_dao import EventModelDefaultDao
from nebula.dao.event_model_dao import EventModelCustDao
from nebula_meta.event_model import EventModel, add_event_to_registry
from base import WebTestCase
from .unittest_util import TestClassDBUtil, db_env
# Load the sample event models once at import time and register each with the
# global event registry; SAMPLE_EVENT is the canonical model used by the tests.
with open('nebula/tests/data/event_model.json') as json_file:
    SAMPLE_EVENTS = json.load(json_file)

models = []
for raw in SAMPLE_EVENTS:
    model = EventModel.from_dict(raw)
    add_event_to_registry(model)
    models.append(model)
SAMPLE_EVENTS = models
SAMPLE_EVENT = SAMPLE_EVENTS[0]
def prepare_events(event_dao):
    """Seed *event_dao* with three models: SAMPLE_EVENT plus an app variant
    ('new_app'/'event1') and a type variant ('new_type'/'event2')."""
    event_dao.add_model(SAMPLE_EVENT)

    app_variant = SAMPLE_EVENT.copy()
    app_variant.app = 'new_app'
    app_variant.name = 'event1'
    event_dao.add_model(app_variant)

    type_variant = SAMPLE_EVENT.copy()
    type_variant.type = 'new_type'
    type_variant.name = 'event2'
    event_dao.add_model(type_variant)
def clear_events(event_dao):
    """Remove every model from the given DAO's backing table."""
    event_dao.clear()
class TestDefaultEventModelListHandler(WebTestCase):
def get_handlers(self):
    """Route table under test: only the default event-model list endpoint."""
    # Fix: dropped the unused `event_model` import (only the default view is routed here).
    from nebula.views import event_model_default
    return [(r"/default/event_models", event_model_default.EventModelListHandler)]
@classmethod
def setUpClass(cls):
    """Point the test DB environment at the local MySQL instance once per class."""
    super(TestDefaultEventModelListHandler, cls).setUpClass()
    # NOTE(review): credentials/host are hard-coded; assumes local MySQL on 3306.
    connection_string = 'mysql://%s:%s@%s:%s' % ('nebula', 'ThreathunterNebula', '127.0.0.1', '3306')
    # Three schemas: config ('nebula'), runtime data, and default/built-in models.
    db_env.update_connect_string('%s/%s?charset=utf8' % (connection_string, 'nebula'),
                                 '%s/%s?charset=utf8' % (connection_string, 'nebula_data'),
                                 '%s/%s?charset=utf8' % (connection_string, 'nebula_default'))
    db_env.init()
@classmethod
def tearDownClass(cls):
    """Tear down the shared test DB environment."""
    db_env.clear()
    super(TestDefaultEventModelListHandler, cls).tearDownClass()
def setUp(self):
    """Create per-test DB sessions and wire them into the global DAO session slots."""
    super(TestDefaultEventModelListHandler, self).setUp()
    self.db_util = TestClassDBUtil()
    self.db_util.setup()
    import nebula.dao.base_dao
    # DAOs read these module-level globals, so each test gets fresh sessions.
    nebula.dao.base_dao.Global_Data_Session = self.db_util.get_data_session()
    nebula.dao.base_dao.Global_Default_Session = self.db_util.get_default_session()
    nebula.dao.base_dao.Global_Session = self.db_util.get_session()
    self.event_dao = EventModelDefaultDao()
def tearDown(self):
    """Dispose per-test DB resources."""
    self.db_util.teardown()
    super(TestDefaultEventModelListHandler, self).tearDown()
def test_get_events(self):
    """GET /default/event_models returns all seeded models and honors app/type filters."""
    prepare_events(self.event_dao)
    base_url = "/default/event_models"

    payload = json.loads(self.fetch(base_url).body)
    self.assertEqual(self.event_dao.count(), 3)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')
    self.assertEqual(len(payload['values']), 3)

    # The sample event must round-trip through the API unchanged.
    matching = [item for item in payload['values'] if item['name'] == SAMPLE_EVENT.name]
    self.assertEqual(EventModel.from_dict(matching[0]), SAMPLE_EVENT)

    # Filtering by app yields only the 'new_app' variant.
    payload = json.loads(self.fetch(base_url + '?app=new_app').body)
    self.assertEqual(len(payload['values']), 1)

    # Filtering by type yields only the 'new_type' variant.
    payload = json.loads(self.fetch(base_url + '?type=new_type').body)
    self.assertEqual(len(payload['values']), 1)
def test_delete_events(self):
    """DELETE /default/event_models removes models by type, by app, and in bulk.

    The original triplicated the same get/delete/verify sequence; it is
    factored into two local helpers with identical HTTP calls and assertions.
    """
    def get_values(url):
        # GET *url* and return the 'values' list from the JSON body.
        return json.loads(self.fetch(url).body)['values']

    def delete_ok(url):
        # DELETE *url* and assert the API reports success.
        res = json.loads(self.fetch(url, method='DELETE').body)
        self.assertEqual(res['status'], 0)
        self.assertEqual(res['msg'], 'ok')

    prepare_events(self.event_dao)
    self.assertEqual(len(get_values("/default/event_models")), 3)

    # 1. delete by type
    url = "/default/event_models?type=new_type"
    self.assertEqual(len(get_values(url)), 1)
    delete_ok(url)
    self.assertEqual(len(get_values(url)), 0)

    # 2. delete by app
    url = "/default/event_models?app=new_app"
    self.assertEqual(len(get_values(url)), 1)
    delete_ok(url)
    self.assertEqual(len(get_values(url)), 0)

    # 3. delete all (re-seed first)
    clear_events(self.event_dao)
    prepare_events(self.event_dao)
    url = "/default/event_models"
    self.assertEqual(len(get_values(url)), 3)
    delete_ok(url)
    self.assertEqual(len(get_values(url)), 0)
def test_modify_events(self):
    """POST /default/event_models updates an existing model and creates new ones."""
    prepare_events(self.event_dao)

    # Work against the seeded 'new_app' variant.
    expected = SAMPLE_EVENT.copy()
    expected.app = 'new_app'
    expected.name = 'event1'

    app_url = "/default/event_models?app=new_app"
    payload = json.loads(self.fetch(app_url).body)
    self.assertEqual(len(payload['values']), 1)
    fetched = EventModel.from_dict(payload['values'][0])
    self.assertEqual(expected, fetched)

    # 1. modify the stored event via POST
    expected.remark = 'new_remark'
    self.assertNotEquals(expected, fetched)

    post_url = "/default/event_models"
    payload = json.loads(self.fetch(post_url, method='POST', body='[%s]' % expected.get_json()).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')

    # the stored model now matches the modified copy
    payload = json.loads(self.fetch(app_url).body)
    self.assertEqual(len(payload['values']), 1)
    fetched = EventModel.from_dict(payload['values'][0])
    self.assertEqual(expected, fetched)

    # 2. add a brand-new event via POST
    payload = json.loads(self.fetch(post_url).body)
    self.assertEqual(len(payload['values']), 3)

    added = SAMPLE_EVENT.copy()
    added.name = 'added_event'
    payload = json.loads(self.fetch(post_url, method='POST', body='[%s]' % added.get_json()).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')

    payload = json.loads(self.fetch(post_url).body)
    self.assertEqual(len(payload['values']), 4)
def test_add_events(self):
    """Minimal event dicts (name only / name plus source) are accepted by POST."""
    minimal_event = {'name': 'event1'}
    derived_event = {
        'name': 'event2',
        'source': [{'app': '', 'name': 'event1'}],
    }

    body = json.dumps([minimal_event, derived_event])
    res = json.loads(self.fetch("/default/event_models", method='POST', body=body).body)
    self.assertEqual(res['status'], 0)
    self.assertEqual(res['msg'], 'ok')
class TestDefaultEventQueryHandler(WebTestCase):
def get_handlers(self):
    """Route table under test: the default single-event query endpoint."""
    # Fix: dropped the unused `event_model` import (only the default view is routed here).
    from nebula.views import event_model_default
    return [(r"/default/event_models/event/(.*)/(.*)", event_model_default.EventQueryHandler)]
@classmethod
def setUpClass(cls):
    """Point the test DB environment at the local MySQL instance once per class."""
    super(TestDefaultEventQueryHandler, cls).setUpClass()
    # NOTE(review): credentials/host are hard-coded; assumes local MySQL on 3306.
    connection_string = 'mysql://%s:%s@%s:%s' % ('nebula', 'ThreathunterNebula', '127.0.0.1', '3306')
    # Three schemas: config ('nebula'), runtime data, and default/built-in models.
    db_env.update_connect_string('%s/%s?charset=utf8' % (connection_string, 'nebula'),
                                 '%s/%s?charset=utf8' % (connection_string, 'nebula_data'),
                                 '%s/%s?charset=utf8' % (connection_string, 'nebula_default'))
    db_env.init()
@classmethod
def tearDownClass(cls):
    """Tear down the shared test DB environment."""
    db_env.clear()
    super(TestDefaultEventQueryHandler, cls).tearDownClass()
def setUp(self):
    """Create per-test DB sessions and wire them into the global DAO session slots."""
    super(TestDefaultEventQueryHandler, self).setUp()
    self.db_util = TestClassDBUtil()
    self.db_util.setup()
    import nebula.dao.base_dao
    # DAOs read these module-level globals, so each test gets fresh sessions.
    nebula.dao.base_dao.Global_Data_Session = self.db_util.get_data_session()
    nebula.dao.base_dao.Global_Default_Session = self.db_util.get_default_session()
    nebula.dao.base_dao.Global_Session = self.db_util.get_session()
    self.event_dao = EventModelDefaultDao()
def tearDown(self):
    """Dispose per-test DB resources."""
    self.db_util.teardown()
    super(TestDefaultEventQueryHandler, self).tearDown()
def test_get_event(self):
    """GET a single event by app/name; unknown names report 404 in the JSON status."""
    prepare_events(self.event_dao)

    existing = "/default/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
    payload = json.loads(self.fetch(existing).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')
    self.assertEqual(len(payload['values']), 1)
    self.assertEqual(EventModel.from_dict(payload['values'][0]), SAMPLE_EVENT)

    # A non-existent name yields a 404 status.
    missing = "/default/event_models/event/{}/{}".format(SAMPLE_EVENT.app, 'not_exist')
    payload = json.loads(self.fetch(missing).body)
    self.assertEqual(payload['status'], 404)
def test_delete_event(self):
    """DELETE on the single-event endpoint removes the model; later GETs see 404."""
    prepare_events(self.event_dao)

    url = "/default/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
    payload = json.loads(self.fetch(url).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')
    self.assertEqual(len(payload['values']), 1)
    self.assertEqual(EventModel.from_dict(payload['values'][0]), SAMPLE_EVENT)

    # delete the event
    payload = json.loads(self.fetch(url, method='DELETE').body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')

    # verify it is gone
    payload = json.loads(self.fetch(url).body)
    self.assertEqual(payload['status'], 404)
def tests_modify_event(self):
    """POST to the single-event endpoint updates an existing model or creates one.

    NOTE(review): the name 'tests_modify_event' breaks the test_* naming
    convention of the sibling methods (still collected via the 'test' prefix).
    """
    prepare_events(self.event_dao)

    sample_url = "/default/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
    payload = json.loads(self.fetch(sample_url).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')
    self.assertEqual(len(payload['values']), 1)
    fetched = EventModel.from_dict(payload['values'][0])
    self.assertEqual(fetched, SAMPLE_EVENT)

    # 1. modify the sample event in place
    modified = SAMPLE_EVENT.copy()
    modified.remark = 'modified'
    self.assertNotEquals(fetched, modified)

    payload = json.loads(self.fetch(sample_url, method='POST', body=modified.get_json()).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')

    payload = json.loads(self.fetch(sample_url).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')
    self.assertEqual(len(payload['values']), 1)
    self.assertEqual(EventModel.from_dict(payload['values'][0]), modified)

    # 2. create a brand-new event through the same endpoint
    new_url = "/default/event_models/event/{}/{}".format(SAMPLE_EVENT.app, 'event_to_be_add')
    payload = json.loads(self.fetch(new_url).body)
    self.assertEqual(payload['status'], 404)

    fresh = SAMPLE_EVENT.copy()
    fresh.name = 'event_to_be_add'
    payload = json.loads(self.fetch(new_url, method='POST', body=fresh.get_json()).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')

    payload = json.loads(self.fetch(new_url).body)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')
    self.assertEqual(len(payload['values']), 1)
    self.assertEqual(EventModel.from_dict(payload['values'][0]), fresh)
class TestEventModelListHandler(WebTestCase):
def get_handlers(self):
    """Routes under test: custom (platform) and default event-model list endpoints."""
    from nebula.views import event_model_default, event_model
    return [
        (r"/platform/event_models", event_model.EventListHandler),
        (r"/default/event_models", event_model_default.EventModelListHandler),
    ]
@classmethod
def setUpClass(cls):
    """Point the test DB environment at the local MySQL instance once per class."""
    super(TestEventModelListHandler, cls).setUpClass()
    # NOTE(review): credentials/host are hard-coded; assumes local MySQL on 3306.
    connection_string = 'mysql://%s:%s@%s:%s' % ('nebula', 'ThreathunterNebula', '127.0.0.1', '3306')
    # Three schemas: config ('nebula'), runtime data, and default/built-in models.
    db_env.update_connect_string('%s/%s?charset=utf8' % (connection_string, 'nebula'),
                                 '%s/%s?charset=utf8' % (connection_string, 'nebula_data'),
                                 '%s/%s?charset=utf8' % (connection_string, 'nebula_default'))
    db_env.init()
@classmethod
def tearDownClass(cls):
    """Tear down the shared test DB environment."""
    db_env.clear()
    super(TestEventModelListHandler, cls).tearDownClass()
def setUp(self):
    """Create per-test DB sessions, wire DAO globals, and build both DAOs."""
    super(TestEventModelListHandler, self).setUp()
    self.db_util = TestClassDBUtil()
    self.db_util.setup()
    import nebula.dao.base_dao
    # DAOs read these module-level globals, so each test gets fresh sessions.
    nebula.dao.base_dao.Global_Data_Session = self.db_util.get_data_session()
    nebula.dao.base_dao.Global_Default_Session = self.db_util.get_default_session()
    nebula.dao.base_dao.Global_Session = self.db_util.get_session()
    # default = built-in models table, cust = user (platform) models table.
    self.default_dao = EventModelDefaultDao()
    self.cust_dao = EventModelCustDao()
def tearDown(self):
    """Dispose per-test DB resources."""
    self.db_util.teardown()
    super(TestEventModelListHandler, self).tearDown()
def test_get_events(self):
    """Custom models are served by /platform, not /default; filters narrow results."""
    prepare_events(self.cust_dao)

    # Default table untouched -> empty result from the default endpoint.
    payload = json.loads(self.fetch("/default/event_models").body)
    self.assertEqual(self.default_dao.count(), 0)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')
    self.assertEqual(len(payload['values']), 0)

    # Custom (platform) endpoint serves the three seeded models.
    platform_url = '/platform/event_models'
    payload = json.loads(self.fetch(platform_url).body)
    self.assertEqual(self.cust_dao.count(), 3)
    self.assertEqual(payload['status'], 0)
    self.assertEqual(payload['msg'], 'ok')
    self.assertEqual(len(payload['values']), 3)

    matching = [item for item in payload['values'] if item['name'] == SAMPLE_EVENT.name]
    self.assertEqual(EventModel.from_dict(matching[0]), SAMPLE_EVENT)

    # App / type filters each match exactly one seeded variant.
    payload = json.loads(self.fetch(platform_url + '?app=new_app').body)
    self.assertEqual(len(payload['values']), 1)
    payload = json.loads(self.fetch(platform_url + '?type=new_type').body)
    self.assertEqual(len(payload['values']), 1)
def test_delete_events(self):
prepare_events(self.cust_dao)
url = "/platform/event_models"
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 3)
# 1 delete by type
# 1.1 get
url = "/platform/event_models?type=new_type"
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 1)
# 1.2 delete
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 1.3 verify
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 0)
# 2 delete by app
# 2.1 get
url = "/platform/event_models?app=new_app"
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 1)
# 2.2 delete
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 2.3 verify
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 0)
clear_events(self.cust_dao)
prepare_events(self.cust_dao)
# 3. delete all
# 3.1 get
url = "/platform/event_models"
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 3)
# 3.2 delete
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 3.3 verify
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 0)
# 4. check only affect cust dao
# 4.1 add event in both table
clear_events(self.default_dao)
clear_events(self.cust_dao)
prepare_events(self.default_dao)
prepare_events(self.cust_dao)
# 4.2 each has 3
self.assertEqual(self.default_dao.count(), 3)
self.assertEqual(self.cust_dao.count(), 3)
# 4.3 delete all
url = '/platform/event_models'
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 4.4 now cust is empty
self.assertEqual(self.default_dao.count(), 3)
self.assertEqual(self.cust_dao.count(), 0)
def test_modify_events(self):
prepare_events(self.cust_dao)
# use test event in 'new_app' to check
new_app_event = SAMPLE_EVENT.copy()
new_app_event.app = 'new_app'
new_app_event.name = 'event1'
# first check
url = "/platform/event_models?app=new_app"
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 1)
get_event = EventModel.from_dict(res['values'][0])
self.assertEqual(new_app_event, get_event)
# 1. modify event and test
# 1.1 modify test event, so it is doesn't equal now
new_app_event.remark = 'new_remark'
self.assertNotEquals(new_app_event, get_event)
# 1.2 post the modified event
url = "/platform/event_models"
response = self.fetch(url, method='POST', body='[%s]' % new_app_event.get_json())
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 1.3 verify again
url = "/platform/event_models?app=new_app"
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 1)
get_event = EventModel.from_dict(res['values'][0])
# now equal again
self.assertEqual(new_app_event, get_event)
# 2. add an event with post
# 2.1 now there are 3 events
url = "/platform/event_models"
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 3)
# 2.2 add one
added_event = SAMPLE_EVENT.copy()
added_event.name = 'added_event'
response = self.fetch(url, method='POST', body='[%s]' % added_event.get_json())
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 2.3 now there are 4
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(len(res['values']), 4)
# 3. modify only affect cust table
clear_events(self.default_dao)
clear_events(self.cust_dao)
prepare_events(self.default_dao)
modified_event = SAMPLE_EVENT.copy()
modified_event.remark = 'modified'
response = self.fetch(url, method='POST', body='[%s]' % modified_event.get_json())
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 4. default keeps the same, cust is updated
self.assertEqual(self.cust_dao.get_model_by_app_name(SAMPLE_EVENT.app, SAMPLE_EVENT.name), modified_event)
self.assertNotEqual(self.default_dao.get_model_by_app_name(SAMPLE_EVENT.app, SAMPLE_EVENT.name), modified_event)
def test_add_events(self):
event1_dict = {
'name': 'event1',
}
event2_dict = {
'name': 'event2',
'source': [
{
'app': '',
'name': 'event1'
}
]
}
url = "/platform/event_models"
request_content = json.dumps([event1_dict, event2_dict])
response = self.fetch(url, method='POST', body=request_content)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
class TestEventQueryHandler(WebTestCase):
def get_handlers(self):
from nebula.views import event_model_default, event_model
return [(r"/default/event_models/event/(.*)/(.*)", event_model_default.EventQueryHandler),
(r"/platform/event_models/event/(.*)/(.*)", event_model.EventQueryHandler)]
@classmethod
def setUpClass(cls):
super(TestEventQueryHandler, cls).setUpClass()
connection_string = 'mysql://%s:%s@%s:%s' % ('nebula', 'ThreathunterNebula', '127.0.0.1', '3306')
db_env.update_connect_string('%s/%s?charset=utf8' % (connection_string, 'nebula'),
'%s/%s?charset=utf8' % (connection_string, 'nebula_data'),
'%s/%s?charset=utf8' % (connection_string, 'nebula_default'))
db_env.init()
@classmethod
def tearDownClass(cls):
db_env.clear()
super(TestEventQueryHandler, cls).tearDownClass()
def setUp(self):
super(TestEventQueryHandler, self).setUp()
self.db_util = TestClassDBUtil()
self.db_util.setup()
import nebula.dao.base_dao
nebula.dao.base_dao.Global_Data_Session = self.db_util.get_data_session()
nebula.dao.base_dao.Global_Default_Session = self.db_util.get_default_session()
nebula.dao.base_dao.Global_Session = self.db_util.get_session()
self.default_dao = EventModelDefaultDao()
self.cust_dao = EventModelCustDao()
def tearDown(self):
self.db_util.teardown()
super(TestEventQueryHandler, self).tearDown()
def test_get_event(self):
prepare_events(self.cust_dao)
url = "/platform/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(len(res['values']), 1)
self.assertEqual(EventModel.from_dict(res['values'][0]), SAMPLE_EVENT)
# not in default
url = "/default/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 404)
# get not exist
url = "/platform/event_models/event/{}/{}".format(SAMPLE_EVENT.app, 'not_exist')
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 404)
def test_delete_event(self):
prepare_events(self.cust_dao)
url = "/platform/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(len(res['values']), 1)
self.assertEqual(EventModel.from_dict(res['values'][0]), SAMPLE_EVENT)
# delete
url = "/platform/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# check
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 404)
# can not delete default
clear_events(self.cust_dao)
prepare_events(self.default_dao)
url = "/platform/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
def tests_modify_event(self):
prepare_events(self.cust_dao)
url = "/platform/event_models/event/{}/{}".format(SAMPLE_EVENT.app, SAMPLE_EVENT.name)
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(len(res['values']), 1)
get_event = EventModel.from_dict(res['values'][0])
self.assertEqual(get_event, SAMPLE_EVENT)
# 1. modify SAMPLE_EVENT
# 1.1 first not equal
new_sample = SAMPLE_EVENT.copy()
new_sample.remark = 'modified'
self.assertNotEquals(get_event, new_sample)
# 1.2 second modify
response = self.fetch(url, method='POST', body=new_sample.get_json())
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 1.3 third check
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(len(res['values']), 1)
get_event = EventModel.from_dict(res['values'][0])
self.assertEqual(get_event, new_sample)
# 2. add a new one
# 2.1 get 404
url = "/platform/event_models/event/{}/{}".format(SAMPLE_EVENT.app, 'event_to_be_add')
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 404)
# 2.2 add one
event_to_be_add = SAMPLE_EVENT.copy()
event_to_be_add.name = 'event_to_be_add'
response = self.fetch(url, method='POST', body=event_to_be_add.get_json())
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
# 2.3 check
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(len(res['values']), 1)
get_event = EventModel.from_dict(res['values'][0])
self.assertEqual(get_event, event_to_be_add)
| 37.406291 | 120 | 0.613328 | 3,510 | 28,541 | 4.821083 | 0.046154 | 0.123212 | 0.080842 | 0.080369 | 0.916617 | 0.907162 | 0.888252 | 0.879565 | 0.87206 | 0.863373 | 0 | 0.014586 | 0.250517 | 28,541 | 762 | 121 | 37.455381 | 0.776495 | 0.053082 | 0 | 0.867857 | 0 | 0 | 0.112361 | 0.046474 | 0 | 0 | 0 | 0 | 0.257143 | 1 | 0.064286 | false | 0 | 0.025 | 0 | 0.103571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
95b44b9f59908d6deba425d327ea26aa0def0878 | 98 | py | Python | oc_chess_club/__init__.py | PabloLec/oc_chess_club | 69a6ce3111afadce73710d314579af6e6f0cbce6 | [
"MIT"
] | null | null | null | oc_chess_club/__init__.py | PabloLec/oc_chess_club | 69a6ce3111afadce73710d314579af6e6f0cbce6 | [
"MIT"
] | null | null | null | oc_chess_club/__init__.py | PabloLec/oc_chess_club | 69a6ce3111afadce73710d314579af6e6f0cbce6 | [
"MIT"
] | 1 | 2021-07-15T06:49:39.000Z | 2021-07-15T06:49:39.000Z | import oc_chess_club.views.typer_app as _TYPER_APP
def main():
_TYPER_APP._MAIN_TYPER_APP()
| 16.333333 | 50 | 0.785714 | 17 | 98 | 3.941176 | 0.588235 | 0.477612 | 0.358209 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.132653 | 98 | 5 | 51 | 19.6 | 0.788235 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
95d800c43700fd194d5776006614a7444bf61651 | 1,617 | py | Python | care/facility/migrations/0078_auto_20200409_0436.py | gigincg/care | 07be6a7982b5c46a854e3435a52662f32800c8ae | [
"MIT"
] | 189 | 2020-03-17T17:18:58.000Z | 2022-02-22T09:49:45.000Z | care/facility/migrations/0078_auto_20200409_0436.py | gigincg/care | 07be6a7982b5c46a854e3435a52662f32800c8ae | [
"MIT"
] | 598 | 2020-03-19T21:22:09.000Z | 2022-03-30T05:08:37.000Z | care/facility/migrations/0078_auto_20200409_0436.py | gigincg/care | 07be6a7982b5c46a854e3435a52662f32800c8ae | [
"MIT"
] | 159 | 2020-03-19T18:45:56.000Z | 2022-03-17T13:23:12.000Z | # Generated by Django 2.2.11 on 2020-04-08 23:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('facility', '0077_auto_20200409_0422'),
]
operations = [
migrations.AddField(
model_name='historicalpatientregistration',
name='aadhar_no',
field=models.CharField(default='', max_length=255, verbose_name='Aadhar Number of Patient'),
),
migrations.AddField(
model_name='historicalpatientregistration',
name='nationality',
field=models.CharField(default='', max_length=255, verbose_name='Nationality of Patient'),
),
migrations.AddField(
model_name='historicalpatientregistration',
name='passport_no',
field=models.CharField(default='', max_length=255, verbose_name='Passport Number of Foreign Patients'),
),
migrations.AddField(
model_name='patientregistration',
name='aadhar_no',
field=models.CharField(default='', max_length=255, verbose_name='Aadhar Number of Patient'),
),
migrations.AddField(
model_name='patientregistration',
name='nationality',
field=models.CharField(default='', max_length=255, verbose_name='Nationality of Patient'),
),
migrations.AddField(
model_name='patientregistration',
name='passport_no',
field=models.CharField(default='', max_length=255, verbose_name='Passport Number of Foreign Patients'),
),
]
| 36.75 | 115 | 0.623995 | 153 | 1,617 | 6.431373 | 0.300654 | 0.109756 | 0.140244 | 0.164634 | 0.845528 | 0.845528 | 0.74187 | 0.74187 | 0.644309 | 0.644309 | 0 | 0.041982 | 0.263451 | 1,617 | 43 | 116 | 37.604651 | 0.784215 | 0.028448 | 0 | 0.810811 | 1 | 0 | 0.254302 | 0.070108 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.108108 | 0.027027 | 0 | 0.108108 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
95fd0cc3e39eb539d118b321438027e0ad44ed27 | 169 | py | Python | app/tools/dummy_tool.py | Polo3cat/amar-pargo | f7457e93a76a98e0ba9c711fdbda44debd336dbb | [
"Apache-2.0"
] | null | null | null | app/tools/dummy_tool.py | Polo3cat/amar-pargo | f7457e93a76a98e0ba9c711fdbda44debd336dbb | [
"Apache-2.0"
] | null | null | null | app/tools/dummy_tool.py | Polo3cat/amar-pargo | f7457e93a76a98e0ba9c711fdbda44debd336dbb | [
"Apache-2.0"
] | null | null | null | from .tool import Tool
class DummyTool(Tool):
def __init__(self):
pass
def evaluate(self, screenshot):
return 0
def act(self, screenshot):
return 'wait', 1
| 13 | 32 | 0.698225 | 24 | 169 | 4.75 | 0.666667 | 0.245614 | 0.350877 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014706 | 0.195266 | 169 | 12 | 33 | 14.083333 | 0.823529 | 0 | 0 | 0 | 0 | 0 | 0.023669 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.375 | false | 0.125 | 0.125 | 0.25 | 0.875 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 7 |
25417c6b0451269f76d6ad3edc039a2bbaa87cdf | 166,256 | py | Python | nova/tests/unit/conductor/test_conductor.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/conductor/test_conductor.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/conductor/test_conductor.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z | begin_unit
comment|'# Copyright 2012 IBM Corp.'
nl|'\n'
comment|'# Copyright 2013 Red Hat, Inc.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
string|'"""Tests for the conductor service."""'
newline|'\n'
nl|'\n'
name|'import'
name|'copy'
newline|'\n'
name|'import'
name|'uuid'
newline|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'from'
name|'mox3'
name|'import'
name|'mox'
newline|'\n'
name|'import'
name|'oslo_messaging'
name|'as'
name|'messaging'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'timeutils'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
op|'.'
name|'compute'
name|'import'
name|'flavors'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'compute'
name|'import'
name|'rpcapi'
name|'as'
name|'compute_rpcapi'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'compute'
name|'import'
name|'task_states'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'compute'
name|'import'
name|'vm_states'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'conductor'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'conductor'
name|'import'
name|'api'
name|'as'
name|'conductor_api'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'conductor'
name|'import'
name|'manager'
name|'as'
name|'conductor_manager'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'conductor'
name|'import'
name|'rpcapi'
name|'as'
name|'conductor_rpcapi'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'conductor'
op|'.'
name|'tasks'
name|'import'
name|'live_migrate'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'conductor'
op|'.'
name|'tasks'
name|'import'
name|'migrate'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'context'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'db'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
name|'as'
name|'exc'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'image'
name|'import'
name|'api'
name|'as'
name|'image_api'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'objects'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'objects'
name|'import'
name|'base'
name|'as'
name|'obj_base'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'objects'
name|'import'
name|'fields'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'rpc'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'scheduler'
name|'import'
name|'client'
name|'as'
name|'scheduler_client'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'scheduler'
name|'import'
name|'utils'
name|'as'
name|'scheduler_utils'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
name|'import'
name|'fixtures'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'fakes'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'cast_as_call'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'compute'
name|'import'
name|'test_compute'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_instance'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_notifier'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_request_spec'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_server_actions'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
name|'import'
name|'fake_utils'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
name|'import'
name|'uuidsentinel'
name|'as'
name|'uuids'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'utils'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FakeContext
name|'class'
name|'FakeContext'
op|'('
name|'context'
op|'.'
name|'RequestContext'
op|')'
op|':'
newline|'\n'
DECL|member|elevated
indent|' '
name|'def'
name|'elevated'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Return a consistent elevated context so we can detect it."""'
newline|'\n'
name|'if'
name|'not'
name|'hasattr'
op|'('
name|'self'
op|','
string|"'_elevated'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_elevated'
op|'='
name|'super'
op|'('
name|'FakeContext'
op|','
name|'self'
op|')'
op|'.'
name|'elevated'
op|'('
op|')'
newline|'\n'
dedent|''
name|'return'
name|'self'
op|'.'
name|'_elevated'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|_BaseTestCase
dedent|''
dedent|''
name|'class'
name|'_BaseTestCase'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'_BaseTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
newline|'\n'
name|'self'
op|'.'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'FakeContext'
op|'('
name|'self'
op|'.'
name|'user_id'
op|','
name|'self'
op|'.'
name|'project_id'
op|')'
newline|'\n'
nl|'\n'
name|'fake_notifier'
op|'.'
name|'stub_notifier'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'addCleanup'
op|'('
name|'fake_notifier'
op|'.'
name|'reset'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_deserialize_context
name|'def'
name|'fake_deserialize_context'
op|'('
name|'serializer'
op|','
name|'ctxt_dict'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'user_id'
op|','
name|'ctxt_dict'
op|'['
string|"'user_id'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'project_id'
op|','
name|'ctxt_dict'
op|'['
string|"'project_id'"
op|']'
op|')'
newline|'\n'
name|'return'
name|'self'
op|'.'
name|'context'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'rpc'
op|'.'
name|'RequestContextSerializer'
op|','
string|"'deserialize_context'"
op|','
nl|'\n'
name|'fake_deserialize_context'
op|')'
newline|'\n'
nl|'\n'
name|'fake_utils'
op|'.'
name|'stub_out_utils_spawn_n'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ConductorTestCase
dedent|''
dedent|''
name|'class'
name|'ConductorTestCase'
op|'('
name|'_BaseTestCase'
op|','
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Conductor Manager Tests."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ConductorTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'conductor_manager'
op|'.'
name|'ConductorManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'='
name|'self'
op|'.'
name|'conductor'
newline|'\n'
nl|'\n'
DECL|member|_test_object_action
dedent|''
name|'def'
name|'_test_object_action'
op|'('
name|'self'
op|','
name|'is_classmethod'
op|','
name|'raise_exception'
op|')'
op|':'
newline|'\n'
DECL|class|TestObject
indent|' '
name|'class'
name|'TestObject'
op|'('
name|'obj_base'
op|'.'
name|'NovaObject'
op|')'
op|':'
newline|'\n'
DECL|member|foo
indent|' '
name|'def'
name|'foo'
op|'('
name|'self'
op|','
name|'raise_exception'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'raise_exception'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'Exception'
op|'('
string|"'test'"
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'test'"
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'classmethod'
newline|'\n'
DECL|member|bar
name|'def'
name|'bar'
op|'('
name|'cls'
op|','
name|'context'
op|','
name|'raise_exception'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'raise_exception'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'Exception'
op|'('
string|"'test'"
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'test'"
newline|'\n'
nl|'\n'
dedent|''
dedent|''
dedent|''
name|'obj_base'
op|'.'
name|'NovaObjectRegistry'
op|'.'
name|'register'
op|'('
name|'TestObject'
op|')'
newline|'\n'
nl|'\n'
name|'obj'
op|'='
name|'TestObject'
op|'('
op|')'
newline|'\n'
comment|'# NOTE(danms): After a trip over RPC, any tuple will be a list,'
nl|'\n'
comment|'# so use a list here to make sure we can handle it'
nl|'\n'
name|'fake_args'
op|'='
op|'['
op|']'
newline|'\n'
name|'if'
name|'is_classmethod'
op|':'
newline|'\n'
DECL|variable|versions
indent|' '
name|'versions'
op|'='
op|'{'
string|"'TestObject'"
op|':'
string|"'1.0'"
op|'}'
newline|'\n'
DECL|variable|result
name|'result'
op|'='
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'object_class_action_versions'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'TestObject'
op|'.'
name|'obj_name'
op|'('
op|')'
op|','
string|"'bar'"
op|','
name|'versions'
op|','
nl|'\n'
name|'fake_args'
op|','
op|'{'
string|"'raise_exception'"
op|':'
name|'raise_exception'
op|'}'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'updates'
op|','
name|'result'
op|'='
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'object_action'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'obj'
op|','
string|"'foo'"
op|','
name|'fake_args'
op|','
nl|'\n'
op|'{'
string|"'raise_exception'"
op|':'
name|'raise_exception'
op|'}'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'test'"
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_object_action
dedent|''
name|'def'
name|'test_object_action'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_object_action'
op|'('
name|'False'
op|','
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_object_action_on_raise
dedent|''
name|'def'
name|'test_object_action_on_raise'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'messaging'
op|'.'
name|'ExpectedException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_test_object_action'
op|','
name|'False'
op|','
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_object_class_action
dedent|''
name|'def'
name|'test_object_class_action'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_object_action'
op|'('
name|'True'
op|','
name|'False'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_object_class_action_on_raise
dedent|''
name|'def'
name|'test_object_class_action_on_raise'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'messaging'
op|'.'
name|'ExpectedException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'_test_object_action'
op|','
name|'True'
op|','
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_object_action_copies_object
dedent|''
name|'def'
name|'test_object_action_copies_object'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|class|TestObject
indent|' '
name|'class'
name|'TestObject'
op|'('
name|'obj_base'
op|'.'
name|'NovaObject'
op|')'
op|':'
newline|'\n'
DECL|variable|fields
indent|' '
name|'fields'
op|'='
op|'{'
string|"'dict'"
op|':'
name|'fields'
op|'.'
name|'DictOfStringsField'
op|'('
op|')'
op|'}'
newline|'\n'
nl|'\n'
DECL|member|touch_dict
name|'def'
name|'touch_dict'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'dict'
op|'['
string|"'foo'"
op|']'
op|'='
string|"'bar'"
newline|'\n'
name|'self'
op|'.'
name|'obj_reset_changes'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'obj_base'
op|'.'
name|'NovaObjectRegistry'
op|'.'
name|'register'
op|'('
name|'TestObject'
op|')'
newline|'\n'
nl|'\n'
name|'obj'
op|'='
name|'TestObject'
op|'('
op|')'
newline|'\n'
name|'obj'
op|'.'
name|'dict'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'obj'
op|'.'
name|'obj_reset_changes'
op|'('
op|')'
newline|'\n'
name|'updates'
op|','
name|'result'
op|'='
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'object_action'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'obj'
op|','
string|"'touch_dict'"
op|','
name|'tuple'
op|'('
op|')'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
comment|'# NOTE(danms): If conductor did not properly copy the object, then'
nl|'\n'
comment|'# the new and reference copies of the nested dict object will be'
nl|'\n'
comment|"# the same, and thus 'dict' will not be reported as changed"
nl|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'dict'"
op|','
name|'updates'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
string|"'foo'"
op|':'
string|"'bar'"
op|'}'
op|','
name|'updates'
op|'['
string|"'dict'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_object_class_action_versions
dedent|''
name|'def'
name|'test_object_class_action_versions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'obj_base'
op|'.'
name|'NovaObjectRegistry'
op|'.'
name|'register'
newline|'\n'
DECL|class|TestObject
name|'class'
name|'TestObject'
op|'('
name|'obj_base'
op|'.'
name|'NovaObject'
op|')'
op|':'
newline|'\n'
DECL|variable|VERSION
indent|' '
name|'VERSION'
op|'='
string|"'1.10'"
newline|'\n'
nl|'\n'
op|'@'
name|'classmethod'
newline|'\n'
DECL|member|foo
name|'def'
name|'foo'
op|'('
name|'cls'
op|','
name|'context'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'cls'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'versions'
op|'='
op|'{'
nl|'\n'
string|"'TestObject'"
op|':'
string|"'1.2'"
op|','
nl|'\n'
string|"'OtherObj'"
op|':'
string|"'1.0'"
op|','
nl|'\n'
op|'}'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|','
nl|'\n'
string|"'_object_dispatch'"
op|')'
name|'as'
name|'m'
op|':'
newline|'\n'
indent|' '
name|'m'
op|'.'
name|'return_value'
op|'='
name|'TestObject'
op|'('
op|')'
newline|'\n'
name|'m'
op|'.'
name|'return_value'
op|'.'
name|'obj_to_primitive'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'object_class_action_versions'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'TestObject'
op|'.'
name|'obj_name'
op|'('
op|')'
op|','
string|"'foo'"
op|','
name|'versions'
op|','
nl|'\n'
name|'tuple'
op|'('
op|')'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'m'
op|'.'
name|'return_value'
op|'.'
name|'obj_to_primitive'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'target_version'
op|'='
string|"'1.2'"
op|','
name|'version_manifest'
op|'='
name|'versions'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_reset
dedent|''
dedent|''
name|'def'
name|'test_reset'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Service'
op|','
string|"'clear_min_version_cache'"
nl|'\n'
op|')'
name|'as'
name|'mock_clear_cache'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'reset'
op|'('
op|')'
newline|'\n'
name|'mock_clear_cache'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_provider_fw_rule_get_all
dedent|''
dedent|''
name|'def'
name|'test_provider_fw_rule_get_all'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'provider_fw_rule_get_all'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'['
op|']'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ConductorRPCAPITestCase
dedent|''
dedent|''
name|'class'
name|'ConductorRPCAPITestCase'
op|'('
name|'_BaseTestCase'
op|','
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Conductor RPC API Tests."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ConductorRPCAPITestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_service'
op|'='
name|'self'
op|'.'
name|'start_service'
op|'('
nl|'\n'
string|"'conductor'"
op|','
name|'manager'
op|'='
string|"'nova.conductor.manager.ConductorManager'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'='
name|'self'
op|'.'
name|'conductor_service'
op|'.'
name|'manager'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'conductor_rpcapi'
op|'.'
name|'ConductorAPI'
op|'('
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ConductorAPITestCase
dedent|''
dedent|''
name|'class'
name|'ConductorAPITestCase'
op|'('
name|'_BaseTestCase'
op|','
name|'test'
op|'.'
name|'TestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Conductor API Tests."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ConductorAPITestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_service'
op|'='
name|'self'
op|'.'
name|'start_service'
op|'('
nl|'\n'
string|"'conductor'"
op|','
name|'manager'
op|'='
string|"'nova.conductor.manager.ConductorManager'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'conductor_api'
op|'.'
name|'API'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'='
name|'self'
op|'.'
name|'conductor_service'
op|'.'
name|'manager'
newline|'\n'
nl|'\n'
DECL|member|test_wait_until_ready
dedent|''
name|'def'
name|'test_wait_until_ready'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'timeouts'
op|'='
op|'['
op|']'
newline|'\n'
name|'calls'
op|'='
name|'dict'
op|'('
name|'count'
op|'='
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_ping
name|'def'
name|'fake_ping'
op|'('
name|'context'
op|','
name|'message'
op|','
name|'timeout'
op|')'
op|':'
newline|'\n'
indent|' '
name|'timeouts'
op|'.'
name|'append'
op|'('
name|'timeout'
op|')'
newline|'\n'
name|'calls'
op|'['
string|"'count'"
op|']'
op|'+='
number|'1'
newline|'\n'
name|'if'
name|'calls'
op|'['
string|"'count'"
op|']'
op|'<'
number|'15'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'messaging'
op|'.'
name|'MessagingTimeout'
op|'('
string|'"fake"'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'base_rpcapi'
op|','
string|"'ping'"
op|','
name|'fake_ping'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'wait_until_ready'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'timeouts'
op|'.'
name|'count'
op|'('
number|'10'
op|')'
op|','
number|'10'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'None'
op|','
name|'timeouts'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'oslo_versionedobjects.base.obj_tree_get_versions'"
op|')'
newline|'\n'
DECL|member|test_object_backport_redirect
name|'def'
name|'test_object_backport_redirect'
op|'('
name|'self'
op|','
name|'mock_ovo'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_ovo'
op|'.'
name|'return_value'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'obj_versions'
newline|'\n'
name|'mock_objinst'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor'
op|','
nl|'\n'
string|"'object_backport_versions'"
op|')'
name|'as'
name|'mock_call'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'object_backport'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'ctxt'
op|','
nl|'\n'
name|'mock_objinst'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'target_version'
op|')'
newline|'\n'
name|'mock_call'
op|'.'
name|'assert_called_once_with'
op|'('
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'ctxt'
op|','
nl|'\n'
name|'mock_objinst'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'obj_versions'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ConductorLocalAPITestCase
dedent|''
dedent|''
dedent|''
name|'class'
name|'ConductorLocalAPITestCase'
op|'('
name|'ConductorAPITestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Conductor LocalAPI Tests."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ConductorLocalAPITestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'conductor_api'
op|'.'
name|'LocalAPI'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'='
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_manager'
op|'.'
name|'_target'
newline|'\n'
nl|'\n'
DECL|member|test_wait_until_ready
dedent|''
name|'def'
name|'test_wait_until_ready'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Override test in ConductorAPITestCase'
nl|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ConductorImportTest
dedent|''
dedent|''
name|'class'
name|'ConductorImportTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_import_conductor_local
indent|' '
name|'def'
name|'test_import_conductor_local'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'use_local'
op|'='
name|'True'
op|','
name|'group'
op|'='
string|"'conductor'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'conductor'
op|'.'
name|'API'
op|'('
op|')'
op|','
name|'conductor_api'
op|'.'
name|'LocalAPI'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'conductor'
op|'.'
name|'ComputeTaskAPI'
op|'('
op|')'
op|','
nl|'\n'
name|'conductor_api'
op|'.'
name|'LocalComputeTaskAPI'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_import_conductor_rpc
dedent|''
name|'def'
name|'test_import_conductor_rpc'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'use_local'
op|'='
name|'False'
op|','
name|'group'
op|'='
string|"'conductor'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'conductor'
op|'.'
name|'API'
op|'('
op|')'
op|','
name|'conductor_api'
op|'.'
name|'API'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'conductor'
op|'.'
name|'ComputeTaskAPI'
op|'('
op|')'
op|','
nl|'\n'
name|'conductor_api'
op|'.'
name|'ComputeTaskAPI'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_import_conductor_override_to_local
dedent|''
name|'def'
name|'test_import_conductor_override_to_local'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'use_local'
op|'='
name|'False'
op|','
name|'group'
op|'='
string|"'conductor'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'conductor'
op|'.'
name|'API'
op|'('
name|'use_local'
op|'='
name|'True'
op|')'
op|','
nl|'\n'
name|'conductor_api'
op|'.'
name|'LocalAPI'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'conductor'
op|'.'
name|'ComputeTaskAPI'
op|'('
name|'use_local'
op|'='
name|'True'
op|')'
op|','
nl|'\n'
name|'conductor_api'
op|'.'
name|'LocalComputeTaskAPI'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|_BaseTaskTestCase
dedent|''
dedent|''
name|'class'
name|'_BaseTaskTestCase'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'_BaseTaskTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
newline|'\n'
name|'self'
op|'.'
name|'project_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_PROJECT_ID'
newline|'\n'
name|'self'
op|'.'
name|'context'
op|'='
name|'FakeContext'
op|'('
name|'self'
op|'.'
name|'user_id'
op|','
name|'self'
op|'.'
name|'project_id'
op|')'
newline|'\n'
name|'fake_server_actions'
op|'.'
name|'stub_out_action_events'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake_deserialize_context
name|'def'
name|'fake_deserialize_context'
op|'('
name|'serializer'
op|','
name|'ctxt_dict'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'user_id'
op|','
name|'ctxt_dict'
op|'['
string|"'user_id'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'self'
op|'.'
name|'context'
op|'.'
name|'project_id'
op|','
name|'ctxt_dict'
op|'['
string|"'project_id'"
op|']'
op|')'
newline|'\n'
name|'return'
name|'self'
op|'.'
name|'context'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stubs'
op|'.'
name|'Set'
op|'('
name|'rpc'
op|'.'
name|'RequestContextSerializer'
op|','
string|"'deserialize_context'"
op|','
nl|'\n'
name|'fake_deserialize_context'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'fixtures'
op|'.'
name|'SpawnIsSynchronousFixture'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_prepare_rebuild_args
dedent|''
name|'def'
name|'_prepare_rebuild_args'
op|'('
name|'self'
op|','
name|'update_args'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
comment|"# Args that don't get passed in to the method but do get passed to RPC"
nl|'\n'
indent|' '
name|'migration'
op|'='
name|'update_args'
name|'and'
name|'update_args'
op|'.'
name|'pop'
op|'('
string|"'migration'"
op|','
name|'None'
op|')'
newline|'\n'
name|'node'
op|'='
name|'update_args'
name|'and'
name|'update_args'
op|'.'
name|'pop'
op|'('
string|"'node'"
op|','
name|'None'
op|')'
newline|'\n'
name|'limits'
op|'='
name|'update_args'
name|'and'
name|'update_args'
op|'.'
name|'pop'
op|'('
string|"'limits'"
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
name|'rebuild_args'
op|'='
op|'{'
string|"'new_pass'"
op|':'
string|"'admin_password'"
op|','
nl|'\n'
string|"'injected_files'"
op|':'
string|"'files_to_inject'"
op|','
nl|'\n'
string|"'image_ref'"
op|':'
string|"'image_ref'"
op|','
nl|'\n'
string|"'orig_image_ref'"
op|':'
string|"'orig_image_ref'"
op|','
nl|'\n'
string|"'orig_sys_metadata'"
op|':'
string|"'orig_sys_meta'"
op|','
nl|'\n'
string|"'bdms'"
op|':'
op|'{'
op|'}'
op|','
nl|'\n'
string|"'recreate'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'on_shared_storage'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'preserve_ephemeral'"
op|':'
name|'False'
op|','
nl|'\n'
string|"'host'"
op|':'
string|"'compute-host'"
op|','
nl|'\n'
string|"'request_spec'"
op|':'
name|'None'
op|'}'
newline|'\n'
name|'if'
name|'update_args'
op|':'
newline|'\n'
indent|' '
name|'rebuild_args'
op|'.'
name|'update'
op|'('
name|'update_args'
op|')'
newline|'\n'
dedent|''
name|'compute_rebuild_args'
op|'='
name|'copy'
op|'.'
name|'deepcopy'
op|'('
name|'rebuild_args'
op|')'
newline|'\n'
name|'compute_rebuild_args'
op|'['
string|"'migration'"
op|']'
op|'='
name|'migration'
newline|'\n'
name|'compute_rebuild_args'
op|'['
string|"'node'"
op|']'
op|'='
name|'node'
newline|'\n'
name|'compute_rebuild_args'
op|'['
string|"'limits'"
op|']'
op|'='
name|'limits'
newline|'\n'
nl|'\n'
comment|"# Args that are passed in to the method but don't get passed to RPC"
nl|'\n'
name|'compute_rebuild_args'
op|'.'
name|'pop'
op|'('
string|"'request_spec'"
op|')'
newline|'\n'
nl|'\n'
name|'return'
name|'rebuild_args'
op|','
name|'compute_rebuild_args'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.Migration'"
op|')'
newline|'\n'
DECL|member|test_live_migrate
name|'def'
name|'test_live_migrate'
op|'('
name|'self'
op|','
name|'migobj'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst'
op|'='
name|'fake_instance'
op|'.'
name|'fake_db_instance'
op|'('
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Instance'
op|'('
op|')'
op|','
name|'inst'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'migration'
op|'='
name|'migobj'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'live_migrate'
op|'.'
name|'LiveMigrationTask'
op|','
string|"'execute'"
op|')'
newline|'\n'
name|'task'
op|'='
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'_build_live_migrate_task'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
string|"'destination'"
op|','
string|"'block_migration'"
op|','
nl|'\n'
string|"'disk_over_commit'"
op|','
name|'migration'
op|')'
newline|'\n'
name|'task'
op|'.'
name|'execute'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'isinstance'
op|'('
name|'self'
op|'.'
name|'conductor'
op|','
op|'('
name|'conductor_api'
op|'.'
name|'ComputeTaskAPI'
op|','
nl|'\n'
name|'conductor_api'
op|'.'
name|'LocalComputeTaskAPI'
op|')'
op|')'
op|':'
newline|'\n'
comment|"# The API method is actually 'live_migrate_instance'. It gets"
nl|'\n'
comment|"# converted into 'migrate_server' when doing RPC."
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'live_migrate_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
nl|'\n'
string|"'destination'"
op|','
string|"'block_migration'"
op|','
string|"'disk_over_commit'"
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'migrate_server'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'destination'"
op|'}'
op|','
name|'True'
op|','
name|'False'
op|','
name|'None'
op|','
nl|'\n'
string|"'block_migration'"
op|','
string|"'disk_over_commit'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'accepted'"
op|','
name|'migration'
op|'.'
name|'status'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'destination'"
op|','
name|'migration'
op|'.'
name|'dest_compute'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'inst_obj'
op|'.'
name|'host'
op|','
name|'migration'
op|'.'
name|'source_compute'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_test_cold_migrate
dedent|''
name|'def'
name|'_test_cold_migrate'
op|'('
name|'self'
op|','
name|'clean_shutdown'
op|'='
name|'True'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'utils'
op|','
string|"'get_image_from_system_metadata'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
string|"'execute'"
op|')'
newline|'\n'
name|'inst'
op|'='
name|'fake_instance'
op|'.'
name|'fake_db_instance'
op|'('
name|'image_ref'
op|'='
string|"'image_ref'"
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Instance'
op|'('
op|')'
op|','
name|'inst'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'inst_obj'
op|'.'
name|'system_metadata'
op|'='
op|'{'
string|"'image_hw_disk_bus'"
op|':'
string|"'scsi'"
op|'}'
newline|'\n'
name|'flavor'
op|'='
name|'flavors'
op|'.'
name|'get_default_flavor'
op|'('
op|')'
newline|'\n'
name|'flavor'
op|'.'
name|'extra_specs'
op|'='
op|'{'
string|"'extra_specs'"
op|':'
string|"'fake'"
op|'}'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'limits'"
op|':'
op|'{'
op|'}'
op|','
nl|'\n'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|'['
string|"'host1'"
op|','
name|'None'
op|']'
op|']'
op|'}'
op|'}'
newline|'\n'
name|'request_spec'
op|'='
op|'{'
string|"'instance_type'"
op|':'
name|'obj_base'
op|'.'
name|'obj_to_primitive'
op|'('
name|'flavor'
op|')'
op|','
nl|'\n'
string|"'instance_properties'"
op|':'
op|'{'
op|'}'
op|'}'
newline|'\n'
name|'utils'
op|'.'
name|'get_image_from_system_metadata'
op|'('
nl|'\n'
name|'inst_obj'
op|'.'
name|'system_metadata'
op|')'
op|'.'
name|'AndReturn'
op|'('
string|"'image'"
op|')'
newline|'\n'
nl|'\n'
name|'scheduler_utils'
op|'.'
name|'build_request_spec'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
string|"'image'"
op|','
nl|'\n'
op|'['
name|'mox'
op|'.'
name|'IsA'
op|'('
name|'objects'
op|'.'
name|'Instance'
op|')'
op|']'
op|','
nl|'\n'
name|'instance_type'
op|'='
name|'mox'
op|'.'
name|'IsA'
op|'('
name|'objects'
op|'.'
name|'Flavor'
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'request_spec'
op|')'
newline|'\n'
name|'task'
op|'='
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'_build_cold_migrate_task'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
name|'flavor'
op|','
name|'filter_properties'
op|','
nl|'\n'
name|'request_spec'
op|','
op|'['
op|']'
op|','
name|'clean_shutdown'
op|'='
name|'clean_shutdown'
op|')'
newline|'\n'
name|'task'
op|'.'
name|'execute'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'scheduler_hint'
op|'='
op|'{'
string|"'filter_properties'"
op|':'
op|'{'
op|'}'
op|'}'
newline|'\n'
nl|'\n'
name|'if'
name|'isinstance'
op|'('
name|'self'
op|'.'
name|'conductor'
op|','
op|'('
name|'conductor_api'
op|'.'
name|'ComputeTaskAPI'
op|','
nl|'\n'
name|'conductor_api'
op|'.'
name|'LocalComputeTaskAPI'
op|')'
op|')'
op|':'
newline|'\n'
comment|"# The API method is actually 'resize_instance'. It gets"
nl|'\n'
comment|"# converted into 'migrate_server' when doing RPC."
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'resize_instance'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
op|'{'
op|'}'
op|','
name|'scheduler_hint'
op|','
name|'flavor'
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'clean_shutdown'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'migrate_server'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
name|'scheduler_hint'
op|','
nl|'\n'
name|'False'
op|','
name|'False'
op|','
name|'flavor'
op|','
name|'None'
op|','
name|'None'
op|','
op|'['
op|']'
op|','
nl|'\n'
name|'clean_shutdown'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cold_migrate
dedent|''
dedent|''
name|'def'
name|'test_cold_migrate'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_cold_migrate'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cold_migrate_forced_shutdown
dedent|''
name|'def'
name|'test_cold_migrate_forced_shutdown'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_cold_migrate'
op|'('
name|'clean_shutdown'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.objects.Instance.refresh'"
op|')'
newline|'\n'
DECL|member|test_build_instances
name|'def'
name|'test_build_instances'
op|'('
name|'self'
op|','
name|'mock_refresh'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance_type'
op|'='
name|'flavors'
op|'.'
name|'get_default_flavor'
op|'('
op|')'
newline|'\n'
comment|'# NOTE(danms): Avoid datetime timezone issues with converted flavors'
nl|'\n'
name|'instance_type'
op|'.'
name|'created_at'
op|'='
name|'None'
newline|'\n'
name|'instances'
op|'='
op|'['
name|'objects'
op|'.'
name|'Instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'id'
op|'='
name|'i'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuid'
op|'.'
name|'uuid4'
op|'('
op|')'
op|','
nl|'\n'
name|'flavor'
op|'='
name|'instance_type'
op|')'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
number|'2'
op|')'
op|']'
newline|'\n'
name|'instance_type_p'
op|'='
name|'obj_base'
op|'.'
name|'obj_to_primitive'
op|'('
name|'instance_type'
op|')'
newline|'\n'
name|'instance_properties'
op|'='
name|'obj_base'
op|'.'
name|'obj_to_primitive'
op|'('
name|'instances'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'instance_properties'
op|'['
string|"'system_metadata'"
op|']'
op|'='
name|'flavors'
op|'.'
name|'save_flavor_info'
op|'('
nl|'\n'
op|'{'
op|'}'
op|','
name|'instance_type'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|','
string|"'_schedule_instances'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'db'
op|','
nl|'\n'
string|"'block_device_mapping_get_all_by_instance'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'build_and_run_instance'"
op|')'
newline|'\n'
nl|'\n'
name|'spec'
op|'='
op|'{'
string|"'image'"
op|':'
op|'{'
string|"'fake_data'"
op|':'
string|"'should_pass_silently'"
op|'}'
op|','
nl|'\n'
string|"'instance_properties'"
op|':'
name|'instance_properties'
op|','
nl|'\n'
string|"'instance_type'"
op|':'
name|'instance_type_p'
op|','
nl|'\n'
string|"'num_instances'"
op|':'
number|'2'
op|'}'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
string|"'hosts'"
op|':'
op|'['
op|']'
op|'}'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'_schedule_instances'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'spec'
op|','
name|'filter_properties'
op|')'
op|'.'
name|'AndReturn'
op|'('
nl|'\n'
op|'['
op|'{'
string|"'host'"
op|':'
string|"'host1'"
op|','
string|"'nodename'"
op|':'
string|"'node1'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host2'"
op|','
string|"'nodename'"
op|':'
string|"'node2'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|']'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'block_device_mapping_get_all_by_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'['
number|'0'
op|']'
op|'.'
name|'uuid'
op|')'
op|'.'
name|'AndReturn'
op|'('
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|'.'
name|'build_and_run_instance'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'host'
op|'='
string|"'host1'"
op|','
nl|'\n'
name|'image'
op|'='
op|'{'
string|"'fake_data'"
op|':'
string|"'should_pass_silently'"
op|'}'
op|','
nl|'\n'
name|'request_spec'
op|'='
op|'{'
nl|'\n'
string|"'image'"
op|':'
op|'{'
string|"'fake_data'"
op|':'
string|"'should_pass_silently'"
op|'}'
op|','
nl|'\n'
string|"'instance_properties'"
op|':'
name|'instance_properties'
op|','
nl|'\n'
string|"'instance_type'"
op|':'
name|'instance_type_p'
op|','
nl|'\n'
string|"'num_instances'"
op|':'
number|'2'
op|'}'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|'['
string|"'host1'"
op|','
string|"'node1'"
op|']'
op|']'
op|'}'
op|','
nl|'\n'
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'node'
op|'='
string|"'node1'"
op|','
name|'limits'
op|'='
op|'['
op|']'
op|')'
newline|'\n'
name|'db'
op|'.'
name|'block_device_mapping_get_all_by_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'['
number|'1'
op|']'
op|'.'
name|'uuid'
op|')'
op|'.'
name|'AndReturn'
op|'('
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|'.'
name|'build_and_run_instance'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'host'
op|'='
string|"'host2'"
op|','
nl|'\n'
name|'image'
op|'='
op|'{'
string|"'fake_data'"
op|':'
string|"'should_pass_silently'"
op|'}'
op|','
nl|'\n'
name|'request_spec'
op|'='
op|'{'
nl|'\n'
string|"'image'"
op|':'
op|'{'
string|"'fake_data'"
op|':'
string|"'should_pass_silently'"
op|'}'
op|','
nl|'\n'
string|"'instance_properties'"
op|':'
name|'instance_properties'
op|','
nl|'\n'
string|"'instance_type'"
op|':'
name|'instance_type_p'
op|','
nl|'\n'
string|"'num_instances'"
op|':'
number|'2'
op|'}'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'limits'"
op|':'
op|'['
op|']'
op|','
nl|'\n'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|'['
string|"'host2'"
op|','
string|"'node2'"
op|']'
op|']'
op|'}'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|','
nl|'\n'
name|'node'
op|'='
string|"'node2'"
op|','
name|'limits'
op|'='
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
op|'{'
string|"'fake_data'"
op|':'
string|"'should_pass_silently'"
op|'}'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'set_vm_state_and_notify'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_client'
op|'.'
name|'SchedulerClient'
op|','
nl|'\n'
string|"'select_destinations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_cleanup_allocated_networks'"
op|')'
newline|'\n'
DECL|member|test_build_instances_scheduler_failure
name|'def'
name|'test_build_instances_scheduler_failure'
op|'('
nl|'\n'
name|'self'
op|','
name|'cleanup_mock'
op|','
name|'sd_mock'
op|','
name|'state_mock'
op|','
nl|'\n'
name|'sig_mock'
op|','
name|'bs_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
nl|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
number|'2'
op|')'
op|']'
newline|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
newline|'\n'
name|'spec'
op|'='
op|'{'
string|"'fake'"
op|':'
string|"'specs'"
op|','
nl|'\n'
string|"'instance_properties'"
op|':'
name|'instances'
op|'['
number|'0'
op|']'
op|'}'
newline|'\n'
name|'exception'
op|'='
name|'exc'
op|'.'
name|'NoValidHost'
op|'('
name|'reason'
op|'='
string|"'fake-reason'"
op|')'
newline|'\n'
nl|'\n'
name|'bs_mock'
op|'.'
name|'return_value'
op|'='
name|'spec'
newline|'\n'
name|'sd_mock'
op|'.'
name|'side_effect'
op|'='
name|'exception'
newline|'\n'
name|'updates'
op|'='
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'ERROR'
op|','
string|"'task_state'"
op|':'
name|'None'
op|'}'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'set_state_calls'
op|'='
op|'['
op|']'
newline|'\n'
name|'cleanup_network_calls'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'instance'
name|'in'
name|'instances'
op|':'
newline|'\n'
indent|' '
name|'set_state_calls'
op|'.'
name|'append'
op|'('
name|'mock'
op|'.'
name|'call'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'.'
name|'uuid'
op|','
string|"'compute_task'"
op|','
string|"'build_instances'"
op|','
nl|'\n'
name|'updates'
op|','
name|'exception'
op|','
name|'spec'
op|')'
op|')'
newline|'\n'
name|'cleanup_network_calls'
op|'.'
name|'append'
op|'('
name|'mock'
op|'.'
name|'call'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'mock'
op|'.'
name|'ANY'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
dedent|''
name|'state_mock'
op|'.'
name|'assert_has_calls'
op|'('
name|'set_state_calls'
op|')'
newline|'\n'
name|'cleanup_mock'
op|'.'
name|'assert_has_calls'
op|'('
name|'cleanup_network_calls'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_build_instances_retry_exceeded
dedent|''
name|'def'
name|'test_build_instances_retry_exceeded'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
op|']'
newline|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'10'
op|','
string|"'hosts'"
op|':'
op|'['
op|']'
op|'}'
op|'}'
newline|'\n'
name|'updates'
op|'='
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'ERROR'
op|','
string|"'task_state'"
op|':'
name|'None'
op|'}'
newline|'\n'
nl|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_cleanup_allocated_networks'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'set_vm_state_and_notify'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'populate_retry'"
op|')'
newline|'\n'
DECL|function|_test
name|'def'
name|'_test'
op|'('
name|'populate_retry'
op|','
nl|'\n'
name|'set_vm_state_and_notify'
op|','
name|'cleanup_mock'
op|')'
op|':'
newline|'\n'
comment|'# build_instances() is a cast, we need to wait for it to'
nl|'\n'
comment|'# complete'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'populate_retry'
op|'.'
name|'side_effect'
op|'='
name|'exc'
op|'.'
name|'MaxRetriesExceeded'
op|'('
nl|'\n'
name|'reason'
op|'='
string|'"Too many try"'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|','
nl|'\n'
name|'filter_properties'
op|'='
name|'filter_properties'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
name|'populate_retry'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'filter_properties'
op|','
name|'instances'
op|'['
number|'0'
op|']'
op|'.'
name|'uuid'
op|')'
newline|'\n'
name|'set_vm_state_and_notify'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instances'
op|'['
number|'0'
op|']'
op|'.'
name|'uuid'
op|','
string|"'compute_task'"
op|','
nl|'\n'
string|"'build_instances'"
op|','
name|'updates'
op|','
name|'mock'
op|'.'
name|'ANY'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'cleanup_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'mock'
op|'.'
name|'ANY'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'_test'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_cleanup_allocated_networks'"
op|')'
newline|'\n'
DECL|member|test_build_instances_scheduler_group_failure
name|'def'
name|'test_build_instances_scheduler_group_failure'
op|'('
nl|'\n'
name|'self'
op|','
name|'cleanup_mock'
op|','
name|'state_mock'
op|','
name|'sig_mock'
op|','
name|'bs_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
nl|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
number|'2'
op|')'
op|']'
newline|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
newline|'\n'
name|'spec'
op|'='
op|'{'
string|"'fake'"
op|':'
string|"'specs'"
op|','
nl|'\n'
string|"'instance_properties'"
op|':'
name|'instances'
op|'['
number|'0'
op|']'
op|'}'
newline|'\n'
nl|'\n'
name|'bs_mock'
op|'.'
name|'return_value'
op|'='
name|'spec'
newline|'\n'
name|'exception'
op|'='
name|'exc'
op|'.'
name|'UnsupportedPolicyException'
op|'('
name|'reason'
op|'='
string|"'fake-reason'"
op|')'
newline|'\n'
name|'sig_mock'
op|'.'
name|'side_effect'
op|'='
name|'exception'
newline|'\n'
nl|'\n'
name|'updates'
op|'='
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'ERROR'
op|','
string|"'task_state'"
op|':'
name|'None'
op|'}'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
nl|'\n'
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
name|'set_state_calls'
op|'='
op|'['
op|']'
newline|'\n'
name|'cleanup_network_calls'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'instance'
name|'in'
name|'instances'
op|':'
newline|'\n'
indent|' '
name|'set_state_calls'
op|'.'
name|'append'
op|'('
name|'mock'
op|'.'
name|'call'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'.'
name|'uuid'
op|','
string|"'build_instances'"
op|','
name|'updates'
op|','
nl|'\n'
name|'exception'
op|','
name|'spec'
op|')'
op|')'
newline|'\n'
name|'cleanup_network_calls'
op|'.'
name|'append'
op|'('
name|'mock'
op|'.'
name|'call'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'mock'
op|'.'
name|'ANY'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
dedent|''
name|'state_mock'
op|'.'
name|'assert_has_calls'
op|'('
name|'set_state_calls'
op|')'
newline|'\n'
name|'cleanup_mock'
op|'.'
name|'assert_has_calls'
op|'('
name|'cleanup_network_calls'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Instance'
op|','
string|"'refresh'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'InstanceMapping'
op|','
string|"'get_by_instance_uuid'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exc'
op|'.'
name|'InstanceMappingNotFound'
op|'('
name|'uuid'
op|'='
string|"'fake'"
op|')'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'HostMapping'
op|','
string|"'get_by_host'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_client'
op|'.'
name|'SchedulerClient'
op|','
nl|'\n'
string|"'select_destinations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
DECL|member|test_build_instances_no_instance_mapping
name|'def'
name|'test_build_instances_no_instance_mapping'
op|'('
name|'self'
op|','
name|'_mock_set_state'
op|','
nl|'\n'
name|'mock_select_dests'
op|','
name|'mock_get_by_host'
op|','
name|'mock_get_inst_map_by_uuid'
op|','
nl|'\n'
name|'_mock_refresh'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
name|'mock_select_dests'
op|'.'
name|'return_value'
op|'='
op|'['
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host1'"
op|','
string|"'nodename'"
op|':'
string|"'node1'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host2'"
op|','
string|"'nodename'"
op|':'
string|"'node2'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|']'
newline|'\n'
nl|'\n'
name|'instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
nl|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
number|'2'
op|')'
op|']'
newline|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'build_and_run_instance'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
nl|'\n'
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
dedent|''
name|'mock_get_inst_map_by_uuid'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instances'
op|'['
number|'0'
op|']'
op|'.'
name|'uuid'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instances'
op|'['
number|'1'
op|']'
op|'.'
name|'uuid'
op|')'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'mock_get_by_host'
op|'.'
name|'called'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Instance'
op|','
string|"'refresh'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'InstanceMapping'
op|','
string|"'get_by_instance_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'HostMapping'
op|','
string|"'get_by_host'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exc'
op|'.'
name|'HostMappingNotFound'
op|'('
name|'name'
op|'='
string|"'fake'"
op|')'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_client'
op|'.'
name|'SchedulerClient'
op|','
nl|'\n'
string|"'select_destinations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
DECL|member|test_build_instances_no_host_mapping
name|'def'
name|'test_build_instances_no_host_mapping'
op|'('
name|'self'
op|','
name|'_mock_set_state'
op|','
nl|'\n'
name|'mock_select_dests'
op|','
name|'mock_get_by_host'
op|','
name|'mock_get_inst_map_by_uuid'
op|','
nl|'\n'
name|'_mock_refresh'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
name|'mock_select_dests'
op|'.'
name|'return_value'
op|'='
op|'['
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host1'"
op|','
string|"'nodename'"
op|':'
string|"'node1'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host2'"
op|','
string|"'nodename'"
op|':'
string|"'node2'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|']'
newline|'\n'
nl|'\n'
name|'num_instances'
op|'='
number|'2'
newline|'\n'
name|'instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
nl|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
name|'num_instances'
op|')'
op|']'
newline|'\n'
name|'inst_mapping_mocks'
op|'='
op|'['
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
name|'num_instances'
op|')'
op|']'
newline|'\n'
name|'mock_get_inst_map_by_uuid'
op|'.'
name|'side_effect'
op|'='
name|'inst_mapping_mocks'
newline|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'build_and_run_instance'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
nl|'\n'
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
dedent|''
name|'for'
name|'instance'
name|'in'
name|'instances'
op|':'
newline|'\n'
indent|' '
name|'mock_get_inst_map_by_uuid'
op|'.'
name|'assert_any_call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'.'
name|'uuid'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'for'
name|'inst_mapping'
name|'in'
name|'inst_mapping_mocks'
op|':'
newline|'\n'
indent|' '
name|'inst_mapping'
op|'.'
name|'destroy'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'mock_get_by_host'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'host1'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'host2'"
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Instance'
op|','
string|"'refresh'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'InstanceMapping'
op|','
string|"'get_by_instance_uuid'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'HostMapping'
op|','
string|"'get_by_host'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_client'
op|'.'
name|'SchedulerClient'
op|','
nl|'\n'
string|"'select_destinations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
DECL|member|test_build_instances_update_instance_mapping
name|'def'
name|'test_build_instances_update_instance_mapping'
op|'('
name|'self'
op|','
name|'_mock_set_state'
op|','
nl|'\n'
name|'mock_select_dests'
op|','
name|'mock_get_by_host'
op|','
name|'mock_get_inst_map_by_uuid'
op|','
nl|'\n'
name|'_mock_refresh'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
name|'mock_select_dests'
op|'.'
name|'return_value'
op|'='
op|'['
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host1'"
op|','
string|"'nodename'"
op|':'
string|"'node1'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host2'"
op|','
string|"'nodename'"
op|':'
string|"'node2'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|']'
newline|'\n'
name|'mock_get_by_host'
op|'.'
name|'side_effect'
op|'='
op|'['
nl|'\n'
name|'objects'
op|'.'
name|'HostMapping'
op|'('
name|'cell_mapping'
op|'='
name|'objects'
op|'.'
name|'CellMapping'
op|'('
name|'id'
op|'='
number|'1'
op|')'
op|')'
op|','
nl|'\n'
name|'objects'
op|'.'
name|'HostMapping'
op|'('
name|'cell_mapping'
op|'='
name|'objects'
op|'.'
name|'CellMapping'
op|'('
name|'id'
op|'='
number|'2'
op|')'
op|')'
op|']'
newline|'\n'
nl|'\n'
name|'num_instances'
op|'='
number|'2'
newline|'\n'
name|'instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
nl|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
name|'num_instances'
op|')'
op|']'
newline|'\n'
name|'inst_mapping_mocks'
op|'='
op|'['
name|'mock'
op|'.'
name|'Mock'
op|'('
op|')'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
name|'num_instances'
op|')'
op|']'
newline|'\n'
name|'mock_get_inst_map_by_uuid'
op|'.'
name|'side_effect'
op|'='
name|'inst_mapping_mocks'
newline|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'build_and_run_instance'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
nl|'\n'
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
dedent|''
name|'for'
name|'instance'
name|'in'
name|'instances'
op|':'
newline|'\n'
indent|' '
name|'mock_get_inst_map_by_uuid'
op|'.'
name|'assert_any_call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'.'
name|'uuid'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'for'
name|'inst_mapping'
name|'in'
name|'inst_mapping_mocks'
op|':'
newline|'\n'
indent|' '
name|'inst_mapping'
op|'.'
name|'save'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'inst_mapping_mocks'
op|'['
number|'0'
op|']'
op|'.'
name|'cell_mapping'
op|'.'
name|'id'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2'
op|','
name|'inst_mapping_mocks'
op|'['
number|'1'
op|']'
op|'.'
name|'cell_mapping'
op|'.'
name|'id'
op|')'
newline|'\n'
name|'mock_get_by_host'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'host1'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'host2'"
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_unshelve_instance_on_host
dedent|''
name|'def'
name|'test_unshelve_instance_on_host'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'SHELVED'
newline|'\n'
name|'instance'
op|'.'
name|'task_state'
op|'='
name|'task_states'
op|'.'
name|'UNSHELVING'
newline|'\n'
name|'instance'
op|'.'
name|'save'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'='
name|'instance'
op|'.'
name|'system_metadata'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'start_instance'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'unshelve_instance'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|'.'
name|'start_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'system_metadata'
op|'['
string|"'shelved_at'"
op|']'
op|'='
name|'timeutils'
op|'.'
name|'utcnow'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|'='
string|"'fake_image_id'"
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_host'"
op|']'
op|'='
string|"'fake-mini'"
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'unshelve_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_unshelve_offload_instance_on_host_with_request_spec
dedent|''
name|'def'
name|'test_unshelve_offload_instance_on_host_with_request_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
newline|'\n'
name|'instance'
op|'.'
name|'task_state'
op|'='
name|'task_states'
op|'.'
name|'UNSHELVING'
newline|'\n'
name|'instance'
op|'.'
name|'save'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'='
name|'instance'
op|'.'
name|'system_metadata'
newline|'\n'
nl|'\n'
name|'system_metadata'
op|'['
string|"'shelved_at'"
op|']'
op|'='
name|'timeutils'
op|'.'
name|'utcnow'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|'='
string|"'fake_image_id'"
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_host'"
op|']'
op|'='
string|"'fake-mini'"
newline|'\n'
nl|'\n'
name|'fake_spec'
op|'='
name|'fake_request_spec'
op|'.'
name|'fake_spec_obj'
op|'('
op|')'
newline|'\n'
comment|'# FIXME(sbauza): Modify the fake RequestSpec object to either add a'
nl|'\n'
comment|'# non-empty SchedulerRetries object or nullify the field'
nl|'\n'
name|'fake_spec'
op|'.'
name|'retry'
op|'='
name|'None'
newline|'\n'
comment|'# FIXME(sbauza): Modify the fake RequestSpec object to either add a'
nl|'\n'
comment|'# non-empty SchedulerLimits object or nullify the field'
nl|'\n'
name|'fake_spec'
op|'.'
name|'limits'
op|'='
name|'None'
newline|'\n'
comment|'# FIXME(sbauza): Modify the fake RequestSpec object to either add a'
nl|'\n'
comment|'# non-empty InstanceGroup object or nullify the field'
nl|'\n'
name|'fake_spec'
op|'.'
name|'instance_group'
op|'='
name|'None'
newline|'\n'
nl|'\n'
name|'filter_properties'
op|'='
name|'fake_spec'
op|'.'
name|'to_legacy_filter_properties_dict'
op|'('
op|')'
newline|'\n'
name|'request_spec'
op|'='
name|'fake_spec'
op|'.'
name|'to_legacy_request_spec_dict'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'host'
op|'='
op|'{'
string|"'host'"
op|':'
string|"'host1'"
op|','
string|"'nodename'"
op|':'
string|"'node1'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
newline|'\n'
nl|'\n'
comment|'# unshelve_instance() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'unshelve_instance'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'populate_filter_properties'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'populate_retry'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|','
string|"'_schedule_instances'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'to_legacy_request_spec_dict'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
nl|'\n'
string|"'to_legacy_filter_properties_dict'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'reset_forced_destinations'"
op|')'
newline|'\n'
DECL|function|do_test
name|'def'
name|'do_test'
op|'('
name|'reset_forced_destinations'
op|','
nl|'\n'
name|'to_filtprops'
op|','
name|'to_reqspec'
op|','
name|'sched_instances'
op|','
nl|'\n'
name|'populate_retry'
op|','
name|'populate_filter_properties'
op|','
nl|'\n'
name|'unshelve_instance'
op|')'
op|':'
newline|'\n'
indent|' '
name|'to_filtprops'
op|'.'
name|'return_value'
op|'='
name|'filter_properties'
newline|'\n'
name|'to_reqspec'
op|'.'
name|'return_value'
op|'='
name|'request_spec'
newline|'\n'
name|'sched_instances'
op|'.'
name|'return_value'
op|'='
op|'['
name|'host'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'unshelve_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|','
name|'fake_spec'
op|')'
newline|'\n'
name|'reset_forced_destinations'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'sched_instances'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'request_spec'
op|','
nl|'\n'
name|'filter_properties'
op|')'
newline|'\n'
comment|'# NOTE(sbauza): Since the instance is dehydrated when passing thru'
nl|'\n'
comment|'# the RPC API, we can only assert mock.ANY for it'
nl|'\n'
name|'unshelve_instance'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'mock'
op|'.'
name|'ANY'
op|','
name|'host'
op|'['
string|"'host'"
op|']'
op|','
name|'image'
op|'='
name|'mock'
op|'.'
name|'ANY'
op|','
nl|'\n'
name|'filter_properties'
op|'='
name|'filter_properties'
op|','
name|'node'
op|'='
name|'host'
op|'['
string|"'nodename'"
op|']'
nl|'\n'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'do_test'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_unshelve_offloaded_instance_glance_image_not_found
dedent|''
name|'def'
name|'test_unshelve_offloaded_instance_glance_image_not_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'shelved_image_id'
op|'='
string|'"image_not_found"'
newline|'\n'
nl|'\n'
name|'instance'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
newline|'\n'
name|'instance'
op|'.'
name|'task_state'
op|'='
name|'task_states'
op|'.'
name|'UNSHELVING'
newline|'\n'
name|'instance'
op|'.'
name|'save'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'='
name|'instance'
op|'.'
name|'system_metadata'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'image_api'
op|','
string|"'get'"
op|')'
newline|'\n'
nl|'\n'
name|'e'
op|'='
name|'exc'
op|'.'
name|'ImageNotFound'
op|'('
name|'image_id'
op|'='
name|'shelved_image_id'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'image_api'
op|'.'
name|'get'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'shelved_image_id'
op|','
name|'show_deleted'
op|'='
name|'False'
op|')'
op|'.'
name|'AndRaise'
op|'('
name|'e'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'system_metadata'
op|'['
string|"'shelved_at'"
op|']'
op|'='
name|'timeutils'
op|'.'
name|'utcnow'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_host'"
op|']'
op|'='
string|"'fake-mini'"
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|'='
name|'shelved_image_id'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
nl|'\n'
name|'exc'
op|'.'
name|'UnshelveException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'unshelve_instance'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'instance'
op|'.'
name|'vm_state'
op|','
name|'vm_states'
op|'.'
name|'ERROR'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_unshelve_offloaded_instance_image_id_is_none
dedent|''
name|'def'
name|'test_unshelve_offloaded_instance_image_id_is_none'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
name|'instance'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
newline|'\n'
name|'instance'
op|'.'
name|'task_state'
op|'='
name|'task_states'
op|'.'
name|'UNSHELVING'
newline|'\n'
comment|"# 'shelved_image_id' is None for volumebacked instance"
nl|'\n'
name|'instance'
op|'.'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|'='
name|'None'
newline|'\n'
nl|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|','
nl|'\n'
string|"'_schedule_instances'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'unshelve_instance'"
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'schedule_mock'
op|','
name|'unshelve_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'schedule_mock'
op|'.'
name|'return_value'
op|'='
op|'['
op|'{'
string|"'host'"
op|':'
string|"'fake_host'"
op|','
nl|'\n'
string|"'nodename'"
op|':'
string|"'fake_node'"
op|','
nl|'\n'
string|"'limits'"
op|':'
op|'{'
op|'}'
op|'}'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'unshelve_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1'
op|','
name|'unshelve_mock'
op|'.'
name|'call_count'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_unshelve_instance_schedule_and_rebuild
dedent|''
dedent|''
name|'def'
name|'test_unshelve_instance_schedule_and_rebuild'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
newline|'\n'
name|'instance'
op|'.'
name|'save'
op|'('
op|')'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|']'
op|'}'
op|'}'
newline|'\n'
name|'system_metadata'
op|'='
name|'instance'
op|'.'
name|'system_metadata'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'image_api'
op|','
string|"'get'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|','
string|"'_schedule_instances'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'unshelve_instance'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'image_api'
op|'.'
name|'get'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'fake_image_id'"
op|','
name|'show_deleted'
op|'='
name|'False'
op|')'
op|'.'
name|'AndReturn'
op|'('
string|"'fake_image'"
op|')'
newline|'\n'
name|'scheduler_utils'
op|'.'
name|'build_request_spec'
op|'('
name|'self'
op|'.'
name|'context'
op|','
string|"'fake_image'"
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
string|"'req_spec'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'_schedule_instances'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'req_spec'"
op|','
name|'filter_properties'
op|')'
op|'.'
name|'AndReturn'
op|'('
nl|'\n'
op|'['
op|'{'
string|"'host'"
op|':'
string|"'fake_host'"
op|','
nl|'\n'
string|"'nodename'"
op|':'
string|"'fake_node'"
op|','
nl|'\n'
string|"'limits'"
op|':'
op|'{'
op|'}'
op|'}'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|'.'
name|'unshelve_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|','
string|"'fake_host'"
op|','
name|'image'
op|'='
string|"'fake_image'"
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'limits'"
op|':'
op|'{'
op|'}'
op|','
nl|'\n'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|'['
string|"'fake_host'"
op|','
nl|'\n'
string|"'fake_node'"
op|']'
op|']'
op|'}'
op|'}'
op|','
nl|'\n'
name|'node'
op|'='
string|"'fake_node'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'system_metadata'
op|'['
string|"'shelved_at'"
op|']'
op|'='
name|'timeutils'
op|'.'
name|'utcnow'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|'='
string|"'fake_image_id'"
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_host'"
op|']'
op|'='
string|"'fake-mini'"
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'unshelve_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_unshelve_instance_schedule_and_rebuild_novalid_host
dedent|''
name|'def'
name|'test_unshelve_instance_schedule_and_rebuild_novalid_host'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
newline|'\n'
name|'instance'
op|'.'
name|'save'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'='
name|'instance'
op|'.'
name|'system_metadata'
newline|'\n'
nl|'\n'
DECL|function|fake_schedule_instances
name|'def'
name|'fake_schedule_instances'
op|'('
name|'context'
op|','
name|'image'
op|','
name|'filter_properties'
op|','
nl|'\n'
op|'*'
name|'instances'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exc'
op|'.'
name|'NoValidHost'
op|'('
name|'reason'
op|'='
string|"''"
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'image_api'
op|','
string|"'get'"
op|','
nl|'\n'
name|'return_value'
op|'='
string|"'fake_image'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|','
string|"'_schedule_instances'"
op|','
nl|'\n'
name|'fake_schedule_instances'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'_get_image'
op|','
name|'_schedule_instances'
op|')'
op|':'
newline|'\n'
indent|' '
name|'system_metadata'
op|'['
string|"'shelved_at'"
op|']'
op|'='
name|'timeutils'
op|'.'
name|'utcnow'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|'='
string|"'fake_image_id'"
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_host'"
op|']'
op|'='
string|"'fake-mini'"
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'unshelve_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|')'
newline|'\n'
name|'_get_image'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|','
nl|'\n'
name|'show_deleted'
op|'='
name|'False'
op|')'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
op|','
name|'instance'
op|'.'
name|'vm_state'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_schedule_instances'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'messaging'
op|'.'
name|'MessagingTimeout'
op|'('
op|')'
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'image_api'
op|'.'
name|'API'
op|','
string|"'get'"
op|','
name|'return_value'
op|'='
string|"'fake_image'"
op|')'
newline|'\n'
DECL|member|test_unshelve_instance_schedule_and_rebuild_messaging_exception
name|'def'
name|'test_unshelve_instance_schedule_and_rebuild_messaging_exception'
op|'('
nl|'\n'
name|'self'
op|','
name|'mock_get_image'
op|','
name|'mock_schedule_instances'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
newline|'\n'
name|'instance'
op|'.'
name|'task_state'
op|'='
name|'task_states'
op|'.'
name|'UNSHELVING'
newline|'\n'
name|'instance'
op|'.'
name|'save'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'='
name|'instance'
op|'.'
name|'system_metadata'
newline|'\n'
nl|'\n'
name|'system_metadata'
op|'['
string|"'shelved_at'"
op|']'
op|'='
name|'timeutils'
op|'.'
name|'utcnow'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|'='
string|"'fake_image_id'"
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_host'"
op|']'
op|'='
string|"'fake-mini'"
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'messaging'
op|'.'
name|'MessagingTimeout'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'unshelve_instance'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|')'
newline|'\n'
name|'mock_get_image'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'system_metadata'
op|'['
string|"'shelved_image_id'"
op|']'
op|','
nl|'\n'
name|'show_deleted'
op|'='
name|'False'
op|')'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
op|','
name|'instance'
op|'.'
name|'vm_state'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'instance'
op|'.'
name|'task_state'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_unshelve_instance_schedule_and_rebuild_volume_backed
dedent|''
name|'def'
name|'test_unshelve_instance_schedule_and_rebuild_volume_backed'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'instance'
op|'.'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'SHELVED_OFFLOADED'
newline|'\n'
name|'instance'
op|'.'
name|'save'
op|'('
op|')'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|']'
op|'}'
op|'}'
newline|'\n'
name|'system_metadata'
op|'='
name|'instance'
op|'.'
name|'system_metadata'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|','
string|"'_schedule_instances'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'unshelve_instance'"
op|')'
newline|'\n'
nl|'\n'
name|'scheduler_utils'
op|'.'
name|'build_request_spec'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'None'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
string|"'req_spec'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'_schedule_instances'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
string|"'req_spec'"
op|','
name|'filter_properties'
op|')'
op|'.'
name|'AndReturn'
op|'('
nl|'\n'
op|'['
op|'{'
string|"'host'"
op|':'
string|"'fake_host'"
op|','
nl|'\n'
string|"'nodename'"
op|':'
string|"'fake_node'"
op|','
nl|'\n'
string|"'limits'"
op|':'
op|'{'
op|'}'
op|'}'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|'.'
name|'unshelve_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|','
string|"'fake_host'"
op|','
name|'image'
op|'='
name|'None'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'limits'"
op|':'
op|'{'
op|'}'
op|','
nl|'\n'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|'['
string|"'fake_host'"
op|','
nl|'\n'
string|"'fake_node'"
op|']'
op|']'
op|'}'
op|'}'
op|','
nl|'\n'
name|'node'
op|'='
string|"'fake_node'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'system_metadata'
op|'['
string|"'shelved_at'"
op|']'
op|'='
name|'timeutils'
op|'.'
name|'utcnow'
op|'('
op|')'
newline|'\n'
name|'system_metadata'
op|'['
string|"'shelved_host'"
op|']'
op|'='
string|"'fake-mini'"
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'unshelve_instance'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_rebuild_instance
dedent|''
name|'def'
name|'test_rebuild_instance'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst_obj'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'rebuild_args'
op|','
name|'compute_args'
op|'='
name|'self'
op|'.'
name|'_prepare_rebuild_args'
op|'('
nl|'\n'
op|'{'
string|"'host'"
op|':'
name|'inst_obj'
op|'.'
name|'host'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'rebuild_instance'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'scheduler_client'
op|','
nl|'\n'
string|"'select_destinations'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'rebuild_mock'
op|','
name|'select_dest_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'rebuild_instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'rebuild_args'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'select_dest_mock'
op|'.'
name|'called'
op|')'
newline|'\n'
name|'rebuild_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'compute_args'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_rebuild_instance_with_scheduler
dedent|''
dedent|''
name|'def'
name|'test_rebuild_instance_with_scheduler'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst_obj'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'inst_obj'
op|'.'
name|'host'
op|'='
string|"'noselect'"
newline|'\n'
name|'expected_host'
op|'='
string|"'thebesthost'"
newline|'\n'
name|'expected_node'
op|'='
string|"'thebestnode'"
newline|'\n'
name|'expected_limits'
op|'='
string|"'fake-limits'"
newline|'\n'
name|'rebuild_args'
op|','
name|'compute_args'
op|'='
name|'self'
op|'.'
name|'_prepare_rebuild_args'
op|'('
nl|'\n'
op|'{'
string|"'host'"
op|':'
name|'None'
op|','
string|"'node'"
op|':'
name|'expected_node'
op|','
string|"'limits'"
op|':'
name|'expected_limits'
op|'}'
op|')'
newline|'\n'
name|'request_spec'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'ignore_hosts'"
op|':'
op|'['
op|'('
name|'inst_obj'
op|'.'
name|'host'
op|')'
op|']'
op|'}'
newline|'\n'
name|'fake_spec'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'rebuild_instance'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'False'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'from_primitives'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'fake_spec'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'scheduler_client'
op|','
nl|'\n'
string|"'select_destinations'"
op|','
nl|'\n'
name|'return_value'
op|'='
op|'['
op|'{'
string|"'host'"
op|':'
name|'expected_host'
op|','
nl|'\n'
string|"'nodename'"
op|':'
name|'expected_node'
op|','
nl|'\n'
string|"'limits'"
op|':'
name|'expected_limits'
op|'}'
op|']'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.scheduler.utils.build_request_spec'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'request_spec'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'rebuild_mock'
op|','
name|'sig_mock'
op|','
name|'fp_mock'
op|','
name|'select_dest_mock'
op|','
name|'bs_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'rebuild_instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'rebuild_args'
op|')'
newline|'\n'
name|'fp_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'request_spec'
op|','
nl|'\n'
name|'filter_properties'
op|')'
newline|'\n'
name|'select_dest_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'fake_spec'
op|')'
newline|'\n'
name|'compute_args'
op|'['
string|"'host'"
op|']'
op|'='
name|'expected_host'
newline|'\n'
name|'rebuild_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'compute_args'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'compute.instance.rebuild.scheduled'"
op|','
nl|'\n'
name|'fake_notifier'
op|'.'
name|'NOTIFICATIONS'
op|'['
number|'0'
op|']'
op|'.'
name|'event_type'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_rebuild_instance_with_scheduler_no_host
dedent|''
name|'def'
name|'test_rebuild_instance_with_scheduler_no_host'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst_obj'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'inst_obj'
op|'.'
name|'host'
op|'='
string|"'noselect'"
newline|'\n'
name|'rebuild_args'
op|','
name|'_'
op|'='
name|'self'
op|'.'
name|'_prepare_rebuild_args'
op|'('
op|'{'
string|"'host'"
op|':'
name|'None'
op|'}'
op|')'
newline|'\n'
name|'request_spec'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'ignore_hosts'"
op|':'
op|'['
op|'('
name|'inst_obj'
op|'.'
name|'host'
op|')'
op|']'
op|'}'
newline|'\n'
name|'fake_spec'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'rebuild_instance'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'False'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'from_primitives'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'fake_spec'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'scheduler_client'
op|','
nl|'\n'
string|"'select_destinations'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exc'
op|'.'
name|'NoValidHost'
op|'('
name|'reason'
op|'='
string|"''"
op|')'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.scheduler.utils.build_request_spec'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'request_spec'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'rebuild_mock'
op|','
name|'sig_mock'
op|','
name|'fp_mock'
op|','
name|'select_dest_mock'
op|','
name|'bs_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'NoValidHost'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'rebuild_instance'
op|','
nl|'\n'
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'rebuild_args'
op|')'
newline|'\n'
name|'fp_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'request_spec'
op|','
nl|'\n'
name|'filter_properties'
op|')'
newline|'\n'
name|'select_dest_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'fake_spec'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'rebuild_mock'
op|'.'
name|'called'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|'.'
name|'ComputeAPI'
op|','
nl|'\n'
string|"'rebuild_instance'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'scheduler_client'
op|'.'
name|'SchedulerClient'
op|','
nl|'\n'
string|"'select_destinations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.scheduler.utils.build_request_spec'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
DECL|member|test_rebuild_instance_with_scheduler_group_failure
name|'def'
name|'test_rebuild_instance_with_scheduler_group_failure'
op|'('
name|'self'
op|','
nl|'\n'
name|'state_mock'
op|','
nl|'\n'
name|'bs_mock'
op|','
nl|'\n'
name|'select_dest_mock'
op|','
nl|'\n'
name|'sig_mock'
op|','
nl|'\n'
name|'rebuild_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst_obj'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'rebuild_args'
op|','
name|'_'
op|'='
name|'self'
op|'.'
name|'_prepare_rebuild_args'
op|'('
op|'{'
string|"'host'"
op|':'
name|'None'
op|'}'
op|')'
newline|'\n'
name|'request_spec'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'bs_mock'
op|'.'
name|'return_value'
op|'='
name|'request_spec'
newline|'\n'
nl|'\n'
name|'exception'
op|'='
name|'exc'
op|'.'
name|'UnsupportedPolicyException'
op|'('
name|'reason'
op|'='
string|"''"
op|')'
newline|'\n'
name|'sig_mock'
op|'.'
name|'side_effect'
op|'='
name|'exception'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'UnsupportedPolicyException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'rebuild_instance'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'rebuild_args'
op|')'
newline|'\n'
name|'updates'
op|'='
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'ACTIVE'
op|','
string|"'task_state'"
op|':'
name|'None'
op|'}'
newline|'\n'
name|'state_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'rebuild_server'"
op|','
name|'updates'
op|','
nl|'\n'
name|'exception'
op|','
name|'request_spec'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'select_dest_mock'
op|'.'
name|'called'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'rebuild_mock'
op|'.'
name|'called'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_rebuild_instance_evacuate_migration_record
dedent|''
name|'def'
name|'test_rebuild_instance_evacuate_migration_record'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst_obj'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'migration'
op|'='
name|'objects'
op|'.'
name|'Migration'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'source_compute'
op|'='
name|'inst_obj'
op|'.'
name|'host'
op|','
nl|'\n'
name|'source_node'
op|'='
name|'inst_obj'
op|'.'
name|'node'
op|','
nl|'\n'
name|'instance_uuid'
op|'='
name|'inst_obj'
op|'.'
name|'uuid'
op|','
nl|'\n'
name|'status'
op|'='
string|"'accepted'"
op|','
nl|'\n'
name|'migration_type'
op|'='
string|"'evacuation'"
op|')'
newline|'\n'
name|'rebuild_args'
op|','
name|'compute_args'
op|'='
name|'self'
op|'.'
name|'_prepare_rebuild_args'
op|'('
nl|'\n'
op|'{'
string|"'host'"
op|':'
name|'inst_obj'
op|'.'
name|'host'
op|','
string|"'migration'"
op|':'
name|'migration'
op|'}'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'rebuild_instance'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'scheduler_client'
op|','
nl|'\n'
string|"'select_destinations'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Migration'
op|','
string|"'get_by_instance_and_status'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'migration'
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'rebuild_mock'
op|','
name|'select_dest_mock'
op|','
name|'get_migration_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'rebuild_instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'rebuild_args'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'select_dest_mock'
op|'.'
name|'called'
op|')'
newline|'\n'
name|'rebuild_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'compute_args'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_rebuild_instance_with_request_spec
dedent|''
dedent|''
name|'def'
name|'test_rebuild_instance_with_request_spec'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'inst_obj'
op|'='
name|'self'
op|'.'
name|'_create_fake_instance_obj'
op|'('
op|')'
newline|'\n'
name|'inst_obj'
op|'.'
name|'host'
op|'='
string|"'noselect'"
newline|'\n'
name|'expected_host'
op|'='
string|"'thebesthost'"
newline|'\n'
name|'expected_node'
op|'='
string|"'thebestnode'"
newline|'\n'
name|'expected_limits'
op|'='
string|"'fake-limits'"
newline|'\n'
name|'request_spec'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'ignore_hosts'"
op|':'
op|'['
op|'('
name|'inst_obj'
op|'.'
name|'host'
op|')'
op|']'
op|'}'
newline|'\n'
name|'fake_spec'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
name|'ignore_hosts'
op|'='
op|'['
op|']'
op|')'
newline|'\n'
name|'augmented_spec'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
name|'ignore_hosts'
op|'='
op|'['
name|'inst_obj'
op|'.'
name|'host'
op|']'
op|')'
newline|'\n'
name|'rebuild_args'
op|','
name|'compute_args'
op|'='
name|'self'
op|'.'
name|'_prepare_rebuild_args'
op|'('
nl|'\n'
op|'{'
string|"'host'"
op|':'
name|'None'
op|','
string|"'node'"
op|':'
name|'expected_node'
op|','
string|"'limits'"
op|':'
name|'expected_limits'
op|','
nl|'\n'
string|"'request_spec'"
op|':'
name|'fake_spec'
op|'}'
op|')'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'rebuild_instance'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'False'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'from_primitives'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'augmented_spec'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'scheduler_client'
op|','
nl|'\n'
string|"'select_destinations'"
op|','
nl|'\n'
name|'return_value'
op|'='
op|'['
op|'{'
string|"'host'"
op|':'
name|'expected_host'
op|','
nl|'\n'
string|"'nodename'"
op|':'
name|'expected_node'
op|','
nl|'\n'
string|"'limits'"
op|':'
name|'expected_limits'
op|'}'
op|']'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fake_spec'
op|','
string|"'reset_forced_destinations'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fake_spec'
op|','
string|"'to_legacy_request_spec_dict'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'request_spec'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'fake_spec'
op|','
string|"'to_legacy_filter_properties_dict'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'filter_properties'
op|')'
op|','
nl|'\n'
op|')'
name|'as'
op|'('
name|'rebuild_mock'
op|','
name|'sig_mock'
op|','
name|'fp_mock'
op|','
name|'select_dest_mock'
op|','
name|'reset_fd'
op|','
nl|'\n'
name|'to_reqspec'
op|','
name|'to_filtprops'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'rebuild_instance'
op|'('
name|'context'
op|'='
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'rebuild_args'
op|')'
newline|'\n'
name|'reset_fd'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'to_reqspec'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'to_filtprops'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'fp_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'request_spec'
op|','
nl|'\n'
name|'filter_properties'
op|')'
newline|'\n'
name|'select_dest_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'augmented_spec'
op|')'
newline|'\n'
name|'compute_args'
op|'['
string|"'host'"
op|']'
op|'='
name|'expected_host'
newline|'\n'
name|'rebuild_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|','
nl|'\n'
op|'**'
name|'compute_args'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'compute.instance.rebuild.scheduled'"
op|','
nl|'\n'
name|'fake_notifier'
op|'.'
name|'NOTIFICATIONS'
op|'['
number|'0'
op|']'
op|'.'
name|'event_type'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ConductorTaskTestCase
dedent|''
dedent|''
name|'class'
name|'ConductorTaskTestCase'
op|'('
name|'_BaseTaskTestCase'
op|','
name|'test_compute'
op|'.'
name|'BaseTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""ComputeTaskManager Tests."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ConductorTaskTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'='
name|'self'
op|'.'
name|'conductor'
newline|'\n'
nl|'\n'
DECL|member|test_reset
dedent|''
name|'def'
name|'test_reset'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.compute.rpcapi.ComputeAPI'"
op|')'
name|'as'
name|'mock_rpc'
op|':'
newline|'\n'
indent|' '
name|'old_rpcapi'
op|'='
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'reset'
op|'('
op|')'
newline|'\n'
name|'mock_rpc'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
name|'old_rpcapi'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_migrate_server_fails_with_rebuild
dedent|''
dedent|''
name|'def'
name|'test_migrate_server_fails_with_rebuild'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'NotImplementedError'
op|','
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'migrate_server'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'None'
op|','
name|'None'
op|','
name|'True'
op|','
name|'True'
op|','
name|'None'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_migrate_server_fails_with_flavor
dedent|''
name|'def'
name|'test_migrate_server_fails_with_flavor'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'flavor'
op|'='
name|'flavors'
op|'.'
name|'get_flavor_by_name'
op|'('
string|"'m1.tiny'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'NotImplementedError'
op|','
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'migrate_server'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'None'
op|','
name|'None'
op|','
name|'True'
op|','
name|'False'
op|','
name|'flavor'
op|','
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_build_request_spec
dedent|''
name|'def'
name|'_build_request_spec'
op|'('
name|'self'
op|','
name|'instance'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'{'
nl|'\n'
string|"'instance_properties'"
op|':'
op|'{'
nl|'\n'
string|"'uuid'"
op|':'
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|','
op|'}'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'set_vm_state_and_notify'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'live_migrate'
op|'.'
name|'LiveMigrationTask'
op|','
string|"'execute'"
op|')'
newline|'\n'
DECL|member|_test_migrate_server_deals_with_expected_exceptions
name|'def'
name|'_test_migrate_server_deals_with_expected_exceptions'
op|'('
name|'self'
op|','
name|'ex'
op|','
nl|'\n'
name|'mock_execute'
op|','
name|'mock_set'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_db_instance'
op|'('
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'ACTIVE'
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Instance'
op|'('
op|')'
op|','
name|'instance'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'mock_execute'
op|'.'
name|'side_effect'
op|'='
name|'ex'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'utils'
op|'.'
name|'ExceptionHelper'
op|'('
name|'self'
op|'.'
name|'conductor'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'type'
op|'('
name|'ex'
op|')'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'migrate_server'
op|','
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'destination'"
op|'}'
op|','
name|'True'
op|','
name|'False'
op|','
name|'None'
op|','
string|"'block_migration'"
op|','
nl|'\n'
string|"'disk_over_commit'"
op|')'
newline|'\n'
nl|'\n'
name|'mock_set'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'inst_obj'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'compute_task'"
op|','
string|"'migrate_server'"
op|','
nl|'\n'
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'ACTIVE'
op|','
nl|'\n'
string|"'task_state'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'expected_task_state'"
op|':'
name|'task_states'
op|'.'
name|'MIGRATING'
op|'}'
op|','
nl|'\n'
name|'ex'
op|','
name|'self'
op|'.'
name|'_build_request_spec'
op|'('
name|'inst_obj'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_migrate_server_deals_with_invalidcpuinfo_exception
dedent|''
name|'def'
name|'test_migrate_server_deals_with_invalidcpuinfo_exception'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_db_instance'
op|'('
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'ACTIVE'
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Instance'
op|'('
op|')'
op|','
name|'instance'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'live_migrate'
op|'.'
name|'LiveMigrationTask'
op|','
string|"'execute'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'scheduler_utils'
op|','
nl|'\n'
string|"'set_vm_state_and_notify'"
op|')'
newline|'\n'
nl|'\n'
name|'ex'
op|'='
name|'exc'
op|'.'
name|'InvalidCPUInfo'
op|'('
name|'reason'
op|'='
string|'"invalid cpu info."'
op|')'
newline|'\n'
nl|'\n'
name|'task'
op|'='
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_build_live_migrate_task'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
string|"'destination'"
op|','
string|"'block_migration'"
op|','
nl|'\n'
string|"'disk_over_commit'"
op|','
name|'mox'
op|'.'
name|'IsA'
op|'('
name|'objects'
op|'.'
name|'Migration'
op|')'
op|')'
newline|'\n'
name|'task'
op|'.'
name|'execute'
op|'('
op|')'
op|'.'
name|'AndRaise'
op|'('
name|'ex'
op|')'
newline|'\n'
nl|'\n'
name|'scheduler_utils'
op|'.'
name|'set_vm_state_and_notify'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'inst_obj'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'compute_task'"
op|','
string|"'migrate_server'"
op|','
nl|'\n'
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'ACTIVE'
op|','
nl|'\n'
string|"'task_state'"
op|':'
name|'None'
op|','
nl|'\n'
string|"'expected_task_state'"
op|':'
name|'task_states'
op|'.'
name|'MIGRATING'
op|'}'
op|','
nl|'\n'
name|'ex'
op|','
name|'self'
op|'.'
name|'_build_request_spec'
op|'('
name|'inst_obj'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'utils'
op|'.'
name|'ExceptionHelper'
op|'('
name|'self'
op|'.'
name|'conductor'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'InvalidCPUInfo'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'migrate_server'
op|','
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'destination'"
op|'}'
op|','
name|'True'
op|','
name|'False'
op|','
name|'None'
op|','
string|"'block_migration'"
op|','
nl|'\n'
string|"'disk_over_commit'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_migrate_server_deals_with_expected_exception
dedent|''
name|'def'
name|'test_migrate_server_deals_with_expected_exception'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'exs'
op|'='
op|'['
name|'exc'
op|'.'
name|'InstanceInvalidState'
op|'('
name|'instance_uuid'
op|'='
string|'"fake"'
op|','
name|'attr'
op|'='
string|"''"
op|','
nl|'\n'
name|'state'
op|'='
string|"''"
op|','
name|'method'
op|'='
string|"''"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'DestinationHypervisorTooOld'
op|'('
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'HypervisorUnavailable'
op|'('
name|'host'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'LiveMigrationWithOldNovaNotSupported'
op|'('
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'MigrationPreCheckError'
op|'('
name|'reason'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'MigrationPreCheckClientException'
op|'('
name|'reason'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'InvalidSharedStorage'
op|'('
name|'path'
op|'='
string|"'dummy'"
op|','
name|'reason'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'NoValidHost'
op|'('
name|'reason'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'ComputeServiceUnavailable'
op|'('
name|'host'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'InvalidHypervisorType'
op|'('
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'InvalidCPUInfo'
op|'('
name|'reason'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'UnableToMigrateToSelf'
op|'('
name|'instance_id'
op|'='
string|"'dummy'"
op|','
name|'host'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'InvalidLocalStorage'
op|'('
name|'path'
op|'='
string|"'dummy'"
op|','
name|'reason'
op|'='
string|"'dummy'"
op|')'
op|','
nl|'\n'
name|'exc'
op|'.'
name|'MigrationSchedulerRPCError'
op|'('
name|'reason'
op|'='
string|"'dummy'"
op|')'
op|']'
newline|'\n'
name|'for'
name|'ex'
name|'in'
name|'exs'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_test_migrate_server_deals_with_expected_exceptions'
op|'('
name|'ex'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'set_vm_state_and_notify'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'live_migrate'
op|'.'
name|'LiveMigrationTask'
op|','
string|"'execute'"
op|')'
newline|'\n'
DECL|member|test_migrate_server_deals_with_unexpected_exceptions
name|'def'
name|'test_migrate_server_deals_with_unexpected_exceptions'
op|'('
name|'self'
op|','
nl|'\n'
name|'mock_live_migrate'
op|','
name|'mock_set_state'
op|')'
op|':'
newline|'\n'
indent|' '
name|'expected_ex'
op|'='
name|'IOError'
op|'('
string|"'fake error'"
op|')'
newline|'\n'
name|'mock_live_migrate'
op|'.'
name|'side_effect'
op|'='
name|'expected_ex'
newline|'\n'
name|'instance'
op|'='
name|'fake_instance'
op|'.'
name|'fake_db_instance'
op|'('
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'.'
name|'_from_db_object'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'objects'
op|'.'
name|'Instance'
op|'('
op|')'
op|','
name|'instance'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'ex'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'MigrationError'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'migrate_server'
op|','
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'destination'"
op|'}'
op|','
name|'True'
op|','
name|'False'
op|','
name|'None'
op|','
string|"'block_migration'"
op|','
nl|'\n'
string|"'disk_over_commit'"
op|')'
newline|'\n'
name|'request_spec'
op|'='
op|'{'
string|"'instance_properties'"
op|':'
op|'{'
nl|'\n'
string|"'uuid'"
op|':'
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|','
op|'}'
op|','
nl|'\n'
op|'}'
newline|'\n'
name|'mock_set_state'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'['
string|"'uuid'"
op|']'
op|','
nl|'\n'
string|"'compute_task'"
op|','
string|"'migrate_server'"
op|','
nl|'\n'
name|'dict'
op|'('
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'ERROR'
op|','
nl|'\n'
name|'task_state'
op|'='
name|'inst_obj'
op|'.'
name|'task_state'
op|','
nl|'\n'
name|'expected_task_state'
op|'='
name|'task_states'
op|'.'
name|'MIGRATING'
op|','
op|')'
op|','
nl|'\n'
name|'expected_ex'
op|','
name|'request_spec'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ex'
op|'.'
name|'kwargs'
op|'['
string|"'reason'"
op|']'
op|','
name|'six'
op|'.'
name|'text_type'
op|'('
name|'expected_ex'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_set_vm_state_and_notify
dedent|''
name|'def'
name|'test_set_vm_state_and_notify'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'scheduler_utils'
op|','
nl|'\n'
string|"'set_vm_state_and_notify'"
op|')'
newline|'\n'
name|'scheduler_utils'
op|'.'
name|'set_vm_state_and_notify'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
number|'1'
op|','
string|"'compute_task'"
op|','
string|"'method'"
op|','
string|"'updates'"
op|','
nl|'\n'
string|"'ex'"
op|','
string|"'request_spec'"
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_set_vm_state_and_notify'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
number|'1'
op|','
string|"'method'"
op|','
string|"'updates'"
op|','
string|"'ex'"
op|','
string|"'request_spec'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'from_primitives'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'utils'
op|','
string|"'get_image_from_system_metadata'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Quotas'
op|','
string|"'from_reservations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_client'
op|'.'
name|'SchedulerClient'
op|','
string|"'select_destinations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
string|"'rollback'"
op|')'
newline|'\n'
DECL|member|test_cold_migrate_no_valid_host_back_in_active_state
name|'def'
name|'test_cold_migrate_no_valid_host_back_in_active_state'
op|'('
nl|'\n'
name|'self'
op|','
name|'rollback_mock'
op|','
name|'notify_mock'
op|','
name|'select_dest_mock'
op|','
name|'quotas_mock'
op|','
nl|'\n'
name|'metadata_mock'
op|','
name|'spec_fp_mock'
op|','
name|'sig_mock'
op|','
name|'brs_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'flavor'
op|'='
name|'flavors'
op|'.'
name|'get_flavor_by_name'
op|'('
string|"'m1.tiny'"
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
nl|'\n'
name|'image_ref'
op|'='
string|"'fake-image_ref'"
op|','
nl|'\n'
name|'instance_type_id'
op|'='
name|'flavor'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'ACTIVE'
op|','
nl|'\n'
name|'system_metadata'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
op|')'
newline|'\n'
name|'request_spec'
op|'='
name|'dict'
op|'('
name|'instance_type'
op|'='
name|'dict'
op|'('
name|'extra_specs'
op|'='
name|'dict'
op|'('
op|')'
op|')'
op|','
nl|'\n'
name|'instance_properties'
op|'='
name|'dict'
op|'('
op|')'
op|')'
newline|'\n'
name|'filter_props'
op|'='
name|'dict'
op|'('
name|'context'
op|'='
name|'None'
op|')'
newline|'\n'
name|'resvs'
op|'='
string|"'fake-resvs'"
newline|'\n'
name|'image'
op|'='
string|"'fake-image'"
newline|'\n'
name|'fake_spec'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
op|')'
newline|'\n'
name|'metadata_mock'
op|'.'
name|'return_value'
op|'='
name|'image'
newline|'\n'
name|'brs_mock'
op|'.'
name|'return_value'
op|'='
name|'request_spec'
newline|'\n'
name|'spec_fp_mock'
op|'.'
name|'return_value'
op|'='
name|'fake_spec'
newline|'\n'
name|'exc_info'
op|'='
name|'exc'
op|'.'
name|'NoValidHost'
op|'('
name|'reason'
op|'='
string|'""'
op|')'
newline|'\n'
name|'select_dest_mock'
op|'.'
name|'side_effect'
op|'='
name|'exc_info'
newline|'\n'
name|'updates'
op|'='
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'ACTIVE'
op|','
nl|'\n'
string|"'task_state'"
op|':'
name|'None'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'NoValidHost'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_cold_migrate'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
nl|'\n'
name|'flavor'
op|','
name|'filter_props'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'clean_shutdown'
op|'='
name|'True'
op|')'
newline|'\n'
name|'metadata_mock'
op|'.'
name|'assert_called_with'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'brs_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'image'
op|','
nl|'\n'
op|'['
name|'inst_obj'
op|']'
op|','
nl|'\n'
name|'instance_type'
op|'='
name|'flavor'
op|')'
newline|'\n'
name|'quotas_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|')'
newline|'\n'
name|'sig_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'request_spec'
op|','
nl|'\n'
name|'filter_props'
op|')'
newline|'\n'
name|'notify_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'migrate_server'"
op|','
name|'updates'
op|','
nl|'\n'
name|'exc_info'
op|','
name|'request_spec'
op|')'
newline|'\n'
name|'rollback_mock'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'from_primitives'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'utils'
op|','
string|"'get_image_from_system_metadata'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Quotas'
op|','
string|"'from_reservations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_client'
op|'.'
name|'SchedulerClient'
op|','
string|"'select_destinations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
string|"'rollback'"
op|')'
newline|'\n'
DECL|member|test_cold_migrate_no_valid_host_back_in_stopped_state
name|'def'
name|'test_cold_migrate_no_valid_host_back_in_stopped_state'
op|'('
nl|'\n'
name|'self'
op|','
name|'rollback_mock'
op|','
name|'notify_mock'
op|','
name|'select_dest_mock'
op|','
name|'quotas_mock'
op|','
nl|'\n'
name|'metadata_mock'
op|','
name|'spec_fp_mock'
op|','
name|'sig_mock'
op|','
name|'brs_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'flavor'
op|'='
name|'flavors'
op|'.'
name|'get_flavor_by_name'
op|'('
string|"'m1.tiny'"
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
nl|'\n'
name|'image_ref'
op|'='
string|"'fake-image_ref'"
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'STOPPED'
op|','
nl|'\n'
name|'instance_type_id'
op|'='
name|'flavor'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'system_metadata'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
op|')'
newline|'\n'
name|'image'
op|'='
string|"'fake-image'"
newline|'\n'
name|'request_spec'
op|'='
name|'dict'
op|'('
name|'instance_type'
op|'='
name|'dict'
op|'('
name|'extra_specs'
op|'='
name|'dict'
op|'('
op|')'
op|')'
op|','
nl|'\n'
name|'instance_properties'
op|'='
name|'dict'
op|'('
op|')'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|')'
newline|'\n'
name|'filter_props'
op|'='
name|'dict'
op|'('
name|'context'
op|'='
name|'None'
op|')'
newline|'\n'
name|'resvs'
op|'='
string|"'fake-resvs'"
newline|'\n'
name|'fake_spec'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'metadata_mock'
op|'.'
name|'return_value'
op|'='
name|'image'
newline|'\n'
name|'brs_mock'
op|'.'
name|'return_value'
op|'='
name|'request_spec'
newline|'\n'
name|'spec_fp_mock'
op|'.'
name|'return_value'
op|'='
name|'fake_spec'
newline|'\n'
name|'exc_info'
op|'='
name|'exc'
op|'.'
name|'NoValidHost'
op|'('
name|'reason'
op|'='
string|'""'
op|')'
newline|'\n'
name|'select_dest_mock'
op|'.'
name|'side_effect'
op|'='
name|'exc_info'
newline|'\n'
name|'updates'
op|'='
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'STOPPED'
op|','
nl|'\n'
string|"'task_state'"
op|':'
name|'None'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'NoValidHost'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_cold_migrate'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
nl|'\n'
name|'flavor'
op|','
name|'filter_props'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'clean_shutdown'
op|'='
name|'True'
op|')'
newline|'\n'
name|'metadata_mock'
op|'.'
name|'assert_called_with'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'brs_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'image'
op|','
nl|'\n'
op|'['
name|'inst_obj'
op|']'
op|','
nl|'\n'
name|'instance_type'
op|'='
name|'flavor'
op|')'
newline|'\n'
name|'quotas_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|')'
newline|'\n'
name|'sig_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'request_spec'
op|','
nl|'\n'
name|'filter_props'
op|')'
newline|'\n'
name|'notify_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'migrate_server'"
op|','
name|'updates'
op|','
nl|'\n'
name|'exc_info'
op|','
name|'request_spec'
op|')'
newline|'\n'
name|'rollback_mock'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_cold_migrate_no_valid_host_error_msg
dedent|''
name|'def'
name|'test_cold_migrate_no_valid_host_error_msg'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'flavor'
op|'='
name|'flavors'
op|'.'
name|'get_flavor_by_name'
op|'('
string|"'m1.tiny'"
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
nl|'\n'
name|'image_ref'
op|'='
string|"'fake-image_ref'"
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'STOPPED'
op|','
nl|'\n'
name|'instance_type_id'
op|'='
name|'flavor'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'system_metadata'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
op|')'
newline|'\n'
name|'request_spec'
op|'='
name|'dict'
op|'('
name|'instance_type'
op|'='
name|'dict'
op|'('
name|'extra_specs'
op|'='
name|'dict'
op|'('
op|')'
op|')'
op|','
nl|'\n'
name|'instance_properties'
op|'='
name|'dict'
op|'('
op|')'
op|')'
newline|'\n'
name|'filter_props'
op|'='
name|'dict'
op|'('
name|'context'
op|'='
name|'None'
op|')'
newline|'\n'
name|'resvs'
op|'='
string|"'fake-resvs'"
newline|'\n'
name|'image'
op|'='
string|"'fake-image'"
newline|'\n'
nl|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'utils'
op|','
string|"'get_image_from_system_metadata'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'image'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'request_spec'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor'
op|','
string|"'_set_vm_state_and_notify'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
nl|'\n'
string|"'execute'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exc'
op|'.'
name|'NoValidHost'
op|'('
name|'reason'
op|'='
string|'""'
op|')'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
string|"'rollback'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'image_mock'
op|','
name|'brs_mock'
op|','
name|'set_vm_mock'
op|','
name|'task_execute_mock'
op|','
nl|'\n'
name|'task_rollback_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'nvh'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'NoValidHost'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_cold_migrate'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'inst_obj'
op|','
name|'flavor'
op|','
name|'filter_props'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'clean_shutdown'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'cold migrate'"
op|','
name|'nvh'
op|'.'
name|'message'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'utils'
op|','
string|"'get_image_from_system_metadata'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.scheduler.utils.build_request_spec'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
string|"'execute'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
string|"'rollback'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
DECL|member|test_cold_migrate_no_valid_host_in_group
name|'def'
name|'test_cold_migrate_no_valid_host_in_group'
op|'('
name|'self'
op|','
nl|'\n'
name|'set_vm_mock'
op|','
nl|'\n'
name|'task_rollback_mock'
op|','
nl|'\n'
name|'task_exec_mock'
op|','
nl|'\n'
name|'brs_mock'
op|','
nl|'\n'
name|'image_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'flavor'
op|'='
name|'flavors'
op|'.'
name|'get_flavor_by_name'
op|'('
string|"'m1.tiny'"
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
nl|'\n'
name|'image_ref'
op|'='
string|"'fake-image_ref'"
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'STOPPED'
op|','
nl|'\n'
name|'instance_type_id'
op|'='
name|'flavor'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'system_metadata'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
op|')'
newline|'\n'
name|'request_spec'
op|'='
name|'dict'
op|'('
name|'instance_type'
op|'='
name|'dict'
op|'('
name|'extra_specs'
op|'='
name|'dict'
op|'('
op|')'
op|')'
op|','
nl|'\n'
name|'instance_properties'
op|'='
name|'dict'
op|'('
op|')'
op|')'
newline|'\n'
name|'filter_props'
op|'='
name|'dict'
op|'('
name|'context'
op|'='
name|'None'
op|')'
newline|'\n'
name|'resvs'
op|'='
string|"'fake-resvs'"
newline|'\n'
name|'image'
op|'='
string|"'fake-image'"
newline|'\n'
name|'exception'
op|'='
name|'exc'
op|'.'
name|'UnsupportedPolicyException'
op|'('
name|'reason'
op|'='
string|"''"
op|')'
newline|'\n'
nl|'\n'
name|'image_mock'
op|'.'
name|'return_value'
op|'='
name|'image'
newline|'\n'
name|'brs_mock'
op|'.'
name|'return_value'
op|'='
name|'request_spec'
newline|'\n'
name|'task_exec_mock'
op|'.'
name|'side_effect'
op|'='
name|'exception'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'UnsupportedPolicyException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_cold_migrate'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'inst_obj'
op|','
name|'flavor'
op|','
name|'filter_props'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'clean_shutdown'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'updates'
op|'='
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'STOPPED'
op|','
string|"'task_state'"
op|':'
name|'None'
op|'}'
newline|'\n'
name|'set_vm_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'migrate_server'"
op|','
name|'updates'
op|','
nl|'\n'
name|'exception'
op|','
name|'request_spec'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'from_primitives'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'utils'
op|','
string|"'get_image_from_system_metadata'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'Quotas'
op|','
string|"'from_reservations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_client'
op|'.'
name|'SchedulerClient'
op|','
string|"'select_destinations'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'conductor_manager'
op|'.'
name|'ComputeTaskManager'
op|','
nl|'\n'
string|"'_set_vm_state_and_notify'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
string|"'rollback'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'compute_rpcapi'
op|'.'
name|'ComputeAPI'
op|','
string|"'prep_resize'"
op|')'
newline|'\n'
DECL|member|test_cold_migrate_exception_host_in_error_state_and_raise
name|'def'
name|'test_cold_migrate_exception_host_in_error_state_and_raise'
op|'('
nl|'\n'
name|'self'
op|','
name|'prep_resize_mock'
op|','
name|'rollback_mock'
op|','
name|'notify_mock'
op|','
nl|'\n'
name|'select_dest_mock'
op|','
name|'quotas_mock'
op|','
name|'metadata_mock'
op|','
name|'spec_fp_mock'
op|','
nl|'\n'
name|'sig_mock'
op|','
name|'brs_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'flavor'
op|'='
name|'flavors'
op|'.'
name|'get_flavor_by_name'
op|'('
string|"'m1.tiny'"
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
nl|'\n'
name|'image_ref'
op|'='
string|"'fake-image_ref'"
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'STOPPED'
op|','
nl|'\n'
name|'instance_type_id'
op|'='
name|'flavor'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'system_metadata'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
op|')'
newline|'\n'
name|'image'
op|'='
string|"'fake-image'"
newline|'\n'
name|'request_spec'
op|'='
name|'dict'
op|'('
name|'instance_type'
op|'='
name|'dict'
op|'('
op|')'
op|','
nl|'\n'
name|'instance_properties'
op|'='
name|'dict'
op|'('
op|')'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|')'
newline|'\n'
name|'filter_props'
op|'='
name|'dict'
op|'('
name|'context'
op|'='
name|'None'
op|')'
newline|'\n'
name|'resvs'
op|'='
string|"'fake-resvs'"
newline|'\n'
name|'fake_spec'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'hosts'
op|'='
op|'['
name|'dict'
op|'('
name|'host'
op|'='
string|"'host1'"
op|','
name|'nodename'
op|'='
name|'None'
op|','
name|'limits'
op|'='
op|'{'
op|'}'
op|')'
op|']'
newline|'\n'
name|'metadata_mock'
op|'.'
name|'return_value'
op|'='
name|'image'
newline|'\n'
name|'brs_mock'
op|'.'
name|'return_value'
op|'='
name|'request_spec'
newline|'\n'
name|'spec_fp_mock'
op|'.'
name|'return_value'
op|'='
name|'fake_spec'
newline|'\n'
name|'exc_info'
op|'='
name|'test'
op|'.'
name|'TestingException'
op|'('
string|"'something happened'"
op|')'
newline|'\n'
name|'select_dest_mock'
op|'.'
name|'return_value'
op|'='
name|'hosts'
newline|'\n'
nl|'\n'
name|'updates'
op|'='
op|'{'
string|"'vm_state'"
op|':'
name|'vm_states'
op|'.'
name|'STOPPED'
op|','
nl|'\n'
string|"'task_state'"
op|':'
name|'None'
op|'}'
newline|'\n'
name|'prep_resize_mock'
op|'.'
name|'side_effect'
op|'='
name|'exc_info'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'test'
op|'.'
name|'TestingException'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_cold_migrate'
op|','
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|','
name|'flavor'
op|','
nl|'\n'
name|'filter_props'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'clean_shutdown'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
name|'metadata_mock'
op|'.'
name|'assert_called_with'
op|'('
op|'{'
op|'}'
op|')'
newline|'\n'
name|'brs_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'image'
op|','
nl|'\n'
op|'['
name|'inst_obj'
op|']'
op|','
nl|'\n'
name|'instance_type'
op|'='
name|'flavor'
op|')'
newline|'\n'
name|'quotas_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'instance'
op|'='
name|'inst_obj'
op|')'
newline|'\n'
name|'sig_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'request_spec'
op|','
nl|'\n'
name|'filter_props'
op|')'
newline|'\n'
name|'select_dest_mock'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'fake_spec'
op|')'
newline|'\n'
name|'prep_resize_mock'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'image'
op|','
name|'inst_obj'
op|','
name|'flavor'
op|','
nl|'\n'
name|'hosts'
op|'['
number|'0'
op|']'
op|'['
string|"'host'"
op|']'
op|','
op|'['
name|'resvs'
op|']'
op|','
nl|'\n'
name|'request_spec'
op|'='
name|'request_spec'
op|','
nl|'\n'
name|'filter_properties'
op|'='
name|'filter_props'
op|','
nl|'\n'
name|'node'
op|'='
name|'hosts'
op|'['
number|'0'
op|']'
op|'['
string|"'nodename'"
op|']'
op|','
name|'clean_shutdown'
op|'='
name|'True'
op|')'
newline|'\n'
name|'notify_mock'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'inst_obj'
op|'.'
name|'uuid'
op|','
nl|'\n'
string|"'migrate_server'"
op|','
name|'updates'
op|','
nl|'\n'
name|'exc_info'
op|','
name|'request_spec'
op|')'
newline|'\n'
name|'rollback_mock'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_resize_no_valid_host_error_msg
dedent|''
name|'def'
name|'test_resize_no_valid_host_error_msg'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'flavor'
op|'='
name|'flavors'
op|'.'
name|'get_flavor_by_name'
op|'('
string|"'m1.tiny'"
op|')'
newline|'\n'
name|'flavor_new'
op|'='
name|'flavors'
op|'.'
name|'get_flavor_by_name'
op|'('
string|"'m1.small'"
op|')'
newline|'\n'
name|'inst_obj'
op|'='
name|'objects'
op|'.'
name|'Instance'
op|'('
nl|'\n'
name|'image_ref'
op|'='
string|"'fake-image_ref'"
op|','
nl|'\n'
name|'vm_state'
op|'='
name|'vm_states'
op|'.'
name|'STOPPED'
op|','
nl|'\n'
name|'instance_type_id'
op|'='
name|'flavor'
op|'['
string|"'id'"
op|']'
op|','
nl|'\n'
name|'system_metadata'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'uuid'
op|'='
name|'uuids'
op|'.'
name|'instance'
op|','
nl|'\n'
name|'user_id'
op|'='
name|'fakes'
op|'.'
name|'FAKE_USER_ID'
op|')'
newline|'\n'
nl|'\n'
name|'request_spec'
op|'='
name|'dict'
op|'('
name|'instance_type'
op|'='
name|'dict'
op|'('
name|'extra_specs'
op|'='
name|'dict'
op|'('
op|')'
op|')'
op|','
nl|'\n'
name|'instance_properties'
op|'='
name|'dict'
op|'('
op|')'
op|')'
newline|'\n'
name|'filter_props'
op|'='
name|'dict'
op|'('
name|'context'
op|'='
name|'None'
op|')'
newline|'\n'
name|'resvs'
op|'='
string|"'fake-resvs'"
newline|'\n'
name|'image'
op|'='
string|"'fake-image'"
newline|'\n'
nl|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'utils'
op|','
string|"'get_image_from_system_metadata'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'image'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|','
nl|'\n'
name|'return_value'
op|'='
name|'request_spec'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor'
op|','
string|"'_set_vm_state_and_notify'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
nl|'\n'
string|"'execute'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exc'
op|'.'
name|'NoValidHost'
op|'('
name|'reason'
op|'='
string|'""'
op|')'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'migrate'
op|'.'
name|'MigrationTask'
op|','
string|"'rollback'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'image_mock'
op|','
name|'brs_mock'
op|','
name|'vm_st_mock'
op|','
name|'task_execute_mock'
op|','
nl|'\n'
name|'task_rb_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'nvh'
op|'='
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exc'
op|'.'
name|'NoValidHost'
op|','
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_cold_migrate'
op|','
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'inst_obj'
op|','
name|'flavor_new'
op|','
name|'filter_props'
op|','
nl|'\n'
op|'['
name|'resvs'
op|']'
op|','
name|'clean_shutdown'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
string|"'resize'"
op|','
name|'nvh'
op|'.'
name|'message'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_build_instances_instance_not_found
dedent|''
dedent|''
name|'def'
name|'test_build_instances_instance_not_found'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
nl|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
number|'2'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'instances'
op|'['
number|'0'
op|']'
op|','
string|"'refresh'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'instances'
op|'['
number|'1'
op|']'
op|','
string|"'refresh'"
op|')'
newline|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
newline|'\n'
name|'spec'
op|'='
op|'{'
string|"'fake'"
op|':'
string|"'specs'"
op|','
nl|'\n'
string|"'instance_properties'"
op|':'
name|'instances'
op|'['
number|'0'
op|']'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|','
string|"'_schedule_instances'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'StubOutWithMock'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'build_and_run_instance'"
op|')'
newline|'\n'
nl|'\n'
name|'scheduler_utils'
op|'.'
name|'build_request_spec'
op|'('
name|'self'
op|'.'
name|'context'
op|','
name|'image'
op|','
nl|'\n'
name|'mox'
op|'.'
name|'IgnoreArg'
op|'('
op|')'
op|')'
op|'.'
name|'AndReturn'
op|'('
name|'spec'
op|')'
newline|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
string|"'hosts'"
op|':'
op|'['
op|']'
op|'}'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'_schedule_instances'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'spec'
op|','
name|'filter_properties'
op|')'
op|'.'
name|'AndReturn'
op|'('
nl|'\n'
op|'['
op|'{'
string|"'host'"
op|':'
string|"'host1'"
op|','
string|"'nodename'"
op|':'
string|"'node1'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host2'"
op|','
string|"'nodename'"
op|':'
string|"'node2'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|']'
op|')'
newline|'\n'
name|'instances'
op|'['
number|'0'
op|']'
op|'.'
name|'refresh'
op|'('
op|')'
op|'.'
name|'AndRaise'
op|'('
nl|'\n'
name|'exc'
op|'.'
name|'InstanceNotFound'
op|'('
name|'instance_id'
op|'='
name|'instances'
op|'['
number|'0'
op|']'
op|'.'
name|'uuid'
op|')'
op|')'
newline|'\n'
name|'instances'
op|'['
number|'1'
op|']'
op|'.'
name|'refresh'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|'.'
name|'build_and_run_instance'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'instance'
op|'='
name|'instances'
op|'['
number|'1'
op|']'
op|','
name|'host'
op|'='
string|"'host2'"
op|','
nl|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
op|','
name|'request_spec'
op|'='
name|'spec'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'limits'"
op|':'
op|'['
op|']'
op|','
nl|'\n'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|'['
string|"'host2'"
op|','
nl|'\n'
string|"'node2'"
op|']'
op|']'
op|'}'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
name|'mox'
op|'.'
name|'IsA'
op|'('
name|'objects'
op|'.'
name|'BlockDeviceMappingList'
op|')'
op|','
nl|'\n'
name|'node'
op|'='
string|"'node2'"
op|','
name|'limits'
op|'='
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'mox'
op|'.'
name|'ReplayAll'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'setup_instance_group'"
op|')'
newline|'\n'
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'scheduler_utils'
op|','
string|"'build_request_spec'"
op|')'
newline|'\n'
DECL|member|test_build_instances_info_cache_not_found
name|'def'
name|'test_build_instances_info_cache_not_found'
op|'('
name|'self'
op|','
name|'build_request_spec'
op|','
nl|'\n'
name|'setup_instance_group'
op|')'
op|':'
newline|'\n'
indent|' '
name|'instances'
op|'='
op|'['
name|'fake_instance'
op|'.'
name|'fake_instance_obj'
op|'('
name|'self'
op|'.'
name|'context'
op|')'
nl|'\n'
name|'for'
name|'i'
name|'in'
name|'range'
op|'('
number|'2'
op|')'
op|']'
newline|'\n'
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
string|"'should_pass_silently'"
op|'}'
newline|'\n'
name|'destinations'
op|'='
op|'['
op|'{'
string|"'host'"
op|':'
string|"'host1'"
op|','
string|"'nodename'"
op|':'
string|"'node1'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|','
nl|'\n'
op|'{'
string|"'host'"
op|':'
string|"'host2'"
op|','
string|"'nodename'"
op|':'
string|"'node2'"
op|','
string|"'limits'"
op|':'
op|'['
op|']'
op|'}'
op|']'
newline|'\n'
name|'spec'
op|'='
op|'{'
string|"'fake'"
op|':'
string|"'specs'"
op|','
nl|'\n'
string|"'instance_properties'"
op|':'
name|'instances'
op|'['
number|'0'
op|']'
op|'}'
newline|'\n'
name|'build_request_spec'
op|'.'
name|'return_value'
op|'='
name|'spec'
newline|'\n'
name|'with'
name|'test'
op|'.'
name|'nested'
op|'('
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'instances'
op|'['
number|'0'
op|']'
op|','
string|"'refresh'"
op|','
nl|'\n'
name|'side_effect'
op|'='
name|'exc'
op|'.'
name|'InstanceInfoCacheNotFound'
op|'('
nl|'\n'
name|'instance_uuid'
op|'='
name|'instances'
op|'['
number|'0'
op|']'
op|'.'
name|'uuid'
op|')'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'instances'
op|'['
number|'1'
op|']'
op|','
string|"'refresh'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'objects'
op|'.'
name|'RequestSpec'
op|','
string|"'from_primitives'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'scheduler_client'
op|','
nl|'\n'
string|"'select_destinations'"
op|','
name|'return_value'
op|'='
name|'destinations'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'self'
op|'.'
name|'conductor_manager'
op|'.'
name|'compute_rpcapi'
op|','
nl|'\n'
string|"'build_and_run_instance'"
op|')'
nl|'\n'
op|')'
name|'as'
op|'('
name|'inst1_refresh'
op|','
name|'inst2_refresh'
op|','
name|'from_primitives'
op|','
nl|'\n'
name|'select_destinations'
op|','
nl|'\n'
name|'build_and_run_instance'
op|')'
op|':'
newline|'\n'
nl|'\n'
comment|'# build_instances() is a cast, we need to wait for it to complete'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'cast_as_call'
op|'.'
name|'CastAsCall'
op|'('
name|'self'
op|'.'
name|'stubs'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'build_instances'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instances'
op|'='
name|'instances'
op|','
nl|'\n'
name|'image'
op|'='
name|'image'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
string|"'block_device_mapping'"
op|','
nl|'\n'
name|'legacy_bdm'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
comment|'# NOTE(sbauza): Due to populate_retry() later in the code,'
nl|'\n'
comment|'# filter_properties is dynamically modified'
nl|'\n'
name|'setup_instance_group'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'context'
op|','
name|'spec'
op|','
op|'{'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|']'
op|'}'
op|'}'
op|')'
newline|'\n'
name|'build_and_run_instance'
op|'.'
name|'assert_called_once_with'
op|'('
name|'self'
op|'.'
name|'context'
op|','
nl|'\n'
name|'instance'
op|'='
name|'instances'
op|'['
number|'1'
op|']'
op|','
name|'host'
op|'='
string|"'host2'"
op|','
name|'image'
op|'='
op|'{'
string|"'fake-data'"
op|':'
nl|'\n'
string|"'should_pass_silently'"
op|'}'
op|','
name|'request_spec'
op|'='
name|'spec'
op|','
nl|'\n'
name|'filter_properties'
op|'='
op|'{'
string|"'limits'"
op|':'
op|'['
op|']'
op|','
nl|'\n'
string|"'retry'"
op|':'
op|'{'
string|"'num_attempts'"
op|':'
number|'1'
op|','
nl|'\n'
string|"'hosts'"
op|':'
op|'['
op|'['
string|"'host2'"
op|','
nl|'\n'
string|"'node2'"
op|']'
op|']'
op|'}'
op|'}'
op|','
nl|'\n'
name|'admin_password'
op|'='
string|"'admin_password'"
op|','
nl|'\n'
name|'injected_files'
op|'='
string|"'injected_files'"
op|','
nl|'\n'
name|'requested_networks'
op|'='
name|'None'
op|','
nl|'\n'
name|'security_groups'
op|'='
string|"'security_groups'"
op|','
nl|'\n'
name|'block_device_mapping'
op|'='
name|'mock'
op|'.'
name|'ANY'
op|','
nl|'\n'
name|'node'
op|'='
string|"'node2'"
op|','
name|'limits'
op|'='
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
dedent|''
dedent|''
dedent|''
name|'class'
name|'ConductorTaskRPCAPITestCase'
op|'('
name|'_BaseTaskTestCase'
op|','
nl|'\n'
DECL|class|ConductorTaskRPCAPITestCase
name|'test_compute'
op|'.'
name|'BaseTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Conductor compute_task RPC namespace Tests."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ConductorTaskRPCAPITestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_service'
op|'='
name|'self'
op|'.'
name|'start_service'
op|'('
nl|'\n'
string|"'conductor'"
op|','
name|'manager'
op|'='
string|"'nova.conductor.manager.ConductorManager'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'conductor_rpcapi'
op|'.'
name|'ComputeTaskAPI'
op|'('
op|')'
newline|'\n'
name|'service_manager'
op|'='
name|'self'
op|'.'
name|'conductor_service'
op|'.'
name|'manager'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'='
name|'service_manager'
op|'.'
name|'compute_task_mgr'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ConductorTaskAPITestCase
dedent|''
dedent|''
name|'class'
name|'ConductorTaskAPITestCase'
op|'('
name|'_BaseTaskTestCase'
op|','
name|'test_compute'
op|'.'
name|'BaseTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Compute task API Tests."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ConductorTaskAPITestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_service'
op|'='
name|'self'
op|'.'
name|'start_service'
op|'('
nl|'\n'
string|"'conductor'"
op|','
name|'manager'
op|'='
string|"'nova.conductor.manager.ConductorManager'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'conductor_api'
op|'.'
name|'ComputeTaskAPI'
op|'('
op|')'
newline|'\n'
name|'service_manager'
op|'='
name|'self'
op|'.'
name|'conductor_service'
op|'.'
name|'manager'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'='
name|'service_manager'
op|'.'
name|'compute_task_mgr'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ConductorLocalComputeTaskAPITestCase
dedent|''
dedent|''
name|'class'
name|'ConductorLocalComputeTaskAPITestCase'
op|'('
name|'ConductorTaskAPITestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Conductor LocalComputeTaskAPI Tests."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ConductorLocalComputeTaskAPITestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor'
op|'='
name|'conductor_api'
op|'.'
name|'LocalComputeTaskAPI'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'conductor_manager'
op|'='
name|'self'
op|'.'
name|'conductor'
op|'.'
name|'_manager'
op|'.'
name|'_target'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.35094 | 88 | 0.60885 | 24,393 | 166,256 | 4.019309 | 0.023203 | 0.181451 | 0.069051 | 0.043144 | 0.943872 | 0.919535 | 0.897851 | 0.86512 | 0.838326 | 0.811103 | 0 | 0.000993 | 0.09157 | 166,256 | 13,460 | 89 | 12.351857 | 0.648161 | 0 | 0 | 0.976523 | 0 | 0 | 0.387493 | 0.054067 | 0 | 0 | 0 | 0 | 0.008767 | 0 | null | null | 0.003566 | 0.003492 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c2d520a75c5b2fdcf780dc54942555c5e180359e | 181 | py | Python | conch/distance/__init__.py | mmcauliffe/Conch | 7668612d7a610d0f5ae3332f990e71b26c5e8b34 | [
"MIT"
] | 33 | 2015-06-10T19:36:54.000Z | 2017-09-18T23:57:46.000Z | conch/distance/__init__.py | mmcauliffe/Conch | 7668612d7a610d0f5ae3332f990e71b26c5e8b34 | [
"MIT"
] | 5 | 2015-03-22T07:05:21.000Z | 2017-08-16T03:48:19.000Z | conch/distance/__init__.py | mmcauliffe/Conch | 7668612d7a610d0f5ae3332f990e71b26c5e8b34 | [
"MIT"
] | 10 | 2015-04-29T05:51:51.000Z | 2017-07-12T18:52:18.000Z | from conch.distance.dtw import DtwFunction
from conch.distance.dct import DctFunction
from conch.distance.xcorr import XcorrFunction
from conch.distance.point import PointFunction
| 30.166667 | 46 | 0.861878 | 24 | 181 | 6.5 | 0.5 | 0.230769 | 0.435897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093923 | 181 | 5 | 47 | 36.2 | 0.95122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c2e184ffb928b639e09812dd4a3a676e2c1523a5 | 7,631 | py | Python | src/genie/libs/parser/nxos/tests/ShowIpRoute/cli/equal/golden_output13_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/nxos/tests/ShowIpRoute/cli/equal/golden_output13_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/nxos/tests/ShowIpRoute/cli/equal/golden_output13_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
'vrf': {
'User-VLAN': {
'address_family': {
'ipv4': {
'routes': {
'0.0.0.0/0': {
'active': True,
'mbest': 0,
'metric': 0,
'next_hop': {
'next_hop_list': {
1: {
'best_ucast_nexthop': True,
'index': 1,
'metric': 0,
'next_hop': '192.168.1.1',
'next_hop_vrf': 'Internet_VLAN',
'route_preference': 20,
'source_protocol': 'bgp',
'source_protocol_status': 'external',
'updated': '38w2d',
},
},
},
'process_id': '64512',
'route': '0.0.0.0/0',
'route_preference': 20,
'source_protocol': 'bgp',
'source_protocol_status': 'external',
'tag': 65000,
'ubest': 1,
},
'10.0.0.0/24': {
'active': True,
'mbest': 0,
'metric': 0,
'next_hop': {
'next_hop_list': {
1: {
'best_ucast_nexthop': True,
'index': 1,
'metric': 0,
'next_hop': '169.254.0.1',
'next_hop_vrf': 'Server*VLAN',
'route_preference': 20,
'source_protocol': 'bgp',
'source_protocol_status': 'external',
'updated': '38w2d',
},
},
},
'process_id': '64512',
'route': '10.0.0.0/24',
'route_preference': 20,
'source_protocol': 'bgp',
'source_protocol_status': 'external',
'tag': 65001,
'ubest': 1,
},
'10.0.0.10/32': {
'active': True,
'mbest': 0,
'metric': 0,
'next_hop': {
'next_hop_list': {
1: {
'best_ucast_nexthop': True,
'index': 1,
'metric': 0,
'next_hop': '169.254.1.1',
'next_hop_af': '{101}',
'next_hop_vrf': 'LegacyLAN',
'route_preference': 20,
'source_protocol': 'bgp',
'source_protocol_status': 'external',
'updated': '38w2d',
},
},
},
'process_id': '64512',
'route': '10.0.0.10/32',
'route_preference': 20,
'source_protocol': 'bgp',
'source_protocol_status': 'external',
'tag': 65002,
'ubest': 1,
},
'10.2.0.0/24': {
'active': True,
'mbest': 0,
'metric': 0,
'next_hop': {
'next_hop_list': {
1: {
'best_ucast_nexthop': True,
'index': 1,
'metric': 0,
'next_hop': '172.16.0.1',
'next_hop_vrf': 'default-VLAN',
'route_preference': 200,
'source_protocol': 'bgp',
'source_protocol_status': 'internal',
'updated': '02:46:06',
'mpls_vpn': True,
},
},
},
'process_id': '64512',
'route': '10.2.0.0/24',
'route_preference': 200,
'source_protocol': 'bgp',
'source_protocol_status': 'internal',
'tag': 64513,
'ubest': 1,
},
'10.1.111.0/24': {
'active': True,
'mbest': 0,
'metric': 2000,
'next_hop': {
'next_hop_list': {
1: {
'best_ucast_nexthop': True,
'index': 1,
'metric': 2000,
'next_hop': '10.84.66.66',
'next_hop_vrf': 'default',
'route_preference': 200,
'source_protocol': 'bgp',
'source_protocol_status': 'internal',
'updated': '00:20:43',
'segid': 601011,
'asymmetric': True,
'tunnelid': '0x64646401',
'encap': 'vxlan',
},
},
},
'process_id': '100',
'route': '10.1.111.0/24',
'route_preference': 200,
'source_protocol': 'bgp',
'source_protocol_status': 'internal',
'tag': 200,
'ubest': 1,
},
}
}
}
}
}
}
| 49.551948 | 103 | 0.221203 | 381 | 7,631 | 4.191601 | 0.204724 | 0.092048 | 0.10645 | 0.14402 | 0.810269 | 0.7464 | 0.7464 | 0.730745 | 0.730745 | 0.730745 | 0 | 0.107531 | 0.686804 | 7,631 | 153 | 104 | 49.875817 | 0.560669 | 0 | 0 | 0.568627 | 0 | 0 | 0.209671 | 0.02883 | 0 | 0 | 0.00131 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6c19bd7999beb85164aa9520e4fea8b5f0a325cc | 29 | py | Python | yeeee/__init__.py | donand/yeeee | 25298b9363c919a291719e7dadfe39ef43dd3d32 | [
"MIT"
] | 5 | 2021-02-11T12:15:18.000Z | 2022-01-17T10:10:20.000Z | yeeee/__init__.py | donand/yeeee | 25298b9363c919a291719e7dadfe39ef43dd3d32 | [
"MIT"
] | null | null | null | yeeee/__init__.py | donand/yeeee | 25298b9363c919a291719e7dadfe39ef43dd3d32 | [
"MIT"
] | null | null | null | from .main import print_yeeee | 29 | 29 | 0.862069 | 5 | 29 | 4.8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.103448 | 29 | 1 | 29 | 29 | 0.923077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 7 |
dd05f4cdae69635c0194378ca96fc40037faf8f5 | 1,307 | py | Python | tests/hadith_qu/test_number_queries.py | octabytes/search | 750124d2de0e349249e3183daccc83ba5a82af36 | [
"Apache-2.0"
] | null | null | null | tests/hadith_qu/test_number_queries.py | octabytes/search | 750124d2de0e349249e3183daccc83ba5a82af36 | [
"Apache-2.0"
] | null | null | null | tests/hadith_qu/test_number_queries.py | octabytes/search | 750124d2de0e349249e3183daccc83ba5a82af36 | [
"Apache-2.0"
] | null | null | null | from search.nlp.hadith_qu import HadithQU
# Queries
#
# user_query = "bukhari 123"
# user_query = "bukhari number 123"
# user_query = "bukhari #123"
# user_query = "bukhari number #123"
def test_query1():
user_query = "bukhari 123"
hadith_qu = HadithQU()
result = hadith_qu.analyze(user_query)
assert result["collection"] == "bukhari"
assert result["filters"][0]["name"] == "hadith_number"
assert result["filters"][0]["number"] == 123
def test_query2():
user_query = "bukhari number 123"
hadith_qu = HadithQU()
result = hadith_qu.analyze(user_query)
assert result["collection"] == "bukhari"
assert result["filters"][0]["name"] == "hadith_number"
assert result["filters"][0]["number"] == 123
def test_query3():
user_query = "bukhari #123"
hadith_qu = HadithQU()
result = hadith_qu.analyze(user_query)
assert result["collection"] == "bukhari"
assert result["filters"][0]["name"] == "hadith_number"
assert result["filters"][0]["number"] == 123
def test_query4():
user_query = "bukhari number #123"
hadith_qu = HadithQU()
result = hadith_qu.analyze(user_query)
assert result["collection"] == "bukhari"
assert result["filters"][0]["name"] == "hadith_number"
assert result["filters"][0]["number"] == 123
| 26.673469 | 58 | 0.65723 | 160 | 1,307 | 5.1875 | 0.1625 | 0.13012 | 0.154217 | 0.192771 | 0.912048 | 0.912048 | 0.912048 | 0.912048 | 0.912048 | 0.806024 | 0 | 0.044818 | 0.180566 | 1,307 | 48 | 59 | 27.229167 | 0.730159 | 0.098699 | 0 | 0.689655 | 0 | 0 | 0.236099 | 0 | 0 | 0 | 0 | 0 | 0.413793 | 1 | 0.137931 | false | 0 | 0.034483 | 0 | 0.172414 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
dd83a67642c843d56e60813f380ae19b65dc0ffd | 16,434 | py | Python | hallo/test/modules/convert/test_update_currencies.py | joshcoales/Hallo | 17145d8f76552ecd4cbc5caef8924bd2cf0cbf24 | [
"MIT"
] | 1 | 2018-05-19T22:27:20.000Z | 2018-05-19T22:27:20.000Z | hallo/test/modules/convert/test_update_currencies.py | joshcoales/Hallo | 17145d8f76552ecd4cbc5caef8924bd2cf0cbf24 | [
"MIT"
] | 75 | 2015-09-26T18:07:18.000Z | 2022-01-04T07:15:11.000Z | hallo/test/modules/convert/test_update_currencies.py | SpangleLabs/Hallo | 17145d8f76552ecd4cbc5caef8924bd2cf0cbf24 | [
"MIT"
] | 1 | 2021-04-10T12:02:47.000Z | 2021-04-10T12:02:47.000Z | import pytest
import hallo.modules.convert.convert_repo
import hallo.modules.convert.update_currencies
from hallo.events import EventMessage, EventHour
class MockUpdate:
def __init__(self, answer):
self.answer = answer
self.was_called = False
def method(self, arg1=None, arg2=None):
self.was_called = True
return self.answer
def method_throws(self, arg1=None, arg2=None):
self.was_called = True
raise Exception(self.answer)
def test_run(hallo_getter):
test_hallo = hallo_getter({"convert"})
update_all = hallo.modules.convert.update_currencies.UpdateCurrencies.update_all
mock_update_all = MockUpdate(["Check method called"])
hallo.modules.convert.update_currencies.UpdateCurrencies.update_all = mock_update_all.method
try:
test_hallo.function_dispatcher.dispatch(
EventMessage(test_hallo.test_server, None, test_hallo.test_user, "update currencies")
)
data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
assert data[0].text == "Check method called"
assert mock_update_all.was_called, "update_all() wasn't called."
finally:
hallo.modules.convert.update_currencies.UpdateCurrencies.update_all = update_all
def test_passive_run(hallo_getter):
test_hallo = hallo_getter({"convert"})
update_all = hallo.modules.convert.update_currencies.UpdateCurrencies.update_all
mock_update_all = MockUpdate(["Check method called"])
hallo.modules.convert.update_currencies.UpdateCurrencies.update_all = mock_update_all.method
try:
test_hallo.function_dispatcher.dispatch_passive(EventHour())
test_hallo.test_server.get_send_data(0)
assert mock_update_all.was_called, "update_all() wasn't called."
finally:
hallo.modules.convert.update_currencies.UpdateCurrencies.update_all = update_all
def test_update_all(hallo_getter):
    """A successful run should call all three data-source updaters and report success for each."""
    test_hallo = hallo_getter({"convert"})
    cls = hallo.modules.convert.update_currencies.UpdateCurrencies
    mock_ecb = MockUpdate(None)
    mock_forex = MockUpdate(None)
    mock_cryptonator = MockUpdate(None)
    # Map each updater method name to its mock replacement.
    patches = {
        "update_from_european_bank_data": mock_ecb.method,
        "update_from_forex_data": mock_forex.method,
        "update_from_cryptonator_data": mock_cryptonator.method,
    }
    saved = {name: getattr(cls, name) for name in patches}
    for name, replacement in patches.items():
        setattr(cls, name, replacement)
    try:
        # Test update_all calls all 3, and gives reply
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "update currencies")
        )
        data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
        reply = data[0].text
        assert "Updated currency data from the European Central Bank" in reply
        assert "Updated currency data from Forex" in reply
        assert "Updated currency data from Cryptonator" in reply
        assert mock_ecb.was_called, "update_from_european_bank_data() wasn't called."
        assert mock_forex.was_called, "update_from_forex_data() wasn't called."
        assert mock_cryptonator.was_called, "update_from_cryptonator_data() wasn't called."
    finally:
        # Restore the real updater methods whatever happened above.
        for name, original in saved.items():
            setattr(cls, name, original)
def test_update_all_fail_ecb(hallo_getter):
    """If the ECB update raises, the reply reports that failure but still notes the other two succeeded."""
    test_hallo = hallo_getter({"convert"})
    cls = hallo.modules.convert.update_currencies.UpdateCurrencies
    mock_ecb = MockUpdate("HTTPException: 403")
    mock_forex = MockUpdate(None)
    mock_cryptonator = MockUpdate(None)
    # ECB throws; the other two succeed.
    patches = {
        "update_from_european_bank_data": mock_ecb.method_throws,
        "update_from_forex_data": mock_forex.method,
        "update_from_cryptonator_data": mock_cryptonator.method,
    }
    saved = {name: getattr(cls, name) for name in patches}
    for name, replacement in patches.items():
        setattr(cls, name, replacement)
    try:
        # Test update_all calls all 3, and gives reply
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "update currencies")
        )
        data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
        reply = data[0].text
        assert "Failed to update European Central Bank data" in reply
        assert "HTTPException: 403" in reply
        assert "Updated currency data from Forex" in reply
        assert "Updated currency data from Cryptonator" in reply
        assert mock_ecb.was_called, "update_from_european_bank_data() wasn't called."
        assert mock_forex.was_called, "update_from_forex_data() wasn't called."
        assert mock_cryptonator.was_called, "update_from_cryptonator_data() wasn't called."
    finally:
        # Restore the real updater methods whatever happened above.
        for name, original in saved.items():
            setattr(cls, name, original)
def test_update_all_fail_forex(hallo_getter):
    """If the Forex update raises, the reply reports that failure but still notes the other two succeeded."""
    test_hallo = hallo_getter({"convert"})
    cls = hallo.modules.convert.update_currencies.UpdateCurrencies
    mock_ecb = MockUpdate(None)
    mock_forex = MockUpdate("HTTPException: 403")
    mock_cryptonator = MockUpdate(None)
    # Forex throws; the other two succeed.
    patches = {
        "update_from_european_bank_data": mock_ecb.method,
        "update_from_forex_data": mock_forex.method_throws,
        "update_from_cryptonator_data": mock_cryptonator.method,
    }
    saved = {name: getattr(cls, name) for name in patches}
    for name, replacement in patches.items():
        setattr(cls, name, replacement)
    try:
        # Test update_all calls all 3, and gives reply
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "update currencies")
        )
        data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
        reply = data[0].text
        assert "Updated currency data from the European Central Bank" in reply
        assert "Failed to update Forex data" in reply
        assert "HTTPException: 403" in reply
        assert "Updated currency data from Cryptonator" in reply
        assert mock_ecb.was_called, "update_from_european_bank_data() wasn't called."
        assert mock_forex.was_called, "update_from_forex_data() wasn't called."
        assert mock_cryptonator.was_called, "update_from_cryptonator_data() wasn't called."
    finally:
        # Restore the real updater methods whatever happened above.
        for name, original in saved.items():
            setattr(cls, name, original)
def test_update_all_fail_cryptonator(hallo_getter):
    """If the Cryptonator update raises, the reply reports that failure but still notes the other two succeeded."""
    test_hallo = hallo_getter({"convert"})
    cls = hallo.modules.convert.update_currencies.UpdateCurrencies
    mock_ecb = MockUpdate(None)
    mock_forex = MockUpdate(None)
    mock_cryptonator = MockUpdate("HTTPException: 403")
    # Cryptonator throws; the other two succeed.
    patches = {
        "update_from_european_bank_data": mock_ecb.method,
        "update_from_forex_data": mock_forex.method,
        "update_from_cryptonator_data": mock_cryptonator.method_throws,
    }
    saved = {name: getattr(cls, name) for name in patches}
    for name, replacement in patches.items():
        setattr(cls, name, replacement)
    try:
        # Test update_all calls all 3, and gives reply
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "update currencies")
        )
        data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
        reply = data[0].text
        assert "Updated currency data from the European Central Bank" in reply
        assert "Updated currency data from Forex" in reply
        assert "Failed to update Cryptonator data" in reply
        assert "HTTPException: 403" in reply
        assert mock_ecb.was_called, "update_from_european_bank_data() wasn't called."
        assert mock_forex.was_called, "update_from_forex_data() wasn't called."
        assert mock_cryptonator.was_called, "update_from_cryptonator_data() wasn't called."
    finally:
        # Restore the real updater methods whatever happened above.
        for name, original in saved.items():
            setattr(cls, name, original)
def test_update_all_fail_all(hallo_getter):
    """If every updater raises, the reply reports all three failures with their error messages."""
    test_hallo = hallo_getter({"convert"})
    cls = hallo.modules.convert.update_currencies.UpdateCurrencies
    mock_ecb = MockUpdate("HTTPException: 403")
    mock_forex = MockUpdate("HTTPException: 500")
    mock_cryptonator = MockUpdate("HTTPException: 404")
    # All three throw, each with a distinct error message.
    patches = {
        "update_from_european_bank_data": mock_ecb.method_throws,
        "update_from_forex_data": mock_forex.method_throws,
        "update_from_cryptonator_data": mock_cryptonator.method_throws,
    }
    saved = {name: getattr(cls, name) for name in patches}
    for name, replacement in patches.items():
        setattr(cls, name, replacement)
    try:
        # Test update_all calls all 3, and gives reply
        test_hallo.function_dispatcher.dispatch(
            EventMessage(test_hallo.test_server, None, test_hallo.test_user, "update currencies")
        )
        data = test_hallo.test_server.get_send_data(1, test_hallo.test_user, EventMessage)
        reply = data[0].text
        assert "Failed to update European Central Bank data" in reply
        assert "HTTPException: 403" in reply
        assert "Failed to update Forex data" in reply
        assert "HTTPException: 500" in reply
        assert "Failed to update Cryptonator data" in reply
        assert "HTTPException: 404" in reply
        assert mock_ecb.was_called, "update_from_european_bank_data() wasn't called."
        assert mock_forex.was_called, "update_from_forex_data() wasn't called."
        assert mock_cryptonator.was_called, "update_from_cryptonator_data() wasn't called."
    finally:
        # Restore the real updater methods whatever happened above.
        for name, original in saved.items():
            setattr(cls, name, original)
@pytest.mark.external_integration
def test_update_ecb():
    """Live integration check: the European Central Bank feed should fill in a non-zero EUR rate for every listed currency."""
    repo = hallo.modules.convert.convert_repo.ConvertRepo()
    currency = hallo.modules.convert.convert_repo.ConvertType(repo, "currency")
    currency.base_unit = hallo.modules.convert.convert_repo.ConvertUnit(currency, ["EUR"], 1)
    repo.add_type(currency)
    codes = (
        "USD", "JPY", "BGN", "CZK", "DKK", "GBP", "HUF", "PLN",
        "RON", "SEK", "CHF", "ISK", "NOK", "HRK", "RUB", "TRY",
        "AUD", "BRL", "CAD", "CNY", "HKD", "IDR", "ILS", "INR",
        "KRW", "MXN", "NZD", "PHP", "SGD", "THB", "ZAR",
    )
    # Seed every currency with a zero rate so a successful update is detectable.
    for code in codes:
        currency.add_unit(hallo.modules.convert.convert_repo.ConvertUnit(currency, [code], 0))
    # Run update_from_european_bank_data against the live API.
    hallo.modules.convert.update_currencies.UpdateCurrencies().update_from_european_bank_data(repo)
    # Every seeded rate should now have been overwritten.
    for code in codes:
        assert currency.get_unit_by_name(code).value != 0, "Currency was not updated: {}".format(code)
@pytest.mark.external_integration
def test_update_forex():
    """Live integration check: the Forex feed should fill in a non-zero EUR rate for every listed currency."""
    repo = hallo.modules.convert.convert_repo.ConvertRepo()
    currency = hallo.modules.convert.convert_repo.ConvertType(repo, "currency")
    currency.base_unit = hallo.modules.convert.convert_repo.ConvertUnit(currency, ["EUR"], 1)
    repo.add_type(currency)
    codes = ("USD", "CHF", "GBP", "JPY", "AUD", "CAD", "SEK", "NOK", "NZD", "TRY")
    # Seed every currency with a zero rate so a successful update is detectable.
    for code in codes:
        currency.add_unit(hallo.modules.convert.convert_repo.ConvertUnit(currency, [code], 0))
    # Run update_from_forex_data against the live API.
    hallo.modules.convert.update_currencies.UpdateCurrencies().update_from_forex_data(repo)
    # Every seeded rate should now have been overwritten.
    for code in codes:
        assert currency.get_unit_by_name(code).value != 0, "Currency was not updated: {}".format(code)
@pytest.mark.external_integration
@pytest.mark.skip(reason="Cryptonator API occasionally returns HTML pages")
def test_update_cryptonator():
    """Live integration check: the Cryptonator feed should fill in a non-zero EUR rate for every listed cryptocurrency."""
    repo = hallo.modules.convert.convert_repo.ConvertRepo()
    currency = hallo.modules.convert.convert_repo.ConvertType(repo, "currency")
    currency.base_unit = hallo.modules.convert.convert_repo.ConvertUnit(currency, ["EUR"], 1)
    repo.add_type(currency)
    codes = ("LTC", "BTC", "BCH", "DOGE", "XMR", "ETH", "ETC", "DASH")
    # Seed every currency with a zero rate so a successful update is detectable.
    for code in codes:
        currency.add_unit(hallo.modules.convert.convert_repo.ConvertUnit(currency, [code], 0))
    # Run update_from_cryptonator_data against the live API.
    hallo.modules.convert.update_currencies.UpdateCurrencies().update_from_cryptonator_data(repo)
    # Every seeded rate should now have been overwritten.
    for code in codes:
        assert currency.get_unit_by_name(code).value != 0, "Currency was not updated: {}".format(code)
| 43.361478 | 108 | 0.715042 | 1,988 | 16,434 | 5.596579 | 0.073441 | 0.073342 | 0.116124 | 0.123584 | 0.942297 | 0.934298 | 0.934298 | 0.926658 | 0.926658 | 0.920007 | 0 | 0.006304 | 0.198795 | 16,434 | 378 | 109 | 43.47619 | 0.838688 | 0.030242 | 0 | 0.730539 | 0 | 0 | 0.126783 | 0.027015 | 0 | 0 | 0 | 0 | 0.125749 | 1 | 0.038922 | false | 0.005988 | 0.011976 | 0 | 0.056886 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
dd96157e13896bd6c499c48a64bd8ede290ba278 | 31,940 | py | Python | data_block/two/tests/test_block.py | spectrum-dev/django-block-monolith | c17a1ef98ae813a4e94581e2e52a4a03f0e65769 | [
"MIT"
] | null | null | null | data_block/two/tests/test_block.py | spectrum-dev/django-block-monolith | c17a1ef98ae813a4e94581e2e52a4a03f0e65769 | [
"MIT"
] | null | null | null | data_block/two/tests/test_block.py | spectrum-dev/django-block-monolith | c17a1ef98ae813a4e94581e2e52a4a03f0e65769 | [
"MIT"
] | null | null | null | import responses
from django.test import TestCase
from blocks.event import event_ingestor
from data_block.two.exceptions import (
DataBlockTwoInvalidCandlestickException,
DataBlockTwoInvalidInputPayloadException,
)
class GetSymbol(TestCase):
    """Tests for the crypto-symbol search endpoint at /DATA_BLOCK/2/cryptoName."""

    def _search(self, name):
        # Query the endpoint with the given search term and return the decoded JSON body.
        return self.client.get("/DATA_BLOCK/2/cryptoName?name=" + name).json()

    def test_ok(self):
        expected = [
            {"value": "BITBTC", "label": "BitBTC (BITBTC)"},
            {"value": "BTC", "label": "Bitcoin (BTC)"},
            {"value": "BTCB", "label": "Bitcoin BEP2 (BTCB)"},
            {"value": "BTCD", "label": "BitcoinDark (BTCD)"},
            {"value": "BTCP", "label": "Bitcoin-Private (BTCP)"},
            {"value": "EBTC", "label": "eBTC (EBTC)"},
            {"value": "SBTC", "label": "Super-Bitcoin (SBTC)"},
        ]
        self.assertDictEqual(self._search("btc"), {"response": expected})

    def test_case_insensitive(self):
        # Mixed-case queries should match the same entries as lower-case ones.
        expected = [
            {"value": "ETC", "label": "Ethereum-Classic (ETC)"},
            {"value": "ETH", "label": "Ethereum (ETH)"},
            {"value": "ETHD", "label": "Ethereum-Dark (ETHD)"},
        ]
        self.assertDictEqual(self._search("Ethereum"), {"response": expected})

    def test_no_results(self):
        # A term matching nothing yields an empty response list.
        self.assertDictEqual(self._search("no-results"), {"response": []})
class PostRun(TestCase):
    """Tests for running data block two (crypto candlestick fetch) via the event ingestor.

    All Alpha Vantage HTTP calls are mocked with the `responses` library.
    The original tests repeated the same meta block, the same three OHLCV
    bars, and the same expected rows verbatim in eight near-identical
    methods; the fixture payloads and expected outputs are now built by
    private helpers so each test only states what varies (interval, series
    key, timestamps). Mocked URLs and JSON bodies are byte-identical to the
    original fixtures.
    """

    # Metadata section returned by every mocked Alpha Vantage response.
    META_DATA = {
        "1. Information": "Crypto Intraday (1min) Time Series",
        "2. Digital Currency Code": "BTC",
        "3. Digital Currency Name": "Bitcoin",
        "4. Market Code": "USD",
        "5. Market Name": "United States Dollar",
        "6. Last Refreshed": "2021-07-26 07:07:00",
        "7. Interval": "1min",
        "8. Output Size": "Full size",
        "9. Time Zone": "UTC",
    }

    # The three OHLCV bars every intraday fixture uses, in order.
    INTRADAY_BARS = [
        ("32324.52000", "32336.75000", "32314.00000", "32332.70000", 26),
        ("32332.71000", "32354.78000", "32323.01000", "32328.77000", 29),
        ("32328.77000", "32332.53000", "32293.15000", "32309.62000", 64),
    ]

    # The single bar repeated at every daily/weekly/monthly timestamp.
    LONG_TERM_BAR = {
        "1a. open (USD)": "32324.52000",
        "1b. open (USD)": "32324.52000",
        "2a. high (USD)": "32336.75000",
        "2b. high (USD)": "32336.75000",
        "3a. low (USD)": "32314.00000",
        "3b. low (USD)": "32314.00000",
        "4a. close (USD)": "32332.70000",
        "4b. close (USD)": "32332.70000",
        "5. volume (USD)": 26,
        "6. market cap (USD)": 26,
    }

    def setUp(self):
        self.payload = {
            "blockType": "DATA_BLOCK",
            "blockId": 2,
        }

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------

    def _run(self, inputs):
        """Run the block through the event ingestor with the given inputs dict."""
        return event_ingestor({**self.payload, "inputs": inputs, "outputs": {}})

    def _run_btc(self, candlestick, start_date, end_date):
        """Run the block for BTC over [start_date, end_date] at the given frequency."""
        return self._run(
            {
                "crypto_name": "BTC",
                "candlestick": candlestick,
                "start_date": start_date,
                "end_date": end_date,
            }
        )

    @staticmethod
    def _iso(timestamp):
        """'2021-07-23 10:00:00' -> '2021-07-23T10:00:00.000000000' (block output format)."""
        return timestamp.replace(" ", "T") + ".000000000"

    def _mock_intraday(self, interval, series_key, timestamps):
        """Register a mocked CRYPTO_INTRADAY response with the three standard bars.

        `series_key` is passed explicitly because the recorded fixtures are
        inconsistent: the 30min/60min fixtures label their series
        "Time Series Crypto (1min)" — preserved here on purpose.
        """
        series = {
            ts: {
                "1. open": o,
                "2. high": h,
                "3. low": l,
                "4. close": c,
                "5. volume": v,
            }
            for ts, (o, h, l, c, v) in zip(timestamps, self.INTRADAY_BARS)
        }
        responses.add(
            responses.GET,
            "https://www.alphavantage.co/query?function=CRYPTO_INTRADAY&symbol=BTC"
            "&market=USD&interval={}&outputsize=full&apikey=demo".format(interval),
            json={"Meta Data": self.META_DATA, series_key: series},
            status=200,
        )

    def _expected_intraday(self, timestamps):
        """Expected ingestor output for the three standard intraday bars."""
        return {
            "response": [
                {
                    "open": o,
                    "high": h,
                    "low": l,
                    "close": c,
                    "volume": v,
                    "timestamp": self._iso(ts),
                }
                for ts, (o, h, l, c, v) in zip(timestamps, self.INTRADAY_BARS)
            ]
        }

    def _mock_long_term(self, function, series_key, timestamps):
        """Register a mocked daily/weekly/monthly response with the standard bar at each timestamp."""
        responses.add(
            responses.GET,
            "https://www.alphavantage.co/query?function={}&symbol=BTC&market=USD"
            "&apikey=demo&datatype=json".format(function),
            json={
                "Meta Data": self.META_DATA,
                series_key: {ts: dict(self.LONG_TERM_BAR) for ts in timestamps},
            },
            status=200,
        )

    def _expected_long_term(self, timestamps):
        """Expected ingestor output for the standard daily/weekly/monthly bar."""
        return {
            "response": [
                {
                    "open": 32324.52,
                    "high": 32336.75,
                    "low": 32314.0,
                    "close": 32332.7,
                    # The block passes the raw volume column name through unchanged.
                    "5. volume (USD)": 26.0,
                    "timestamp": self._iso(ts),
                }
                for ts in timestamps
            ]
        }

    # ------------------------------------------------------------------
    # Intraday candlesticks
    # ------------------------------------------------------------------

    @responses.activate
    def test_one_minute_candlestick_ok(self):
        ts = ["2021-07-23 10:00:00", "2021-07-23 10:01:00", "2021-07-23 10:02:00"]
        self._mock_intraday("1min", "Time Series Crypto (1min)", ts)
        self.assertEqual(self._run_btc("1min", ts[0], ts[-1]), self._expected_intraday(ts))

    @responses.activate
    def test_five_minute_candlestick_ok(self):
        ts = ["2021-07-23 15:00:00", "2021-07-23 15:05:00", "2021-07-23 15:10:00"]
        self._mock_intraday("5min", "Time Series Crypto (5min)", ts)
        self.assertEqual(self._run_btc("5min", ts[0], ts[-1]), self._expected_intraday(ts))

    @responses.activate
    def test_fifteen_minute_candlestick_ok(self):
        ts = ["2021-07-23 13:00:00", "2021-07-23 13:15:00", "2021-07-23 13:30:00"]
        self._mock_intraday("15min", "Time Series Crypto (15min)", ts)
        self.assertEqual(self._run_btc("15min", ts[0], ts[-1]), self._expected_intraday(ts))

    @responses.activate
    def test_thirty_minute_candlestick_ok(self):
        ts = ["2021-07-23 13:00:00", "2021-07-23 13:30:00", "2021-07-23 14:00:00"]
        # NOTE: the recorded fixture labels this series "(1min)", not "(30min)".
        self._mock_intraday("30min", "Time Series Crypto (1min)", ts)
        self.assertEqual(self._run_btc("30min", ts[0], ts[-1]), self._expected_intraday(ts))

    @responses.activate
    def test_sixty_minute_candlestick_ok(self):
        ts = ["2021-07-23 13:00:00", "2021-07-23 14:00:00", "2021-07-23 15:00:00"]
        # NOTE: the recorded fixture labels this series "(1min)", not "(60min)".
        self._mock_intraday("60min", "Time Series Crypto (1min)", ts)
        self.assertEqual(self._run_btc("60min", ts[0], ts[-1]), self._expected_intraday(ts))

    # ------------------------------------------------------------------
    # Daily / weekly / monthly candlesticks
    # ------------------------------------------------------------------

    @responses.activate
    def test_one_day_candlestick_ok(self):
        ts = ["2021-06-10 00:00:00", "2021-06-11 00:00:00", "2021-06-12 00:00:00"]
        self._mock_long_term("DIGITAL_CURRENCY_DAILY", "Time Series (Digital Currency Daily)", ts)
        self.assertEqual(self._run_btc("1day", ts[0], ts[-1]), self._expected_long_term(ts))

    @responses.activate
    def test_one_week_candlestick_ok(self):
        ts = ["2021-06-13 00:00:00", "2021-06-20 00:00:00"]
        self._mock_long_term("DIGITAL_CURRENCY_WEEKLY", "Time Series (Digital Currency Weekly)", ts)
        # The requested window is wider than the bar timestamps themselves.
        self.assertEqual(
            self._run_btc("1week", "2021-06-10 00:00:00", "2021-06-21 00:00:00"),
            self._expected_long_term(ts),
        )

    @responses.activate
    def test_one_month_candlestick_ok(self):
        ts = ["2021-04-30 00:00:00", "2021-05-31 00:00:00", "2021-06-30 00:00:00"]
        self._mock_long_term("DIGITAL_CURRENCY_MONTHLY", "Time Series (Digital Currency Monthly)", ts)
        # The requested window is wider than the bar timestamps themselves.
        self.assertEqual(
            self._run_btc("1month", "2021-04-01 00:00:00", "2021-07-01 00:00:00"),
            self._expected_long_term(ts),
        )

    # ------------------------------------------------------------------
    # Validation failures
    # ------------------------------------------------------------------

    def test_failure_missing_crypto_name_input(self):
        # "crypto_name" omitted from the inputs entirely.
        with self.assertRaises(DataBlockTwoInvalidInputPayloadException):
            self._run(
                {
                    "candlestick": "1month",
                    "start_date": "2021-06-18 00:00:00",
                    "end_date": "2021-06-22 00:00:00",
                }
            )

    def test_failure_missing_candlestick_input(self):
        # "candlestick" omitted from the inputs entirely.
        with self.assertRaises(DataBlockTwoInvalidInputPayloadException):
            self._run(
                {
                    "crypto_name": "BTC",
                    "start_date": "2021-06-18 00:00:00",
                    "end_date": "2021-06-22 00:00:00",
                }
            )

    def test_failure_invalid_candlestick_frequency(self):
        # An unrecognised candlestick frequency should be rejected.
        with self.assertRaises(DataBlockTwoInvalidCandlestickException):
            self._run(
                {
                    "crypto_name": "BTC",
                    "candlestick": "INVALID",
                    "start_date": "2021-06-18 00:00:00",
                    "end_date": "2021-06-22 00:00:00",
                }
            )
| 38.205742 | 138 | 0.354258 | 2,566 | 31,940 | 4.366719 | 0.081839 | 0.025346 | 0.017849 | 0.024275 | 0.909237 | 0.898884 | 0.873806 | 0.864257 | 0.841053 | 0.841053 | 0 | 0.236548 | 0.504853 | 31,940 | 835 | 139 | 38.251497 | 0.471957 | 0 | 0 | 0.664549 | 0 | 0.010165 | 0.327677 | 0.024358 | 0 | 0 | 0 | 0 | 0.017789 | 1 | 0.01906 | false | 0 | 0.005083 | 0 | 0.026684 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
06b5e5d6c097b1981dfbb86372f52172e750af49 | 946 | py | Python | bin/firstLawofMotion.py | newtoallofthis123/python-dev-bin | b066b008c7be73d8d9a076333fdd7c93a8c4c997 | [
"MIT"
] | 3 | 2021-08-04T07:10:52.000Z | 2021-08-04T08:02:43.000Z | bin/firstLawofMotion.py | newtoallofthis123/python-dev-bin | b066b008c7be73d8d9a076333fdd7c93a8c4c997 | [
"MIT"
] | null | null | null | bin/firstLawofMotion.py | newtoallofthis123/python-dev-bin | b066b008c7be73d8d9a076333fdd7c93a8c4c997 | [
"MIT"
] | null | null | null | # v = u+at
choice = str.lower(input("What do you want to calculate? (v, u, a, t)"))
if choice == 'v':
u = float(input("What is the value of u(m/s): "))
a = float(input("What is the value of a(ms^-2): "))
t = float(input("What is the value of t(sec): "))
print(u + a *t)
elif choice == 'u':
v = float(input("What is the value of v(m/s): "))
a = float(input("What is the value of a(ms^-2): "))
t = float(input("What is the value of t(sec): "))
print(v - a *t)
elif choice == 'a':
u = float(input("What is the value of u(m/s): "))
v = float(input("What is the value of v(m/s): "))
t = float(input("What is the value of t(sec): "))
print((v-u)/t)
elif choice == 't':
u = float(input("What is the value of u(m/s): "))
a = float(input("What is the value of a(ms^-2): "))
v = float(input("What is the value of v(m/s): "))
print((v-u)/a)
else:
print("Error Something went wrong!")
| 36.384615 | 72 | 0.552854 | 180 | 946 | 2.905556 | 0.183333 | 0.223709 | 0.321224 | 0.367113 | 0.732314 | 0.732314 | 0.732314 | 0.732314 | 0.732314 | 0.732314 | 0 | 0.004178 | 0.241015 | 946 | 25 | 73 | 37.84 | 0.724234 | 0.008457 | 0 | 0.521739 | 0 | 0 | 0.457265 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.217391 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
06eabc940011545e0ebab9ab3ffb25653fad0015 | 212 | wsgi | Python | webapp-python-flask-sqlalchemy-mysql/webapp_python_flask_sqlalchemy_mysql/webapp_python_flask_sqlalchemy_mysql.wsgi | james-w-balcomb/webapp-python | 941305a643c713ee77834af7d39280db1105b14a | [
"Apache-2.0"
] | null | null | null | webapp-python-flask-sqlalchemy-mysql/webapp_python_flask_sqlalchemy_mysql/webapp_python_flask_sqlalchemy_mysql.wsgi | james-w-balcomb/webapp-python | 941305a643c713ee77834af7d39280db1105b14a | [
"Apache-2.0"
] | 13 | 2020-02-11T23:27:42.000Z | 2022-03-11T23:34:41.000Z | webapp-python-flask-sqlalchemy-mysql/webapp_python_flask_sqlalchemy_mysql/webapp_python_flask_sqlalchemy_mysql.wsgi | james-w-balcomb/webapp-python | 941305a643c713ee77834af7d39280db1105b14a | [
"Apache-2.0"
] | null | null | null | import sys
sys.path.insert(0, "/var/www/python/boringbalcomb.com/webapp-python-flask-sqlalchemy-mysql/")
from webapp_python_flask_sqlalchemy_mysql import webapp_python_flask_sqlalchemy_mysql_app as application
| 35.333333 | 104 | 0.858491 | 31 | 212 | 5.580645 | 0.580645 | 0.208092 | 0.294798 | 0.468208 | 0.554913 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005 | 0.056604 | 212 | 5 | 105 | 42.4 | 0.86 | 0 | 0 | 0 | 0 | 0.333333 | 0.334906 | 0.334906 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
664348a6385380b134147496c8d7d0987785d57d | 4,363 | py | Python | tests/test_prediction/test_target_mean_regressor.py | noahjgreen295/feature_engine | 77248ade812f03bb1b9c0c129320f0c0baad61ed | [
"BSD-3-Clause"
] | 1 | 2021-12-17T21:54:47.000Z | 2021-12-17T21:54:47.000Z | tests/test_prediction/test_target_mean_regressor.py | noahjgreen295/feature_engine | 77248ade812f03bb1b9c0c129320f0c0baad61ed | [
"BSD-3-Clause"
] | 6 | 2021-12-08T12:41:19.000Z | 2021-12-22T23:34:39.000Z | tests/test_prediction/test_target_mean_regressor.py | noahjgreen295/feature_engine | 77248ade812f03bb1b9c0c129320f0c0baad61ed | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
from feature_engine._prediction.target_mean_regressor import TargetMeanRegressor
def test_regressor_categorical_variables(df_regression):
X, y = df_regression
tr = TargetMeanRegressor(variables="cat_var_A")
tr.fit(X, y)
pred = tr.predict(X)
exp_pred = np.array(
[
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
1.0,
1.0,
1.0,
2.0,
2.0,
2.0,
2.0,
2.0,
3.0,
3.0,
3.0,
3.0,
3.0,
]
)
assert np.array_equal(pred, exp_pred)
tr = TargetMeanRegressor(variables="cat_var_B")
tr.fit(X, y)
pred = tr.predict(X)
exp_pred = np.array(
[
0.16666667,
0.16666667,
0.16666667,
0.16666667,
0.16666667,
0.16666667,
1.5,
1.5,
1.5,
1.5,
1.5,
1.5,
1.5,
1.5,
2.83333333,
2.83333333,
2.83333333,
2.83333333,
2.83333333,
2.83333333,
]
)
assert np.allclose(pred, exp_pred)
tr = TargetMeanRegressor(variables=["cat_var_A", "cat_var_B"])
tr.fit(X, y)
pred = tr.predict(X)
exp_pred = np.array(
[
0.08333333,
0.08333333,
0.08333333,
0.08333333,
0.08333333,
0.58333333,
1.25,
1.25,
1.25,
1.25,
1.75,
1.75,
1.75,
1.75,
2.41666667,
2.91666667,
2.91666667,
2.91666667,
2.91666667,
2.91666667,
]
)
assert np.allclose(pred, exp_pred)
def test_classifier_numerical_variables(df_regression):
X, y = df_regression
tr = TargetMeanRegressor(variables="num_var_A", bins=2)
tr.fit(X, y)
pred = tr.predict(X)
exp_pred = np.array(
[
0.5,
0.5,
0.5,
0.5,
0.5,
0.5,
0.5,
0.5,
0.5,
0.5,
2.5,
2.5,
2.5,
2.5,
2.5,
2.5,
2.5,
2.5,
2.5,
2.5,
]
)
assert np.array_equal(pred, exp_pred)
tr = TargetMeanRegressor(variables="num_var_B", bins=2)
tr.fit(X, y)
pred = tr.predict(X)
exp_pred = np.array(
[
0.7,
0.7,
0.7,
0.7,
0.7,
0.7,
0.7,
0.7,
2.3,
2.3,
0.7,
0.7,
2.3,
2.3,
2.3,
2.3,
2.3,
2.3,
2.3,
2.3,
]
)
np.array_equal(pred, exp_pred)
tr = TargetMeanRegressor(variables=["num_var_A", "num_var_B"], bins=2)
tr.fit(X, y)
pred = tr.predict(X)
exp_pred = np.array(
[
0.6,
0.6,
0.6,
0.6,
0.6,
0.6,
0.6,
0.6,
1.4,
1.4,
1.6,
1.6,
2.4,
2.4,
2.4,
2.4,
2.4,
2.4,
2.4,
2.4,
]
)
assert np.array_equal(pred, exp_pred)
def test_classifier_all_variables(df_regression):
X, y = df_regression
tr = TargetMeanRegressor(bins=2)
tr.fit(X, y)
pred = tr.predict(X)
exp_pred = np.array(
[
0.34166667,
0.34166667,
0.34166667,
0.34166667,
0.34166667,
0.59166667,
0.925,
0.925,
1.325,
1.325,
1.675,
1.675,
2.075,
2.075,
2.40833333,
2.65833333,
2.65833333,
2.65833333,
2.65833333,
2.65833333,
]
)
assert np.allclose(pred, exp_pred)
| 18.028926 | 80 | 0.363511 | 507 | 4,363 | 3.023669 | 0.122288 | 0.063927 | 0.019569 | 0.023483 | 0.870189 | 0.870189 | 0.784736 | 0.757339 | 0.662753 | 0.542074 | 0 | 0.271186 | 0.526702 | 4,363 | 241 | 81 | 18.103734 | 0.471186 | 0 | 0 | 0.813725 | 0 | 0 | 0.016502 | 0 | 0 | 0 | 0 | 0 | 0.029412 | 1 | 0.014706 | false | 0 | 0.009804 | 0 | 0.02451 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
664e890b45619f5d81828a3b93ee79668a28f7e1 | 355 | py | Python | mlprogram/metrics/__init__.py | HiroakiMikami/mlprogram | 573e94c567064705fa65267dd83946bf183197de | [
"MIT"
] | 9 | 2020-05-24T11:25:01.000Z | 2022-03-28T15:32:10.000Z | mlprogram/metrics/__init__.py | HiroakiMikami/mlprogram | 573e94c567064705fa65267dd83946bf183197de | [
"MIT"
] | 87 | 2020-05-09T08:56:55.000Z | 2022-03-31T14:46:45.000Z | mlprogram/metrics/__init__.py | HiroakiMikami/NL2Prog | 573e94c567064705fa65267dd83946bf183197de | [
"MIT"
] | 3 | 2021-02-22T20:38:29.000Z | 2021-11-11T18:48:44.000Z | from mlprogram.metrics.accuracy import Accuracy # noqa
from mlprogram.metrics.bleu import Bleu # noqa
from mlprogram.metrics.error_correct_rate import ErrorCorrectRate # noqa
from mlprogram.metrics.iou import Iou # noqa
from mlprogram.metrics.metric import use_environment # noqa
from mlprogram.metrics.test_case_result import TestCaseResult # noqa
| 50.714286 | 73 | 0.830986 | 47 | 355 | 6.170213 | 0.404255 | 0.268966 | 0.413793 | 0.413793 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.11831 | 355 | 6 | 74 | 59.166667 | 0.926518 | 0.08169 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9ff208e6601458699ef4cb80c40e680fa4c53fae | 138 | py | Python | autograd/numpy/use_gpu_numpy.py | yukoba/autograd | 8f49fd0b7b274f63b47dd3a981268b278d47c6f7 | [
"MIT"
] | 2 | 2018-07-12T00:06:27.000Z | 2019-11-27T06:32:04.000Z | autograd/numpy/use_gpu_numpy.py | barak/autograd | 4a693a2daac6435cb6bddf6b18bd918a46591a82 | [
"MIT"
] | null | null | null | autograd/numpy/use_gpu_numpy.py | barak/autograd | 4a693a2daac6435cb6bddf6b18bd918a46591a82 | [
"MIT"
] | 1 | 2020-06-13T08:26:36.000Z | 2020-06-13T08:26:36.000Z | from __future__ import absolute_import
import os
def use_gpu_numpy():
return os.environ.get('AUTOGRAD_USE_GPU_NUMPY', 'no') == 'yes'
| 23 | 66 | 0.753623 | 21 | 138 | 4.47619 | 0.714286 | 0.12766 | 0.234043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 138 | 5 | 67 | 27.6 | 0.783333 | 0 | 0 | 0 | 0 | 0 | 0.195652 | 0.15942 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
c69d059aa60fe0f67dc0902a05bbb35e3ecf3a00 | 20,335 | py | Python | mylib/graph.py | suriya-it19/Business-Analytics-with-People-Counter | 6f3fca5b5d0735b9fa25c554a85ba6a4823da47c | [
"MIT"
] | 1 | 2021-02-11T14:31:35.000Z | 2021-02-11T14:31:35.000Z | mylib/graph.py | suriya-it19/Business-Analytics-with-People-Counter | 6f3fca5b5d0735b9fa25c554a85ba6a4823da47c | [
"MIT"
] | null | null | null | mylib/graph.py | suriya-it19/Business-Analytics-with-People-Counter | 6f3fca5b5d0735b9fa25c554a85ba6a4823da47c | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import streamlit as st
import streamlit.components.v1 as components
def tableau_embed_html(viz_id, sheet, workbook="Restaurant_16128584650850"):
    """Build the standard Tableau Public embed snippet for a single sheet.

    The original module repeated this ~1.4 kB template eleven times, differing
    only in the placeholder element id and the sheet name; it is factored out
    here so every dashboard is rendered from one template.

    :param viz_id: id of the placeholder ``<div>`` (Tableau's generated
        ``viz...`` identifier); also referenced by the inline resize script.
    :param sheet: sheet name inside the published workbook.
    :param workbook: published workbook name on Tableau Public (default is the
        workbook used by every embed in this module).
    :return: HTML/JS snippet suitable for ``streamlit.components.v1.html``.
    """
    # Tableau's static-image URLs insert the first two characters of the
    # workbook name as a shard directory (".../images/Re/Restaurant_.../...").
    images = f"https://public.tableau.com/static/images/{workbook[:2]}/{workbook}/{sheet}"
    return (
        f"<div class='tableauPlaceholder' id='{viz_id}' style='position: relative'>"
        f"<noscript><a href='#'><img alt=' ' src='{images}/1_rss.png' style='border: none' /></a></noscript>"
        "<object class='tableauViz' style='display:none;'>"
        "<param name='host_url' value='https%3A%2F%2Fpublic.tableau.com%2F' /> "
        "<param name='embed_code_version' value='3' /> "
        "<param name='site_root' value='' />"
        f"<param name='name' value='{workbook}/{sheet}' />"
        "<param name='tabs' value='no' />"
        "<param name='toolbar' value='yes' />"
        f"<param name='static_image' value='{images}/1.png' /> "
        "<param name='animate_transition' value='yes' />"
        "<param name='display_static_image' value='yes' />"
        "<param name='display_spinner' value='yes' />"
        "<param name='display_overlay' value='yes' />"
        "<param name='display_count' value='yes' />"
        "<param name='language' value='en' /></object></div> "
        "<script type='text/javascript'> "
        f"var divElement = document.getElementById('{viz_id}'); "
        "var vizElement = divElement.getElementsByTagName('object')[0]; "
        "vizElement.style.width='100%';"
        "vizElement.style.height=(divElement.offsetWidth*0.75)+'px'; "
        "var scriptElement = document.createElement('script'); "
        "scriptElement.src = 'https://public.tableau.com/javascripts/api/viz_v1.js'; "
        "vizElement.parentNode.insertBefore(scriptElement, vizElement); </script>"
    )


def first(mod, area):
    """
    # Trend Analysis using past data

    Renders the historical-trend Tableau Public dashboards (dish price,
    plates sold, total price and ratings, each broken down by year, month
    and week).  ``mod`` and ``area`` are accepted for signature compatibility
    with the page's other renderers but are not used here.
    """
    # One entry per dashboard, in the display order of the original page:
    # (placeholder element id, sheet name).  NB: "ToalPrice..." is the sheet
    # name exactly as published on Tableau Public — do not "correct" it.
    dashboards = [
        # Dish price
        ("viz1613019369548", "DishPriceYearwise"),
        ("viz1613019393501", "DishPriceMonthwise"),
        # Plates sold
        ("viz1613019099515", "PlatesSoldMonthwise"),
        ("viz1613019161471", "PlatesSoldYearwise"),
        ("viz1613019207244", "PlatesSoldWeekwise"),
        # Total price
        ("viz1612960191058", "TotalPriceYearwise"),
        ("viz1613019016265", "ToalPriceMonthWise"),
        ("viz1613019068472", "ToalPriceWeekWise"),
        # Ratings
        ("viz1613019233501", "RatingsMonthwise"),
        ("viz1613019289320", "RatingsYearwise"),
        ("viz1613019317354", "RatingsWeekwise"),
    ]
    # The bare title strings in the original nested main() were no-ops when
    # this module is imported (Streamlit "magic" only renders expressions in
    # the main script), so they survive as the grouping comments above rather
    # than as rendered output.
    for viz_id, sheet in dashboards:
        components.html(tableau_embed_html(viz_id, sheet), height=900)
# chart_data = pd.DataFrame(np.random.randn(20, 3), columns=["a", "b", "c"])
# st.line_chart(chart_data)
# map_data = pd.DataFrame(
# np.random.randn(200, 2) / [100, 50] + [9.9252, 78.1198], columns=["lat", "lon"]
# )
# st.map(map_data)
| 228.483146 | 1,704 | 0.666831 | 2,396 | 20,335 | 5.574708 | 0.068865 | 0.096354 | 0.064236 | 0.084001 | 0.888598 | 0.883207 | 0.879015 | 0.879015 | 0.879015 | 0.879015 | 0 | 0.079303 | 0.152938 | 20,335 | 88 | 1,705 | 231.079545 | 0.696139 | 0.014163 | 0 | 0 | 0 | 0.37931 | 0.941475 | 0.502151 | 0 | 0 | 0 | 0 | 0 | 1 | 0.068966 | false | 0 | 0.137931 | 0 | 0.206897 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
05c80252a5334f194e2fa5f06d07c22965af35ed | 23,222 | py | Python | sdk/python/pulumi_azure/keyvault/secret.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 109 | 2018-06-18T00:19:44.000Z | 2022-02-20T05:32:57.000Z | sdk/python/pulumi_azure/keyvault/secret.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 663 | 2018-06-18T21:08:46.000Z | 2022-03-31T20:10:11.000Z | sdk/python/pulumi_azure/keyvault/secret.py | henriktao/pulumi-azure | f1cbcf100b42b916da36d8fe28be3a159abaf022 | [
"ECL-2.0",
"Apache-2.0"
] | 41 | 2018-07-19T22:37:38.000Z | 2022-03-14T10:56:26.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['SecretArgs', 'Secret']
@pulumi.input_type
class SecretArgs:
def __init__(__self__, *,
key_vault_id: pulumi.Input[str],
value: pulumi.Input[str],
content_type: Optional[pulumi.Input[str]] = None,
expiration_date: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
not_before_date: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a Secret resource.
:param pulumi.Input[str] key_vault_id: The ID of the Key Vault where the Secret should be created.
:param pulumi.Input[str] value: Specifies the value of the Key Vault Secret.
:param pulumi.Input[str] content_type: Specifies the content type for the Key Vault Secret.
:param pulumi.Input[str] expiration_date: Expiration UTC datetime (Y-m-d'T'H:M:S'Z').
:param pulumi.Input[str] name: Specifies the name of the Key Vault Secret. Changing this forces a new resource to be created.
:param pulumi.Input[str] not_before_date: Key not usable before the provided UTC datetime (Y-m-d'T'H:M:S'Z').
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
"""
pulumi.set(__self__, "key_vault_id", key_vault_id)
pulumi.set(__self__, "value", value)
if content_type is not None:
pulumi.set(__self__, "content_type", content_type)
if expiration_date is not None:
pulumi.set(__self__, "expiration_date", expiration_date)
if name is not None:
pulumi.set(__self__, "name", name)
if not_before_date is not None:
pulumi.set(__self__, "not_before_date", not_before_date)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="keyVaultId")
def key_vault_id(self) -> pulumi.Input[str]:
"""
The ID of the Key Vault where the Secret should be created.
"""
return pulumi.get(self, "key_vault_id")
@key_vault_id.setter
def key_vault_id(self, value: pulumi.Input[str]):
pulumi.set(self, "key_vault_id", value)
@property
@pulumi.getter
def value(self) -> pulumi.Input[str]:
    """Specifies the value of the Key Vault Secret."""
    return pulumi.get(self, "value")

@value.setter
def value(self, value: pulumi.Input[str]):
    pulumi.set(self, "value", value)
@property
@pulumi.getter(name="contentType")
def content_type(self) -> Optional[pulumi.Input[str]]:
    """Specifies the content type for the Key Vault Secret."""
    return pulumi.get(self, "content_type")

@content_type.setter
def content_type(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "content_type", value)
@property
@pulumi.getter(name="expirationDate")
def expiration_date(self) -> Optional[pulumi.Input[str]]:
    """Expiration UTC datetime (Y-m-d'T'H:M:S'Z')."""
    return pulumi.get(self, "expiration_date")

@expiration_date.setter
def expiration_date(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "expiration_date", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """Specifies the name of the Key Vault Secret. Changing this forces a new resource to be created."""
    return pulumi.get(self, "name")

@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "name", value)
@property
@pulumi.getter(name="notBeforeDate")
def not_before_date(self) -> Optional[pulumi.Input[str]]:
    """Key not usable before the provided UTC datetime (Y-m-d'T'H:M:S'Z')."""
    return pulumi.get(self, "not_before_date")

@not_before_date.setter
def not_before_date(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "not_before_date", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
    """A mapping of tags to assign to the resource."""
    return pulumi.get(self, "tags")

@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
    pulumi.set(self, "tags", value)
@pulumi.input_type
class _SecretState:
    def __init__(__self__, *,
                 content_type: Optional[pulumi.Input[str]] = None,
                 expiration_date: Optional[pulumi.Input[str]] = None,
                 key_vault_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 not_before_date: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 value: Optional[pulumi.Input[str]] = None,
                 version: Optional[pulumi.Input[str]] = None,
                 versionless_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Secret resources.

        :param pulumi.Input[str] content_type: Specifies the content type for the Key Vault Secret.
        :param pulumi.Input[str] expiration_date: Expiration UTC datetime (Y-m-d'T'H:M:S'Z').
        :param pulumi.Input[str] key_vault_id: The ID of the Key Vault where the Secret should be created.
        :param pulumi.Input[str] name: Specifies the name of the Key Vault Secret. Changing this forces a new resource to be created.
        :param pulumi.Input[str] not_before_date: Key not usable before the provided UTC datetime (Y-m-d'T'H:M:S'Z').
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] value: Specifies the value of the Key Vault Secret.
        :param pulumi.Input[str] version: The current version of the Key Vault Secret.
        :param pulumi.Input[str] versionless_id: The Base ID of the Key Vault Secret.
        """
        # Record only the state fields the caller actually supplied, keeping
        # the declaration order of the keyword parameters above.
        supplied_fields = (
            ("content_type", content_type),
            ("expiration_date", expiration_date),
            ("key_vault_id", key_vault_id),
            ("name", name),
            ("not_before_date", not_before_date),
            ("tags", tags),
            ("value", value),
            ("version", version),
            ("versionless_id", versionless_id),
        )
        for field_name, field_value in supplied_fields:
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> Optional[pulumi.Input[str]]:
        """Specifies the content type for the Key Vault Secret."""
        return pulumi.get(self, "content_type")

    @content_type.setter
    def content_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content_type", value)

    @property
    @pulumi.getter(name="expirationDate")
    def expiration_date(self) -> Optional[pulumi.Input[str]]:
        """Expiration UTC datetime (Y-m-d'T'H:M:S'Z')."""
        return pulumi.get(self, "expiration_date")

    @expiration_date.setter
    def expiration_date(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expiration_date", value)

    @property
    @pulumi.getter(name="keyVaultId")
    def key_vault_id(self) -> Optional[pulumi.Input[str]]:
        """The ID of the Key Vault where the Secret should be created."""
        return pulumi.get(self, "key_vault_id")

    @key_vault_id.setter
    def key_vault_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key_vault_id", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Specifies the name of the Key Vault Secret. Changing this forces a new resource to be created."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="notBeforeDate")
    def not_before_date(self) -> Optional[pulumi.Input[str]]:
        """Key not usable before the provided UTC datetime (Y-m-d'T'H:M:S'Z')."""
        return pulumi.get(self, "not_before_date")

    @not_before_date.setter
    def not_before_date(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "not_before_date", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """A mapping of tags to assign to the resource."""
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter
    def value(self) -> Optional[pulumi.Input[str]]:
        """Specifies the value of the Key Vault Secret."""
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "value", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[str]]:
        """The current version of the Key Vault Secret."""
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version", value)

    @property
    @pulumi.getter(name="versionlessId")
    def versionless_id(self) -> Optional[pulumi.Input[str]]:
        """The Base ID of the Key Vault Secret."""
        return pulumi.get(self, "versionless_id")

    @versionless_id.setter
    def versionless_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "versionless_id", value)
class Secret(pulumi.CustomResource):
    # NOTE: the two @overload stubs below exist only for type checkers / IDEs;
    # the real constructor is the third __init__, which dispatches to
    # _internal_init either from a SecretArgs bag or from keyword arguments.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 content_type: Optional[pulumi.Input[str]] = None,
                 expiration_date: Optional[pulumi.Input[str]] = None,
                 key_vault_id: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 not_before_date: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 value: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a Key Vault Secret.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        current = azure.core.get_client_config()
        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_key_vault = azure.keyvault.KeyVault("exampleKeyVault",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            tenant_id=current.tenant_id,
            sku_name="premium",
            soft_delete_retention_days=7,
            access_policies=[azure.keyvault.KeyVaultAccessPolicyArgs(
                tenant_id=current.tenant_id,
                object_id=current.object_id,
                key_permissions=[
                    "create",
                    "get",
                ],
                secret_permissions=[
                    "set",
                    "get",
                    "delete",
                    "purge",
                    "recover",
                ],
            )])
        example_secret = azure.keyvault.Secret("exampleSecret",
            value="szechuan",
            key_vault_id=example_key_vault.id)
        ```

        ## Import

        Key Vault Secrets which are Enabled can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:keyvault/secret:Secret example "https://example-keyvault.vault.azure.net/secrets/example/fdf067c93bbb4b22bff4d8b7a9a56217"
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] content_type: Specifies the content type for the Key Vault Secret.
        :param pulumi.Input[str] expiration_date: Expiration UTC datetime (Y-m-d'T'H:M:S'Z').
        :param pulumi.Input[str] key_vault_id: The ID of the Key Vault where the Secret should be created.
        :param pulumi.Input[str] name: Specifies the name of the Key Vault Secret. Changing this forces a new resource to be created.
        :param pulumi.Input[str] not_before_date: Key not usable before the provided UTC datetime (Y-m-d'T'H:M:S'Z').
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] value: Specifies the value of the Key Vault Secret.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: SecretArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Key Vault Secret.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_azure as azure

        current = azure.core.get_client_config()
        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_key_vault = azure.keyvault.KeyVault("exampleKeyVault",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            tenant_id=current.tenant_id,
            sku_name="premium",
            soft_delete_retention_days=7,
            access_policies=[azure.keyvault.KeyVaultAccessPolicyArgs(
                tenant_id=current.tenant_id,
                object_id=current.object_id,
                key_permissions=[
                    "create",
                    "get",
                ],
                secret_permissions=[
                    "set",
                    "get",
                    "delete",
                    "purge",
                    "recover",
                ],
            )])
        example_secret = azure.keyvault.Secret("exampleSecret",
            value="szechuan",
            key_vault_id=example_key_vault.id)
        ```

        ## Import

        Key Vault Secrets which are Enabled can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:keyvault/secret:Secret example "https://example-keyvault.vault.azure.net/secrets/example/fdf067c93bbb4b22bff4d8b7a9a56217"
        ```

        :param str resource_name: The name of the resource.
        :param SecretArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Decide which overload the caller used: get_resource_args_opts returns
        # a SecretArgs instance when one was passed positionally, else None.
        resource_args, opts = _utilities.get_resource_args_opts(SecretArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Unpack the args bag into the keyword form expected by _internal_init.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       content_type: Optional[pulumi.Input[str]] = None,
                       expiration_date: Optional[pulumi.Input[str]] = None,
                       key_vault_id: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       not_before_date: Optional[pulumi.Input[str]] = None,
                       tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       value: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared constructor body for both overloads; validates options and
        # builds the property bag before registering with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to looking one up by id):
            # __props__ must not be supplied and required inputs are enforced.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = SecretArgs.__new__(SecretArgs)
            __props__.__dict__["content_type"] = content_type
            __props__.__dict__["expiration_date"] = expiration_date
            # key_vault_id and value are required unless the resource is being
            # rehydrated from a URN (opts.urn set).
            if key_vault_id is None and not opts.urn:
                raise TypeError("Missing required property 'key_vault_id'")
            __props__.__dict__["key_vault_id"] = key_vault_id
            __props__.__dict__["name"] = name
            __props__.__dict__["not_before_date"] = not_before_date
            __props__.__dict__["tags"] = tags
            if value is None and not opts.urn:
                raise TypeError("Missing required property 'value'")
            __props__.__dict__["value"] = value
            # Output-only properties start as None and are filled in by the engine.
            __props__.__dict__["version"] = None
            __props__.__dict__["versionless_id"] = None
        super(Secret, __self__).__init__(
            'azure:keyvault/secret:Secret',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            content_type: Optional[pulumi.Input[str]] = None,
            expiration_date: Optional[pulumi.Input[str]] = None,
            key_vault_id: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            not_before_date: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            value: Optional[pulumi.Input[str]] = None,
            version: Optional[pulumi.Input[str]] = None,
            versionless_id: Optional[pulumi.Input[str]] = None) -> 'Secret':
        """
        Get an existing Secret resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] content_type: Specifies the content type for the Key Vault Secret.
        :param pulumi.Input[str] expiration_date: Expiration UTC datetime (Y-m-d'T'H:M:S'Z').
        :param pulumi.Input[str] key_vault_id: The ID of the Key Vault where the Secret should be created.
        :param pulumi.Input[str] name: Specifies the name of the Key Vault Secret. Changing this forces a new resource to be created.
        :param pulumi.Input[str] not_before_date: Key not usable before the provided UTC datetime (Y-m-d'T'H:M:S'Z').
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] value: Specifies the value of the Key Vault Secret.
        :param pulumi.Input[str] version: The current version of the Key Vault Secret.
        :param pulumi.Input[str] versionless_id: The Base ID of the Key Vault Secret.
        """
        # Attach the provider id so the engine reads existing state instead of
        # creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _SecretState.__new__(_SecretState)
        __props__.__dict__["content_type"] = content_type
        __props__.__dict__["expiration_date"] = expiration_date
        __props__.__dict__["key_vault_id"] = key_vault_id
        __props__.__dict__["name"] = name
        __props__.__dict__["not_before_date"] = not_before_date
        __props__.__dict__["tags"] = tags
        __props__.__dict__["value"] = value
        __props__.__dict__["version"] = version
        __props__.__dict__["versionless_id"] = versionless_id
        return Secret(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies the content type for the Key Vault Secret.
        """
        return pulumi.get(self, "content_type")
    @property
    @pulumi.getter(name="expirationDate")
    def expiration_date(self) -> pulumi.Output[Optional[str]]:
        """
        Expiration UTC datetime (Y-m-d'T'H:M:S'Z').
        """
        return pulumi.get(self, "expiration_date")
    @property
    @pulumi.getter(name="keyVaultId")
    def key_vault_id(self) -> pulumi.Output[str]:
        """
        The ID of the Key Vault where the Secret should be created.
        """
        return pulumi.get(self, "key_vault_id")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Key Vault Secret. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="notBeforeDate")
    def not_before_date(self) -> pulumi.Output[Optional[str]]:
        """
        Key not usable before the provided UTC datetime (Y-m-d'T'H:M:S'Z').
        """
        return pulumi.get(self, "not_before_date")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def value(self) -> pulumi.Output[str]:
        """
        Specifies the value of the Key Vault Secret.
        """
        return pulumi.get(self, "value")
    @property
    @pulumi.getter
    def version(self) -> pulumi.Output[str]:
        """
        The current version of the Key Vault Secret.
        """
        return pulumi.get(self, "version")
    @property
    @pulumi.getter(name="versionlessId")
    def versionless_id(self) -> pulumi.Output[str]:
        """
        The Base ID of the Key Vault Secret.
        """
        return pulumi.get(self, "versionless_id")
| 40.597902 | 153 | 0.619283 | 2,839 | 23,222 | 4.847129 | 0.0708 | 0.095923 | 0.106824 | 0.089528 | 0.877335 | 0.857423 | 0.840273 | 0.81542 | 0.811714 | 0.784972 | 0 | 0.002186 | 0.270993 | 23,222 | 571 | 154 | 40.669002 | 0.81068 | 0.340152 | 0 | 0.719595 | 1 | 0 | 0.085902 | 0.00204 | 0 | 0 | 0 | 0 | 0 | 1 | 0.162162 | false | 0.003378 | 0.016892 | 0 | 0.277027 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
05ea982b096d6ed38c68c9783719698492a2931a | 17,621 | py | Python | data/prepare_data_fixmatch.py | YBZh/AuxSelfTrain | 6cae3949dbf013f4aa3937861cf437c19de519b0 | [
"MIT"
] | 2 | 2021-06-19T02:17:34.000Z | 2021-11-03T04:21:06.000Z | data/prepare_data_fixmatch.py | YBZh/AuxSelfTrain | 6cae3949dbf013f4aa3937861cf437c19de519b0 | [
"MIT"
] | null | null | null | data/prepare_data_fixmatch.py | YBZh/AuxSelfTrain | 6cae3949dbf013f4aa3937861cf437c19de519b0 | [
"MIT"
] | null | null | null | import os
import random
import torch
from torchvision import transforms
from data.data_list import Imagelists_VISDA, return_classlist, Imagelists_VISDA_unl, Imagelists_VISDA_unl_paired
from data.randaugment import RandAugmentMC
def _select_image_process(DATA_TRANSFORM_TYPE):
    """Build the (weak, strong, test) image-transform triple for a transform type.

    :param DATA_TRANSFORM_TYPE: one of ``'ours'``, ``'mme'`` or ``'simple'``.
    :returns: tuple ``(transforms_train_weak, transforms_train_strong, transforms_test)``;
        the strong pipeline is the weak one plus RandAugment (FixMatch-style).
    :raises NotImplementedError: for any other transform type.
    """
    # ImageNet channel statistics, matching the pretrained backbones.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    if DATA_TRANSFORM_TYPE == 'ours':
        # BUGFIX: this branch previously assigned `transforms_train` only, so
        # the `return` below raised NameError for the 'ours' type. It now
        # builds the weak/strong pair like the other branches.
        transforms_train_weak = transforms.Compose([
            transforms.Resize(256),
            transforms.RandomCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize,
        ])
        transforms_train_strong = transforms.Compose([
            transforms.Resize(256),
            transforms.RandomCrop(224),
            transforms.RandomHorizontalFlip(),
            RandAugmentMC(2, 10),
            transforms.ToTensor(),
            normalize,
        ])
        transforms_test = transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            normalize,
        ])
    elif DATA_TRANSFORM_TYPE == 'mme':
        transforms_train_weak = transforms.Compose([
            transforms.Resize((256, 256)),
            transforms.RandomCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            normalize,
        ])
        transforms_train_strong = transforms.Compose([
            transforms.Resize((256, 256)),
            transforms.RandomCrop(224),
            transforms.RandomHorizontalFlip(),
            RandAugmentMC(2, 10),
            transforms.ToTensor(),
            normalize,
        ])
        transforms_test = transforms.Compose([
            transforms.Resize((256, 256)),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            normalize,
        ])
    elif DATA_TRANSFORM_TYPE == 'simple':
        transforms_train_weak = transforms.Compose([
            transforms.Resize((224, 224)),
            transforms.ToTensor(),
            normalize,
        ])
        transforms_train_strong = transforms.Compose([
            transforms.Resize((224, 224)),
            RandAugmentMC(2, 10),
            transforms.ToTensor(),
            normalize,
        ])
        transforms_test = transforms.Compose([
            transforms.Resize((224, 224)),
            transforms.ToTensor(),
            normalize,
        ])
    else:
        raise NotImplementedError
    return transforms_train_weak, transforms_train_strong, transforms_test
def generate_dataloader(args):
    """Build the SSDA dataloaders: labeled source, labeled/unlabeled target, test."""
    txt_dir = './data/txt/%s' % args.dataset
    data_root = args.datapath
    src_list = os.path.join(txt_dir, 'labeled_source_images_' + args.source + '.txt')
    tgt_l_list = os.path.join(txt_dir, 'labeled_target_images_' + args.target + '_%d.txt' % (args.num_labeled))
    tgt_u_list = os.path.join(txt_dir, 'unlabeled_target_images_' + args.target + '_%d.txt' % (args.num_labeled))
    weak_tf, strong_tf, test_tf = _select_image_process(args.transform_type)

    # Train-time datasets yield (weak, strong) augmentation pairs.
    source_dataset = Imagelists_VISDA_unl(src_list, root=data_root, transform=weak_tf, transform2=strong_tf, path=True)
    target_dataset = Imagelists_VISDA_unl(tgt_l_list, root=data_root, transform=weak_tf, transform2=strong_tf)
    target_dataset_unl = Imagelists_VISDA_unl(tgt_u_list, root=data_root, transform=weak_tf, transform2=strong_tf, path=True)
    target_dataset_test = Imagelists_VISDA(tgt_u_list, root=data_root, transform=test_tf)

    print("%d classes in this dataset" % len(return_classlist(src_list)))

    # Split one mini-batch between source and labeled-target samples.
    labeled_target_bs = int(args.batchsize * args.ratio_t)
    source_loader = torch.utils.data.DataLoader(
        source_dataset, batch_size=args.batchsize - labeled_target_bs,
        num_workers=args.num_workers, shuffle=True, drop_last=True, pin_memory=True)
    target_loader = torch.utils.data.DataLoader(
        target_dataset, num_workers=args.num_workers,
        batch_sampler=RandomBatchSampler(batch_size=labeled_target_bs, len_imgs=len(target_dataset)),
        pin_memory=True)
    print('In each iteration: source labeleed data: %d, target labeled data: %d, target Unl data: %d' % (args.batchsize - int(args.batchsize * args.ratio_t), int(args.batchsize * args.ratio_t), int(args.batchsize * args.mu)))
    # Unlabeled target batches are `mu` times larger, FixMatch-style.
    target_loader_unl = torch.utils.data.DataLoader(
        target_dataset_unl, num_workers=args.num_workers,
        batch_sampler=RandomBatchSampler(int(args.batchsize * args.mu), len_imgs=len(target_dataset_unl)),
        pin_memory=True)
    target_loader_test = torch.utils.data.DataLoader(
        target_dataset_test, batch_size=args.batchsize, num_workers=args.num_workers,
        shuffle=False, drop_last=False, pin_memory=True)

    return {
        'source': source_loader,
        'target_l': target_loader,
        'target_u': target_loader_unl,
        'test': target_loader_test,
    }
def generate_dataloader_path(args):
    """Like generate_dataloader, but every dataset also returns image paths,
    and validation / prototype loaders are included."""
    txt_dir = './data/txt/%s' % args.dataset
    data_root = args.datapath
    src_list = os.path.join(txt_dir, 'labeled_source_images_' + args.source + '.txt')
    tgt_l_list = os.path.join(txt_dir, 'labeled_target_images_' + args.target + '_%d.txt' % (args.num_labeled))
    tgt_val_list = os.path.join(txt_dir, 'validation_target_images_' + args.target + '_3.txt')  # val split is kept but unused downstream
    tgt_u_list = os.path.join(txt_dir, 'unlabeled_target_images_' + args.target + '_%d.txt' % (args.num_labeled))
    weak_tf, strong_tf, test_tf = _select_image_process(args.transform_type)

    # All datasets return the image path alongside the sample (path=True).
    source_dataset = Imagelists_VISDA_unl(src_list, root=data_root, transform=weak_tf, transform2=strong_tf, path=True)
    target_dataset = Imagelists_VISDA_unl(tgt_l_list, root=data_root, transform=weak_tf, transform2=strong_tf, path=True)
    target_dataset_unl = Imagelists_VISDA_unl(tgt_u_list, root=data_root, transform=weak_tf, transform2=strong_tf, path=True)
    target_dataset_val = Imagelists_VISDA(tgt_val_list, root=data_root, transform=test_tf, path=True)
    target_dataset_test = Imagelists_VISDA(tgt_u_list, root=data_root, transform=test_tf, path=True)

    print("%d classes in this dataset" % len(return_classlist(src_list)))

    labeled_target_bs = int(args.batchsize * args.ratio_t)
    source_loader = torch.utils.data.DataLoader(
        source_dataset, batch_size=args.batchsize - labeled_target_bs,
        num_workers=args.num_workers, shuffle=True, drop_last=True)
    target_loader = torch.utils.data.DataLoader(
        target_dataset, num_workers=args.num_workers,
        batch_sampler=RandomBatchSampler(batch_size=labeled_target_bs, len_imgs=len(target_dataset)))
    print('In each iteration: source labeleed data: %d, target labeled data: %d, target Unl data: %d' % (args.batchsize - int(args.batchsize * args.ratio_t), int(args.batchsize * args.ratio_t), args.batchsize * args.mu))
    # Sequential pass over the labeled target set, e.g. for prototype estimation.
    target_loader_proto = torch.utils.data.DataLoader(
        target_dataset, batch_size=min(args.batchsize, len(target_dataset)),
        num_workers=args.num_workers, shuffle=False, drop_last=False)
    target_loader_unl = torch.utils.data.DataLoader(
        target_dataset_unl, num_workers=args.num_workers,
        batch_sampler=RandomBatchSampler(args.batchsize * args.mu, len_imgs=len(target_dataset_unl)))
    target_loader_val = torch.utils.data.DataLoader(
        target_dataset_val, batch_size=min(args.batchsize, len(target_dataset_val)),
        num_workers=args.num_workers, shuffle=False, drop_last=False)
    target_loader_test = torch.utils.data.DataLoader(
        target_dataset_test, batch_size=args.batchsize, num_workers=args.num_workers,
        shuffle=False, drop_last=False)

    return {
        'source': source_loader,
        'target_l': target_loader,
        'target_l_proto': target_loader_proto,
        'target_u': target_loader_unl,
        'val': target_loader_val,
        'test': target_loader_test,
    }
def generate_dataloader_mmd(args):
    """Build weak-augmentation-only loaders (source / labeled target / unlabeled
    target), e.g. for MMD-style feature alignment."""
    txt_dir = './data/txt/%s' % args.dataset
    data_root = args.datapath
    src_list = os.path.join(txt_dir, 'labeled_source_images_' + args.source + '.txt')
    tgt_l_list = os.path.join(txt_dir, 'labeled_target_images_' + args.target + '_%d.txt' % (args.num_labeled))
    tgt_u_list = os.path.join(txt_dir, 'unlabeled_target_images_' + args.target + '_%d.txt' % (args.num_labeled))
    # Only the weak transform is used here; the strong/test transforms are unused.
    weak_tf, _strong_tf, _test_tf = _select_image_process(args.transform_type)

    source_dataset = Imagelists_VISDA(src_list, root=data_root, transform=weak_tf, path=True)
    target_dataset_l = Imagelists_VISDA(tgt_l_list, root=data_root, transform=weak_tf, path=False)
    target_dataset_u = Imagelists_VISDA(tgt_u_list, root=data_root, transform=weak_tf, path=True)

    def _loader(dataset):
        # All three loaders share the same configuration.
        return torch.utils.data.DataLoader(
            dataset, batch_size=args.batchsize, num_workers=args.num_workers,
            shuffle=True, drop_last=False, pin_memory=True)

    return {
        'source': _loader(source_dataset),
        'target_l': _loader(target_dataset_l),
        'target_u': _loader(target_dataset_u),
    }
def generate_dataloader_paired(args):
    """Build loaders where each batch pairs a source sample with a labeled
    target sample (Imagelists_VISDA_unl_paired), plus unlabeled/val/test loaders."""
    txt_dir = './data/txt/%s' % args.dataset
    data_root = args.datapath
    src_list = os.path.join(txt_dir, 'labeled_source_images_' + args.source + '.txt')
    tgt_l_list = os.path.join(txt_dir, 'labeled_target_images_' + args.target + '_%d.txt' % (args.num_labeled))
    tgt_val_list = os.path.join(txt_dir, 'validation_target_images_' + args.target + '_3.txt')  # val split is kept but unused downstream
    tgt_u_list = os.path.join(txt_dir, 'unlabeled_target_images_' + args.target + '_%d.txt' % (args.num_labeled))
    weak_tf, strong_tf, test_tf = _select_image_process(args.transform_type)

    # One dataset that yields (source, target) pairs instead of separate loaders.
    st_dataset = Imagelists_VISDA_unl_paired(src_list, tgt_l_list, root=data_root, transform=weak_tf,
                                             transform2=strong_tf)
    target_dataset_unl = Imagelists_VISDA_unl(tgt_u_list, root=data_root, transform=weak_tf, transform2=strong_tf)
    target_dataset_val = Imagelists_VISDA(tgt_val_list, root=data_root, transform=test_tf)
    target_dataset_test = Imagelists_VISDA(tgt_u_list, root=data_root, transform=test_tf)

    print("%d classes in this dataset" % len(return_classlist(src_list)))

    # Each pair contributes two images, so the pair loader uses half the batch size.
    st_loader = torch.utils.data.DataLoader(
        st_dataset, batch_size=int(args.batchsize * 0.5),
        num_workers=args.num_workers, shuffle=True, drop_last=True)
    print('In each iteration: source labeleed data: %d, target labeled data: %d, target Unl data: %d' % (args.batchsize - int(args.batchsize * args.ratio_t), int(args.batchsize * args.ratio_t), args.batchsize * args.mu))
    target_loader_unl = torch.utils.data.DataLoader(
        target_dataset_unl, num_workers=args.num_workers,
        batch_sampler=RandomBatchSampler(args.batchsize * args.mu, len_imgs=len(target_dataset_unl)))
    target_loader_val = torch.utils.data.DataLoader(
        target_dataset_val, batch_size=min(args.batchsize, len(target_dataset_val)),
        num_workers=args.num_workers, shuffle=False, drop_last=False)
    target_loader_test = torch.utils.data.DataLoader(
        target_dataset_test, batch_size=args.batchsize, num_workers=args.num_workers,
        shuffle=False, drop_last=False)

    return {
        'st': st_loader,
        'target_u': target_loader_unl,
        'val': target_loader_val,
        'test': target_loader_test,
    }
class Sampler(object):
    """Minimal abstract base for batch samplers; subclasses must implement
    ``__iter__`` (yielding lists of dataset indices) and ``__len__``."""

    def __init__(self, data_source):
        # The base class intentionally ignores its data source.
        pass

    def __iter__(self):
        raise NotImplementedError

    def __len__(self):
        raise NotImplementedError
class UniformBatchSampler(Sampler):
    """Yields shuffled index batches with exactly ``per_category`` samples
    drawn (without replacement) from every category's index list."""

    def __init__(self, per_category, category_index_list):
        self.per_category = per_category
        self.category_index_list = category_index_list

    def __iter__(self):
        # Effectively endless stream of batches (60000 is far beyond any
        # realistic number of training iterations).
        for _ in range(60000):
            batch = []
            for indices in self.category_index_list:
                batch.extend(random.sample(indices, self.per_category))
            random.shuffle(batch)
            yield batch
class RandomBatchSampler(Sampler):
def __init__(self, batch_size, len_imgs):
self.batch_size = batch_size
self.imgs_list = list(range(len_imgs))
def __iter__(self):
for bat in range(60000):
batch = []
batch = batch + random.sample(self.imgs_list, self.batch_size)
random.shuffle(batch)
yield batch
| 61.82807 | 225 | 0.662278 | 2,041 | 17,621 | 5.365017 | 0.067614 | 0.042922 | 0.041644 | 0.054795 | 0.901187 | 0.878447 | 0.873516 | 0.854247 | 0.845205 | 0.83242 | 0 | 0.008212 | 0.198343 | 17,621 | 284 | 226 | 62.045775 | 0.766955 | 0.136655 | 0 | 0.713615 | 0 | 0.014085 | 0.064485 | 0.022472 | 0 | 0 | 0 | 0 | 0 | 1 | 0.056338 | false | 0.004695 | 0.028169 | 0 | 0.122066 | 0.028169 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
af5bbf1f2c2f71d3b797ecb60e351200d2708dd2 | 258 | py | Python | addons14/account_banking_pain_base/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | 1 | 2021-06-10T14:59:13.000Z | 2021-06-10T14:59:13.000Z | addons14/account_banking_pain_base/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | null | null | null | addons14/account_banking_pain_base/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | 1 | 2021-04-09T09:44:44.000Z | 2021-04-09T09:44:44.000Z | from . import account_payment_line
from . import account_payment_order
from . import bank_payment_line
from . import account_payment_mode
from . import res_company
from . import res_config_settings
from . import account_payment_method
from . import res_bank
| 28.666667 | 36 | 0.844961 | 38 | 258 | 5.368421 | 0.342105 | 0.392157 | 0.333333 | 0.470588 | 0.343137 | 0.343137 | 0 | 0 | 0 | 0 | 0 | 0 | 0.124031 | 258 | 8 | 37 | 32.25 | 0.902655 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
afacc2d962509ba3d8b3d1bb8420dc8a25d6a7dc | 6,074 | py | Python | boto3_type_annotations/boto3_type_annotations/ec2/waiter.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | [
"MIT"
] | 119 | 2018-12-01T18:20:57.000Z | 2022-02-02T10:31:29.000Z | boto3_type_annotations/boto3_type_annotations/ec2/waiter.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | [
"MIT"
] | 15 | 2018-11-16T00:16:44.000Z | 2021-11-13T03:44:18.000Z | boto3_type_annotations/boto3_type_annotations/ec2/waiter.py | cowboygneox/boto3_type_annotations | 450dce1de4e066b939de7eac2ec560ed1a7ddaa2 | [
"MIT"
] | 11 | 2019-05-06T05:26:51.000Z | 2021-09-28T15:27:59.000Z | from typing import Dict
from typing import List
from botocore.waiter import Waiter
# NOTE: auto-generated type-annotation stubs for the boto3 EC2 waiters.
# Each class only declares the signature of its ``wait`` method; the actual
# polling behavior lives in botocore's Waiter implementation. All parameters
# are optional and mirror the corresponding EC2 Describe* call; WaiterConfig
# presumably carries the Delay/MaxAttempts overrides — confirm against botocore.

class BundleTaskComplete(Waiter):
    """Signature stub for EC2's bundle-task-complete waiter."""
    def wait(self, BundleIds: List = None, Filters: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class ConversionTaskCancelled(Waiter):
    """Signature stub for EC2's conversion-task-cancelled waiter."""
    def wait(self, ConversionTaskIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class ConversionTaskCompleted(Waiter):
    """Signature stub for EC2's conversion-task-completed waiter."""
    def wait(self, ConversionTaskIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class ConversionTaskDeleted(Waiter):
    """Signature stub for EC2's conversion-task-deleted waiter."""
    def wait(self, ConversionTaskIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class CustomerGatewayAvailable(Waiter):
    """Signature stub for EC2's customer-gateway-available waiter."""
    def wait(self, CustomerGatewayIds: List = None, Filters: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class ExportTaskCancelled(Waiter):
    """Signature stub for EC2's export-task-cancelled waiter."""
    def wait(self, ExportTaskIds: List = None, WaiterConfig: Dict = None):
        pass


class ExportTaskCompleted(Waiter):
    """Signature stub for EC2's export-task-completed waiter."""
    def wait(self, ExportTaskIds: List = None, WaiterConfig: Dict = None):
        pass


class ImageAvailable(Waiter):
    """Signature stub for EC2's image-available waiter."""
    def wait(self, ExecutableUsers: List = None, Filters: List = None, ImageIds: List = None, Owners: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class ImageExists(Waiter):
    """Signature stub for EC2's image-exists waiter."""
    def wait(self, ExecutableUsers: List = None, Filters: List = None, ImageIds: List = None, Owners: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class InstanceExists(Waiter):
    """Signature stub for EC2's instance-exists waiter."""
    def wait(self, Filters: List = None, InstanceIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        pass


class InstanceRunning(Waiter):
    """Signature stub for EC2's instance-running waiter."""
    def wait(self, Filters: List = None, InstanceIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        pass


class InstanceStatusOk(Waiter):
    """Signature stub for EC2's instance-status-ok waiter."""
    def wait(self, Filters: List = None, InstanceIds: List = None, MaxResults: int = None, NextToken: str = None, DryRun: bool = None, IncludeAllInstances: bool = None, WaiterConfig: Dict = None):
        pass


class InstanceStopped(Waiter):
    """Signature stub for EC2's instance-stopped waiter."""
    def wait(self, Filters: List = None, InstanceIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        pass


class InstanceTerminated(Waiter):
    """Signature stub for EC2's instance-terminated waiter."""
    def wait(self, Filters: List = None, InstanceIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        pass


class KeyPairExists(Waiter):
    """Signature stub for EC2's key-pair-exists waiter."""
    def wait(self, Filters: List = None, KeyNames: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class NatGatewayAvailable(Waiter):
    """Signature stub for EC2's nat-gateway-available waiter."""
    def wait(self, Filters: List = None, MaxResults: int = None, NatGatewayIds: List = None, NextToken: str = None, WaiterConfig: Dict = None):
        pass


class NetworkInterfaceAvailable(Waiter):
    """Signature stub for EC2's network-interface-available waiter."""
    def wait(self, Filters: List = None, DryRun: bool = None, NetworkInterfaceIds: List = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        pass


class PasswordDataAvailable(Waiter):
    """Signature stub for EC2's password-data-available waiter.

    Note: unlike the other stubs, ``InstanceId`` is required here.
    """
    def wait(self, InstanceId: str, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class SnapshotCompleted(Waiter):
    """Signature stub for EC2's snapshot-completed waiter."""
    def wait(self, Filters: List = None, MaxResults: int = None, NextToken: str = None, OwnerIds: List = None, RestorableByUserIds: List = None, SnapshotIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class SpotInstanceRequestFulfilled(Waiter):
    """Signature stub for EC2's spot-instance-request-fulfilled waiter."""
    def wait(self, Filters: List = None, DryRun: bool = None, SpotInstanceRequestIds: List = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        pass


class SubnetAvailable(Waiter):
    """Signature stub for EC2's subnet-available waiter."""
    def wait(self, Filters: List = None, SubnetIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class SystemStatusOk(Waiter):
    """Signature stub for EC2's system-status-ok waiter."""
    def wait(self, Filters: List = None, InstanceIds: List = None, MaxResults: int = None, NextToken: str = None, DryRun: bool = None, IncludeAllInstances: bool = None, WaiterConfig: Dict = None):
        pass


class VolumeAvailable(Waiter):
    """Signature stub for EC2's volume-available waiter."""
    def wait(self, Filters: List = None, VolumeIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        pass


class VolumeDeleted(Waiter):
    """Signature stub for EC2's volume-deleted waiter."""
    def wait(self, Filters: List = None, VolumeIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        pass


class VolumeInUse(Waiter):
    """Signature stub for EC2's volume-in-use waiter."""
    def wait(self, Filters: List = None, VolumeIds: List = None, DryRun: bool = None, MaxResults: int = None, NextToken: str = None, WaiterConfig: Dict = None):
        pass


class VpcAvailable(Waiter):
    """Signature stub for EC2's vpc-available waiter."""
    def wait(self, Filters: List = None, VpcIds: List = None, DryRun: bool = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        pass


class VpcExists(Waiter):
    """Signature stub for EC2's vpc-exists waiter."""
    def wait(self, Filters: List = None, VpcIds: List = None, DryRun: bool = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        pass


class VpcPeeringConnectionDeleted(Waiter):
    """Signature stub for EC2's vpc-peering-connection-deleted waiter."""
    def wait(self, Filters: List = None, DryRun: bool = None, VpcPeeringConnectionIds: List = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        pass


class VpcPeeringConnectionExists(Waiter):
    """Signature stub for EC2's vpc-peering-connection-exists waiter."""
    def wait(self, Filters: List = None, DryRun: bool = None, VpcPeeringConnectionIds: List = None, NextToken: str = None, MaxResults: int = None, WaiterConfig: Dict = None):
        pass


class VpnConnectionAvailable(Waiter):
    """Signature stub for EC2's vpn-connection-available waiter."""
    def wait(self, Filters: List = None, VpnConnectionIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass


class VpnConnectionDeleted(Waiter):
    """Signature stub for EC2's vpn-connection-deleted waiter."""
    def wait(self, Filters: List = None, VpnConnectionIds: List = None, DryRun: bool = None, WaiterConfig: Dict = None):
        pass
| 38.201258 | 219 | 0.689825 | 700 | 6,074 | 5.985714 | 0.105714 | 0.116468 | 0.096181 | 0.125776 | 0.797613 | 0.796659 | 0.796659 | 0.781384 | 0.767303 | 0.755609 | 0 | 0 | 0.202009 | 6,074 | 158 | 220 | 38.443038 | 0.864452 | 0 | 0 | 0.552083 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.322917 | false | 0.333333 | 0.03125 | 0 | 0.677083 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 8 |
bba462c3dbfc1ee5fdc9cec4d277e4c787f62d96 | 87 | py | Python | tools/third_party/more-itertools/more_itertools/__init__.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 6,989 | 2017-07-18T06:23:18.000Z | 2022-03-31T15:58:36.000Z | tools/third_party/more-itertools/more_itertools/__init__.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 7,642 | 2018-05-28T09:38:03.000Z | 2022-03-31T20:55:48.000Z | tools/third_party/more-itertools/more_itertools/__init__.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 1,303 | 2018-05-29T14:50:02.000Z | 2022-03-30T17:30:42.000Z | from more_itertools.more import * # noqa
from more_itertools.recipes import * # noqa
| 29 | 44 | 0.770115 | 12 | 87 | 5.416667 | 0.5 | 0.246154 | 0.523077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.16092 | 87 | 2 | 45 | 43.5 | 0.890411 | 0.103448 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
bbdfe7ffb262933daae82dff4fe8e64934bdb7d5 | 5,423 | py | Python | flask_web/base_stream_processing/base_stream_processing_py3.py | bopopescu/docker_images_a | 348d0982c5962f2ae34d10183ed9522b7a6fe286 | [
"MIT"
] | null | null | null | flask_web/base_stream_processing/base_stream_processing_py3.py | bopopescu/docker_images_a | 348d0982c5962f2ae34d10183ed9522b7a6fe286 | [
"MIT"
] | null | null | null | flask_web/base_stream_processing/base_stream_processing_py3.py | bopopescu/docker_images_a | 348d0982c5962f2ae34d10183ed9522b7a6fe286 | [
"MIT"
] | null | null | null |
class Base_Stream_Processing(object):
def format_data(self,stream_data,show_legend = False, title = "",title_x="",title_y="",ntick_x = 20,ntick_y=20):
return_value = {}
keys = list(stream_data[0]["data"].keys())
for i in keys:
new_key = i
new_key = new_key.replace('%','')
new_key = new_key.replace('/','')
new_key = new_key.replace('-','_')
return_value[new_key] = self.format_key(i,stream_data,show_legend,title,"Date",i,ntick_x,ntick_y)
old_keys = keys
keys = []
for i in old_keys:
new_key = i
new_key = new_key.replace('%','')
new_key = new_key.replace('/','')
new_key = new_key.replace('-','_')
keys.append(new_key)
stream_range = []
if len(keys) > 0:
for i in range(0,len(keys)):
stream_range.append(i)
return keys,stream_range,return_value
def format_data_variable_title(self,stream_data,show_legend = False,
title = "",title_x="",title_y="",ntick_x = 20,ntick_y=20):
return_value = {}
keys = list(stream_data[-1]["data"].keys())
for i in keys:
new_key = i
new_key = new_key.replace('%','')
new_key = new_key.replace('/','')
return_value[new_key] = self.format_key(i,stream_data,show_legend,title+i,title_x,title_y,ntick_x,ntick_y)
old_keys = keys
keys = []
for i in old_keys:
new_key = i
new_key = new_key.replace('%','')
new_key = new_key.replace('/','')
keys.append(new_key)
stream_range = []
for i in range(0,len(keys)):
stream_range.append(i)
return keys,stream_range,return_value
def format_key(self,key,stream_data,show_legend = False, title = "",title_x="",title_y="",ntick_x = 20,ntick_y=20):
data = {}
x_axis = {
"autorange":True,
"showgrid":True,
"zeroline":True,
"ntick": ntick_x,
"showline":True,
"title":title_x,
"mirror":"all"
}
y_axis = {
"autorange":True,
"showgrid":True,
"zeroline":True,
"ntick": ntick_y,
"showline":True,
"title":title_y,
"mirror":"all"
}
layout = {
'title':title,
'showlegend': show_legend,
'xaxis':x_axis,
'yaxis':y_axis,
};
data = {}
data["x"] = []
data["y"] = []
# assigning type
data["type"] ="lines"+"markers"
for i in stream_data:
if key in i["data"]:
ts = i["timestamp"]
data["x"].append(ts)
data["y"].append(i["data"][key])
return {"data":data,"layout" : layout}
def format_data_specific_key(self,stream_data,show_legend = False, title = "",title_x="",title_y="",ntick_x = 20,ntick_y=20,specific_key = ""):
return_value = {}
keys = list(stream_data[0]["data"].keys())
for i in keys:
new_key = i
new_key = new_key.replace('%','')
new_key = new_key.replace('/','')
new_key = new_key.replace('-','_')
return_value[new_key] = self.format_specific_key(i,stream_data,show_legend,title+i,"Date",i,ntick_x,ntick_y,specific_key = specific_key)
old_keys = keys
keys = []
for i in old_keys:
new_key = i
new_key = new_key.replace('%','')
new_key = new_key.replace('/','')
new_key = new_key.replace('-','_')
keys.append(new_key)
stream_range = []
for i in range(0,len(keys)):
stream_range.append(i)
return keys,stream_range,return_value
def format_specific_key(self,key,stream_data,show_legend = False, title = "",title_x="",title_y="",ntick_x = 20,ntick_y=20,specific_key = ""):
data = {}
x_axis = {
"showgrid":True,
"zeroline":True,
"ntick": ntick_x,
"showline":True,
"title":title_x,
"mirror":"all"
}
y_axis = {
"showgrid":True,
"zeroline":True,
"ntick": ntick_y,
"showline":True,
"title":title_y,
"mirror":"all"
}
layout = {
'title':title,
'showlegend': show_legend,
'xaxis':x_axis,
'yaxis':y_axis,
};
data = {}
data["x"] = []
data["y"] = []
# assigning type
data["type"] ="lines"+"markers"
for i in stream_data:
if key in i["data"]:
ts = i["timestamp"]
data["x"].append(ts)
#print(i["data"])
data["y"].append(i["data"][key][specific_key])
return {"data":data,"layout" : layout}
| 28.39267 | 149 | 0.460815 | 596 | 5,423 | 3.924497 | 0.09396 | 0.112869 | 0.061565 | 0.082086 | 0.93074 | 0.93074 | 0.876443 | 0.874733 | 0.861479 | 0.848653 | 0 | 0.00827 | 0.397935 | 5,423 | 191 | 150 | 28.39267 | 0.708116 | 0.008298 | 0 | 0.865672 | 0 | 0 | 0.072159 | 0 | 0.014925 | 0 | 0 | 0 | 0 | 1 | 0.037313 | false | 0 | 0 | 0 | 0.08209 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bbfd20c63cdb5c3eece8a2f36920e8dfa8aa37c9 | 181 | py | Python | core/data.py | abdallah-elsharif/WRock | 7cfd4bf29e932bf0048ee357c16cf6c021e7fb81 | [
"MIT"
] | 14 | 2022-03-13T19:51:24.000Z | 2022-03-18T07:36:39.000Z | core/data.py | abdallah-elsharif/WRock | 7cfd4bf29e932bf0048ee357c16cf6c021e7fb81 | [
"MIT"
] | null | null | null | core/data.py | abdallah-elsharif/WRock | 7cfd4bf29e932bf0048ee357c16cf6c021e7fb81 | [
"MIT"
] | 3 | 2022-03-14T05:58:06.000Z | 2022-03-14T11:46:47.000Z |
from os.path import dirname, abspath
def rockPATH():
    """Return the absolute path of the project root: two directory levels above this file."""
    this_file = abspath(__file__)
    return dirname(dirname(this_file))
def rockVERSION():
    """Return the raw contents of the ``VERSION`` file, resolved against the current working directory."""
    version_file = open('VERSION', 'r')
    with version_file:
        return version_file.read()
a52a616af9c63bbed44182e4f56596f39ba62e4e | 6,065 | py | Python | W-Login.py | Dark-Cyber-07/Dark-Cyber | da50f4bceeebc2f9969c9a266d8e68c8fcae3074 | [
"MIT"
] | 7 | 2021-06-25T10:02:23.000Z | 2022-03-30T09:32:05.000Z | W-Login.py | Dark-Cyber-07/Dark-Cyber | da50f4bceeebc2f9969c9a266d8e68c8fcae3074 | [
"MIT"
] | null | null | null | W-Login.py | Dark-Cyber-07/Dark-Cyber | da50f4bceeebc2f9969c9a266d8e68c8fcae3074 | [
"MIT"
] | 1 | 2021-06-17T09:04:33.000Z | 2021-06-17T09:04:33.000Z | import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b64decode("eJztWklwG1d6fg1wBUmJ1EZtltsaW6BsEDu4ilG4SZQlLgEp04bEQA30I9kkuhtCNyRqBqq4SnFGM4fEnoykGZcVV00qhySV5eTyIZVLDnNIDrnNJYcMq3JKDj6kKsfk///XjR0U7agqc0ij++Et//K9/23/e91Z5lzt8Pw2PFbGw5gKt8RyjKXKcYmlJDfuYSmPG/eylNeNt7FUmxtvZ6l2N97BUh1uvJOlOt14F0t1ufFulup24z6W8rnxHpbqYZyxnV6metgTiUl77zPex1QvJuY2rrDUEcbb2M5RpoLydvYE0PUzPsBU0NmJSS6x9DHG+9n1BYjuHGM7x9kTxqT7f8HWH/W7ik6w1Am2blxgbfwk2/WxwlkJLkdzF2k2JPZhheEUS50ChrEyA69i6CYG7mE7g0z1iYQo6SmLQjm9LHea6WdY6gyTMN3HcmeZfo6lzon0EdSjn2ep8yDgDawKiYJqXGDqUZE4xXbOY4VSbzL+JtuRGX9LFEDiIsPi77Gdt5FC7RdgJEk9z7YkJE+/w9Tj7PeB+xJTT1DEz9STFBli6imKXGbqIEu9y9TTLPUe24KcAIbKMIVBygkx9QwRh5l6liIRxqNMPcd2Pazwux4eJqMbZLzVoTegr2n/DdeS1QPRgi4PFzbloL1nayv/AEXX/7Ofab/3/GfMaoNizLcliCjWSQjzWj4qa4ZlK7mcXOD3i9yyLetUfYnOs9uKoX2fa8hqHcXyR/a2aURlQ88UgvlHQ1hg+yBY2y5wRV0xzZzIQ+JZ0zB41tZMY75QMAuioBOCmYL50OIFG6EV7c0xuwsiurKXtjWdWyiuCMXDyhY3bOsCsM0puQfabigSHAmG5aFbmlHcm5RvT8rThlowNVWOB+PB6KS89GEiIc8UtZwaurm8lkiERy7Ld67NTC+Frs3Epych9kEoEgYZ8IuOBEfGIGvmg1A8MR6OR0bCkJpbDP1A5Yal2Y+mYsFw4KGm2ttTkfBYOLDNta1teyoyHg0/BspbsyHNTt9Yg2iyRsRsMrRiWjZfNDNajkPG4rWQYhUt1DXnxlaWQllTD24qWZ4xzd3grmIrhoIAPghNr95eTafC4ek5SK9+EEoEUezySiiC0qdDe2MjE0pB50pGG34wqkxuZN1JCA08i02FTQnd5PqCDYMGOruH+g4MiNUhmJ7YkoUGv/PWhjy/p9lDXmwXzAfYGLceWdQ0HAtRbiWwvNSJTKLjexnMpB7GpHYpi/pRjtfFgcHeFVYiNINzG2H2WGJlTCUxGp20lzCC4p12d6q638PWCbSXQKN439bXFz7599/5+k+uDnVAMondycaYZatm0bZxHn5Y0GxOsc1c0dqmumDPoiwrx3merEAD4vsU8hY1zFuZAOZ1UQ0HpH6pTypb2+PWcgCFUSVUUaOKoWnkUDTZh8GRGmvWKqThEcUcD6n7VoqobtkcVwpCW0fZLtCYXG+lE2mzmXhFqfUTmE58Phmvc3fCk+MxHWKBYbgC8v/28tVJDUEQvFdqTgw6g99Cblls4B5K9cvyBMGuQ41S/fUaiS4wHMRaVuv0OWLjujyJaQrkkJ/k361mD0ygvlKVvGDgHqqSS0QHNb1bJdeVGvSn02kygxzwhxwFfoIg2NE4fpAsFKfTwVAJlaC2SfFfLbgsl4AIrtKEGxOULjvGSgLpJCpGOiwk45Tk4N0ylys36lihFPQjx4SQFborBDvsEC3JE4A0HcJc0OEoLAXpv1SxUpXce2h5PwlA+gms9KTDQCGUgVKy5STqALv60VSQFHjlGryy03AgGmmwCOngnnRaQnCTDpDo1OHuhIDrwAC2IDVdvdiELuqK5vI7tkR2aESRXXJNPSkE+6lDEoxJh6C62arlOnQlARfrTZbxu8aYFKIcxG5zuf+Td6sEV4ZbQjRdCfuj2/ziEmz+e6TE7SP3QI2LY4IMBRU
K+hvwjuoEj1gJb6BMU8ZbviZE53BxTFDWxLC/QiFmnYpouhBZFY3LXnUhgovVOOoJfFXxWuG1QvwNuVXXvUYCX3PK73z5YDxEEJ1v//ln+8+f1tzPXlQynXjd3Zq+hqWcfNrkbppPvAJbXHfzfrr/7FO86/ietcDVir5J5RpYnPtlK1ECWkSvh/Ds81q+VtAak66eJpVrauWGfAfmSwEt2gDtsFZrBY0qV6Ot0WovD26AstXqJLnQW3aQg+mfuaZvhNaqrzXUsmK1ciN+WlOr+swDil7W9oTPamlethTVnOWlz+dMqPvP/qoZ42/m/dcCNTb2Z05s+vbawnISpmBKj+iLalBeU4wHWkGe3ta52mR+cjjdpGioOrHXnB0NTO6OYPkQkr+DfNhc2krWLmvZtu28NREKVfZUsMEKrcIG1jaDD5RiPBwvC8TW+9v/6zY5/P03Pp+GznnjhmCtfkMADv31BRWDBdWL0TYM2jHowKATgy4MujHwYdCDxygnYK/orewScbOylDyOOsYgcMZjuaW++fKnf3ZHXp+/Nbu8OC+vLcszOSW7Ky8oNjzZXV6w5A2ksSIuc6Rqyd3/4pPV+Vvzs2vy9NJH8vLSvLx6Y1Feml9bX07ehELrSkXlnW++/MlfRjDYcBbH/Wd/BzclYvri8syNWzeWbpLcoZUCtyw5crlBQLSFgKi+BkCWYBxUC4g2Coi1EBDX16eTN+bcmjkCYo0C4i0EJPTb19ACtQLijQISLQSM6KnlpetynYDEZStWJ2CkhYBR/fbK3PQaNMJHq2vzi1a8lu8XYQya8EX0+Q9vrKHKGWjyy7IWdXfKVBzWh4fa3J1o8gR2VEzmzC2T9qUKHQy12pciqc6NYhozz9POtE9qh59EYfWvW2yUUWa7c/ZgtbXTuBCD4jGjAwgPHaT5GY0L2GzTadq/eLGojYr+GQ9Bdrx0CCrRSUTDqHm0TGLpdPJHDA8DfyThkeRjie2FGezBSxIOpicgpwMVzG2cYY89eOi108VKkNuNuXS08QlbX7/vbWuzfXT4+U8MTxX7xJGNekQg6BHnrN62D40YAT1KQB9JTYDi8aw6gMGxCtDjrwFoUKoBWpQOBvpjDwI9QUAvepoAPYnBKQwGXy/QX9UCveA5GOivJQR6moD+YzOgZzA4i8G51wv0Y08N0F++AugYAT1PQG1vE6BvYHABgzdfL9DL3hqgee/BQP+IxplMQH9dB1Qk3mqZqKrPgt3L1Isi3cd2jjD1e9WUb4sXABdJ2Tuk7L+89lExnntrhrfdz+wBrNkTMRF4GYq+xEpeNlgl3i/EQ9EQa5p9uXn2u43Z6ntMDeBiuvBDxn4osU0PU4fZHzD2uI3Zx5gaJBzQBu34FqXURuA8gi+EfGoY2zHCHOgnHOiDapQNOnkn3TwgjDmNEW+0EK3jCTo4vYEHp86c/s2Xn39ct/o6K6+8dHtxZj4pO+vS/vOvqm/ZWQFkmyZqfI2AB8jXtFxO1gw5axYKPGvnHtExZkS4Du9XlpL9F0+r77rlO/T+dColzy7PzcsL88n55jzW2YozEQ4H5HAEnig8MXjigZriBGSNwDMKzxg84wHrOhTL7p7nxS/2X3zsqBHWmJ9De6A5EEZ5R/nV/os/das+4ZqATrzfG4/Gal+uFKxjFatwecm05Wtm0VCtbrT/HRnXSXmDSKPW3MG2iZQ9k0NbJabHwSpxsEocrBIHq8SrrQLFYJWReEC+erX8vA6ruBGqWMyaObhiI47H9KpqnXLFJPQoVCsK1YpCtaKxQE1RbXUIQvxVECKOz3VoCHE9BupjoD4GVo3FAzVFYNUY9LXYaAVCwpo+GEJCeG2HRhDTI2CECKCIAIpItRGgCIwQARQRQBEZFQhGyGuqeZ03PFzMbxUUlcsZSBpWEJt+bVuz5DXTzMnbiiVnODdkq5jNgg+5WczlHsnFvKrYXAWvHG4NNx7Uyd33gMHk/PTc4nxQV0lrmLTe2f/ijzdkB9y
aCcrlpaKegV3BhLwldjD/etUK11KO6Cs5rlhcfqhodkDOF0wEIQO6QtEwNGNLDgaD1kQtU1S/pVg2DHN5TtvSbHm2AEMskFd2NaiyERgdG4FbDEJiH3XGp9MNhK88u5a8BXaAmot0Cuwhr9pmXhhnRSBpdG/pdVe343bSfPdLL7masNI+Gqjdl60bA6wNpmdcRt9FV4/ehf+hF9ZiXHu9bKcNPdSnMKs/ZWywhDM+LFKwHA+CdzroLF7tuGSX6M39Y3pZDwvIbgcstviyvoa3U/DC4tCGb9/RY/W4604nkpYpfY6WHrEkIkGXWDy72Y4P6ZAMSWjZ/ZUHfAdQC27AGVR9VwJHoEb10WrV/a5qwN3xKtUdZdU9zVTf9YB/+7jzAGt1HspaRz3gItfwHhe8h7RW56ut1elA/nOp1lo/9jSzVuehrdX5amu5qoMSOOGPuw6wVtehrDXlBY+9oW91HdpaXa+2VpcD+Qes1lpfe2Gf0GCtrkNbq+vV1nJVM7ZudLrjkjynQXqriuu7pTzg1mcQcY+YMsNKXqs9Z9I5zIdqSCna20HY4mrGVYUm0bRt7nJjKhobHU2Mj4fHE+ORkUTinWgimhidDW9G4mFFyXB1MzOSULLRUWU0Ns7ViBKNjsQykUubZkFX7KkdyzQuWepu+gFMobBpnopc4rqi5aYsBHkpZ2aVHJ/iRvr26qW8YlkPzYI6ZS1gGXBNaaZ1aYsbvAATedrCqcw00llArnELRFna1lRsM5FIbI6PAY7IZlYdVZRwNh7fTIxtJqJRvqmDt4s79kqNaPFwZtHVhemlteXhhenZm/Nzji8w7L4t+OLnzuw+bOEUOeysCm5+AvJPODYOVS086FMlcaGgZaUkXv1b/RA+fPiwxvQ2zsAcvy9J69aWNV6Py12H7syuDI/OTX+0uuEqdqE4Ls9oPcTnXzlrjMgniNltnt3Nm5phk9eHy4G72lSfddWYZP/p5y2sEmlhFcinMxKxfn0HwWjuV1vicmtLDPW6Rzf0YYG+q2oF+nRneZW+5aEPBzLiCx4zzw2KYD91DnkUNdnjMmdzpsXpcwpz1xJnP3lgUSkrm88MYQtSQilsiW+C8OOgdrKtZUUoC7wQhfrCfUdSxmG3yoTRcixW+b4jS+Fuk9MlRTP+DTN3sMjTIXmlAalPaoP/DskvHZV6pTMQviX5pNOe/qr4dy897/VK2gUX2bD1TtmZcXwMeQF8sBn0wWZNPZ/j4Hih2xKkISIohTu1fDM0uyJPyCQoZEXLxZBLe4+yoFXotSpQNum91hnalginZ96weQEdn+sm7VM2htDBpMFVUB6mNSNftJODrPo8L4kUyffKZFxRc5rBLeobmpoMux3AsgtannrPjWXqPcnTyIlfs1S+ikmWewGQ03+OG0nJzdSVfDKEZDgY6NMfys5kt6lHaaDaskXnAwxUtre3l5xyrZ0Xh4z0bcwkBleqOgRVzMSM/0AqaL5eqR0arNc5dMSfF1Id8DsvnaNcn3TkN6Jc/PBg1Af//e29ks/TLnV6jkF5H+W5T7ckeY9d7ZX6IdYuXSYZNIOl0zgY0mmyzv9/fvdtP78bipWHRafbpcmuuHOi7846aBQZKqxWSAE7re2clqFxUuA0eGz6fBJ2OUn83owYioUcEmGpWKwxhexb3MZJTpyptwvRkIe93CACVAVjnEYDfqhJYzSJ7ZrsdbG5X33aOE3f0PMwpYuJHfWVP/ykkWmjIr2YszVnUwYwg3nYMorRebxKWpDvZXkeD/atJA01r1ubAsdFwca9IywGKt9UQCA3siZV+l0kGxBlaVCt5ni6YGZMW8zu15QcFByrK+ebMHNtE0EaXTPCubC2tpIUJc6kCnXCqiuqug0mBg+KZhox+6DI5El3LiODZWDyI4aKN0Jzjph98GRBzF6DriXTaUPROYwemmDKQXmxQZoruqkWc/y3aDZcguBTGI/HPcfg1+4Z8Hjpw0L3v4/i/RDv9+K4xVF+HsL
utu6Obk/32x2eDsn9dfd3/32v9D+6QxtU")))) | 3,032.5 | 6,038 | 0.963561 | 203 | 6,065 | 28.788177 | 0.985222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.145143 | 0.00033 | 6,065 | 2 | 6,038 | 3,032.5 | 0.818737 | 0 | 0 | 0 | 0 | 0.5 | 0.985823 | 0.985823 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 10 |
a52e1be7bf08280f0a30254ff97bbfb15514d680 | 31,060 | py | Python | cvxpy/tests/test_solvers.py | NunoEdgarGFlowHub/cvxpy | 43270fcc8af8fc4742f1b3519800b0074f2e6693 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cvxpy/tests/test_solvers.py | NunoEdgarGFlowHub/cvxpy | 43270fcc8af8fc4742f1b3519800b0074f2e6693 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cvxpy/tests/test_solvers.py | NunoEdgarGFlowHub/cvxpy | 43270fcc8af8fc4742f1b3519800b0074f2e6693 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | """
Copyright 2013 Steven Diamond
This file is part of CVXPY.
CVXPY is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CVXPY is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CVXPY. If not, see <http://www.gnu.org/licenses/>.
"""
import cvxpy as cvx
import numpy as np
from cvxpy.tests.base_test import BaseTest
class TestSolvers(BaseTest):
""" Unit tests for solver specific behavior. """
def setUp(self):
    """Create the scalar, vector, and matrix cvxpy Variables shared by the tests."""
    for scalar_name in ('a', 'b', 'c'):
        setattr(self, scalar_name, cvx.Variable(name=scalar_name))
    for vector_name, length in (('x', 2), ('y', 3), ('z', 2)):
        setattr(self, vector_name, cvx.Variable(length, name=vector_name))
    for matrix_name, shape in (('A', (2, 2)), ('B', (2, 2)), ('C', (3, 2))):
        setattr(self, matrix_name, cvx.Variable(shape, name=matrix_name))
# TODO this works on some machines.
# def test_solver_errors(self):
# """Tests that solver errors throw an exception.
# """
# # For some reason CVXOPT can't handle this problem.
# expr = 500*self.a + square(self.a)
# prob = cvx.Problem(cvx.Minimize(expr))
# with self.assertRaises(Exception) as cm:
# prob.solve(solver=cvx.CVXOPT)
# self.assertEqual(str(cm.exception),
# "Solver 'CVXOPT' failed. Try another solver.")
def test_ecos_options(self):
    """Exercise the full set of ECOS solver options (tolerances, iteration
    cap, verbosity, warm start) on a trivial norm-minimization problem.
    """
    tol = 1e-4
    problem = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
    # Solve twice so the second call exercises warm_start.
    for _ in range(2):
        problem.solve(solver=cvx.ECOS, feastol=tol, abstol=tol, reltol=tol,
                      feastol_inacc=tol, abstol_inacc=tol, reltol_inacc=tol,
                      max_iters=20, verbose=True, warm_start=True)
        self.assertItemsAlmostEqual(self.x.value, [0, 0])
def test_ecos_bb_options(self):
    """Test that all the ECOS_BB branch-and-bound solver options work.

    Options exercised:
      * mi_max_iters -- maximum number of branch-and-bound iterations
        (default: 1000).
      * mi_abs_eps -- absolute tolerance between upper and lower bounds
        (default: 1e-6).
      * mi_rel_eps -- relative tolerance between the bounds.
    """
    # The original body defined EPS = 1e-4 but never used it; removed.
    prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)),
                       [self.x == cvx.Variable(2, boolean=True)])
    # Solve twice so the second call exercises warm_start.
    for _ in range(2):
        prob.solve(solver=cvx.ECOS_BB, mi_max_iters=100, mi_abs_eps=1e-6,
                   mi_rel_eps=1e-5, verbose=True, warm_start=True)
        self.assertItemsAlmostEqual(self.x.value, [0, 0])
def test_scs_options(self):
    """Exercise the SCS solver options (max_iters, eps, alpha, verbosity,
    normalization, direct/indirect solver choice) on a trivial problem.
    If options are omitted, SCS falls back to its defaults.
    """
    tol = 1e-4
    problem = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
    for _ in range(2):
        problem.solve(solver=cvx.SCS, max_iters=50, eps=tol, alpha=tol,
                      verbose=True, normalize=True, use_indirect=False)
        self.assertItemsAlmostEqual(self.x.value, [0, 0])
def test_cvxopt_options(self):
    """Test that all the CVXOPT solver options work.

    Options exercised (CVXOPT names in parentheses):
      * feastol/abstol/reltol -- feasibility / absolute / relative
        tolerances (defaults: 1e-7, 1e-7, 1e-6).
      * max_iters ('maxiters') -- iteration cap (default: 100).
      * refinement -- iterative refinement steps for the KKT system
        (default: 0 without SOC/SDP constraints, 1 otherwise).
      * kktsolver -- which KKT factorization to use ("chol" here).
    """
    # TODO race condition when changing these values.
    # Skip silently when CVXOPT is not installed.
    if cvx.CVXOPT in cvx.installed_solvers():
        EPS = 1e-7
        prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
        # Solve twice so the second call exercises warm_start.
        for i in range(2):
            prob.solve(solver=cvx.CVXOPT, feastol=EPS, abstol=EPS, reltol=EPS,
                       max_iters=20, verbose=True, kktsolver="chol",
                       refinement=2, warm_start=True)
            self.assertItemsAlmostEqual(self.x.value, [0, 0])
def test_cvxopt_glpk(self):
    """Test a basic LP with GLPK.

    Solves a trivial equality-constrained problem and a small inequality
    LP; when GLPK is absent, verifies the "not installed" error instead.
    """
    # Either the problem is solved or GLPK is not installed.
    if cvx.GLPK in cvx.installed_solvers():
        # Trivial problem: x forced to zero.
        prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
        prob.solve(solver=cvx.GLPK)
        self.assertAlmostEqual(prob.value, 0)
        self.assertItemsAlmostEqual(self.x.value, [0, 0])

        # Example from http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
        objective = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
        constraints = [2 * self.x[0] + self.x[1] <= 3,
                       self.x[0] + 2 * self.x[1] <= 3,
                       self.x[0] >= 0,
                       self.x[1] >= 0]
        prob = cvx.Problem(objective, constraints)
        prob.solve(solver=cvx.GLPK)
        # Known optimum of the reference LP: value -9 at x = (1, 1).
        self.assertAlmostEqual(prob.value, -9)
        self.assertItemsAlmostEqual(self.x.value, [1, 1])
    else:
        # Attempting to use an uninstalled solver must raise.
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.GLPK)
        self.assertEqual(str(cm.exception), "The solver %s is not installed." % cvx.GLPK)
def test_cvxopt_glpk_mi(self):
    """Test a basic MILP with GLPK.

    Solves a trivial boolean-constrained problem and a small MILP with
    boolean and integer variables; when GLPK_MI is absent, verifies the
    "not installed" error instead.
    """
    # Either the problem is solved or GLPK is not installed.
    if cvx.GLPK_MI in cvx.installed_solvers():
        bool_var = cvx.Variable(boolean=True)
        int_var = cvx.Variable(integer=True)
        # Trivial problem: x tied to a boolean that is forced to zero.
        prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)),
                           [self.x == bool_var, bool_var == 0])
        prob.solve(solver=cvx.GLPK_MI, verbose=True)
        self.assertAlmostEqual(prob.value, 0)
        self.assertAlmostEqual(bool_var.value, 0)
        self.assertItemsAlmostEqual(self.x.value, [0, 0])

        # Example from http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
        # (augmented with integer/boolean coupling constraints).
        objective = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
        constraints = [2 * self.x[0] + self.x[1] <= int_var,
                       self.x[0] + 2 * self.x[1] <= 3*bool_var,
                       self.x[0] >= 0,
                       self.x[1] >= 0,
                       int_var == 3*bool_var,
                       int_var == 3]
        prob = cvx.Problem(objective, constraints)
        prob.solve(solver=cvx.GLPK_MI, verbose=True)
        # The coupling forces int_var = 3, bool_var = 1, recovering the
        # reference LP optimum: value -9 at x = (1, 1).
        self.assertAlmostEqual(prob.value, -9)
        self.assertAlmostEqual(int_var.value, 3)
        self.assertAlmostEqual(bool_var.value, 1)
        self.assertItemsAlmostEqual(self.x.value, [1, 1])
    else:
        # Attempting to use an uninstalled solver must raise.
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.GLPK_MI)
        self.assertEqual(str(cm.exception), "The solver %s is not installed." % cvx.GLPK_MI)
def test_gurobi(self):
    """Test a basic LP with Gurobi.

    Exercises: a trivial equality-constrained problem, a reference
    inequality LP, a variable with a negative lower bound (to confirm
    cvxpy does not inherit Gurobi's default 0 lower bound), and a MILP
    with boolean/integer variables. When GUROBI is absent, verifies the
    "not installed" error instead.
    """
    if cvx.GUROBI in cvx.installed_solvers():
        # Trivial problem: x forced to zero.
        prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
        prob.solve(solver=cvx.GUROBI)
        self.assertItemsAlmostEqual(self.x.value, [0, 0])

        # Example from http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
        objective = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
        constraints = [2 * self.x[0] + self.x[1] <= 3,
                       self.x[0] + 2 * self.x[1] <= 3,
                       self.x[0] >= 0,
                       self.x[1] >= 0]
        prob = cvx.Problem(objective, constraints)
        prob.solve(solver=cvx.GUROBI)
        # Known optimum of the reference LP: value -9 at x = (1, 1).
        self.assertAlmostEqual(prob.value, -9)
        self.assertItemsAlmostEqual(self.x.value, [1, 1])

        # Gurobi's default lower bound for a decision variable is zero
        # This quick test ensures that the cvxpy interface for GUROBI does *not* have that bound
        objective = cvx.Minimize(self.x[0])
        constraints = [self.x[0] >= -100, self.x[0] <= -10, self.x[1] == 1]
        prob = cvx.Problem(objective, constraints)
        prob.solve(solver=cvx.GUROBI)
        self.assertItemsAlmostEqual(self.x.value, [-100, 1])

        # Boolean and integer version.
        bool_var = cvx.Variable(boolean=True)
        int_var = cvx.Variable(integer=True)
        prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)),
                           [self.x == bool_var, bool_var == 0])
        prob.solve(solver=cvx.GUROBI)
        self.assertAlmostEqual(prob.value, 0)
        self.assertAlmostEqual(bool_var.value, 0)
        self.assertItemsAlmostEqual(self.x.value, [0, 0])

        # Example from http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
        # (augmented with integer/boolean coupling constraints, which force
        # int_var = 3 and bool_var = 1, recovering the LP optimum).
        objective = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
        constraints = [2 * self.x[0] + self.x[1] <= int_var,
                       self.x[0] + 2 * self.x[1] <= 3*bool_var,
                       self.x[0] >= 0,
                       self.x[1] >= 0,
                       int_var == 3*bool_var,
                       int_var == 3]
        prob = cvx.Problem(objective, constraints)
        prob.solve(solver=cvx.GUROBI)
        self.assertAlmostEqual(prob.value, -9)
        self.assertAlmostEqual(int_var.value, 3)
        self.assertAlmostEqual(bool_var.value, 1)
        self.assertItemsAlmostEqual(self.x.value, [1, 1])
    else:
        # Attempting to use an uninstalled solver must raise.
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.GUROBI)
        self.assertEqual(str(cm.exception), "The solver %s is not installed." % cvx.GUROBI)
def test_gurobi_socp(self):
    """Test a basic SOCP with Gurobi."""
    if cvx.GUROBI not in cvx.installed_solvers():
        # Without Gurobi installed, solving must raise the standard error.
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.GUROBI)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.GUROBI)
        return

    # Trivial SOCP: the norm is minimized at the origin.
    prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 2)), [self.x == 0])
    prob.solve(solver=cvx.GUROBI)
    self.assertAlmostEqual(prob.value, 0)
    self.assertItemsAlmostEqual(self.x.value, [0, 0])

    # Example (with one squared constraint) adapted from
    # http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
    obj = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
    cons = [2 * self.x[0] + self.x[1] <= 3,
            (self.x[0] + 2 * self.x[1])**2 <= 9,
            self.x[0] >= 0,
            self.x[1] >= 0]
    prob = cvx.Problem(obj, cons)
    prob.solve(solver=cvx.GUROBI)
    self.assertAlmostEqual(prob.value, -9)
    self.assertItemsAlmostEqual(self.x.value, [1, 1])

    # Gurobi's default lower bound for a decision variable is zero; make
    # sure the cvxpy interface does *not* impose that bound.
    prob = cvx.Problem(cvx.Minimize(self.x[0]),
                       [self.x[0] >= -100, self.x[0] <= -10, self.x[1] == 1])
    prob.solve(solver=cvx.GUROBI)
    self.assertItemsAlmostEqual(self.x.value, [-100, 1])

    # Boolean and integer version.
    bool_var = cvx.Variable(boolean=True)
    int_var = cvx.Variable(integer=True)
    prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 2)),
                       [self.x == bool_var, bool_var == 0])
    prob.solve(solver=cvx.GUROBI)
    self.assertAlmostEqual(prob.value, 0)
    self.assertAlmostEqual(bool_var.value, 0)
    self.assertItemsAlmostEqual(self.x.value, [0, 0])

    # Same cvxopt example, now driven through the mixed-integer variables.
    obj = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
    cons = [2 * self.x[0] + self.x[1] <= int_var,
            (self.x[0] + 2 * self.x[1])**2 <= 9 * bool_var,
            self.x[0] >= 0,
            self.x[1] >= 0,
            int_var == 3 * bool_var,
            int_var == 3]
    prob = cvx.Problem(obj, cons)
    prob.solve(solver=cvx.GUROBI)
    self.assertAlmostEqual(prob.value, -9)
    self.assertAlmostEqual(int_var.value, 3)
    self.assertAlmostEqual(bool_var.value, 1)
    self.assertItemsAlmostEqual(self.x.value, [1, 1])
def test_gurobi_dual(self):
    """Make sure Gurobi's dual result matches other solvers."""
    if cvx.GUROBI in cvx.installed_solvers():
        constraints = [self.x == 0]
        # BUG FIX: the constraints were previously never passed to the
        # Problem, so their dual values were never populated by the solves.
        prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), constraints)
        prob.solve(solver=cvx.GUROBI)
        duals_gurobi = [x.dual_value for x in constraints]
        prob.solve(solver=cvx.ECOS)
        duals_ecos = [x.dual_value for x in constraints]
        self.assertItemsAlmostEqual(duals_gurobi, duals_ecos)

        # Example from http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
        objective = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
        constraints = [2 * self.x[0] + self.x[1] <= 3,
                       self.x[0] + 2 * self.x[1] <= 3,
                       self.x[0] >= 0,
                       self.x[1] >= 0]
        prob = cvx.Problem(objective, constraints)
        prob.solve(solver=cvx.GUROBI)
        duals_gurobi = [x.dual_value for x in constraints]
        prob.solve(solver=cvx.ECOS)
        duals_ecos = [x.dual_value for x in constraints]
        self.assertItemsAlmostEqual(duals_gurobi, duals_ecos)
    else:
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.GUROBI)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.GUROBI)
# I copied (and modified) the LP, SOCP, and dual GUROBI tests for MOSEK
def test_mosek(self):
    """Test a basic LP with Mosek."""
    if cvx.MOSEK not in cvx.installed_solvers():
        # Without Mosek installed, solving must raise the standard error.
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.MOSEK)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.MOSEK)
        return

    # Minimum-norm problem pinned to the origin.
    prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
    prob.solve(solver=cvx.MOSEK)
    self.assertItemsAlmostEqual(self.x.value, [0, 0])

    # Example from
    # http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
    obj = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
    cons = [2 * self.x[0] + self.x[1] <= 3,
            self.x[0] + 2 * self.x[1] <= 3,
            self.x[0] >= 0,
            self.x[1] >= 0]
    prob = cvx.Problem(obj, cons)
    prob.solve(solver=cvx.MOSEK)
    self.assertAlmostEqual(prob.value, -9)
    self.assertItemsAlmostEqual(self.x.value, [1, 1])

    # Variables must not pick up an implicit lower bound of zero.
    prob = cvx.Problem(cvx.Minimize(self.x[0]),
                       [self.x[0] >= -100, self.x[0] <= -10, self.x[1] == 1])
    prob.solve(solver=cvx.MOSEK)
    self.assertItemsAlmostEqual(self.x.value, [-100, 1])
def test_mosek_socp(self):
    """Test a basic SOCP with Mosek."""
    if cvx.MOSEK not in cvx.installed_solvers():
        # Without Mosek installed, solving must raise the standard error.
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.MOSEK)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.MOSEK)
        return

    # Trivial SOCP: the norm is minimized at the origin.
    prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 2)), [self.x == 0])
    prob.solve(solver=cvx.MOSEK)
    self.assertAlmostEqual(prob.value, 0)
    self.assertItemsAlmostEqual(self.x.value, [0, 0])

    # Example (with one squared constraint) adapted from
    # http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
    obj = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
    cons = [2 * self.x[0] + self.x[1] <= 3,
            (self.x[0] + 2 * self.x[1])**2 <= 9,
            self.x[0] >= 0,
            self.x[1] >= 0]
    prob = cvx.Problem(obj, cons)
    prob.solve(solver=cvx.MOSEK)
    self.assertAlmostEqual(prob.value, -9)
    self.assertItemsAlmostEqual(self.x.value, [1, 1])

    # Variables must not pick up an implicit lower bound of zero.
    prob = cvx.Problem(cvx.Minimize(self.x[0]),
                       [self.x[0] >= -100, self.x[0] <= -10, self.x[1] == 1])
    prob.solve(solver=cvx.MOSEK)
    self.assertItemsAlmostEqual(self.x.value, [-100, 1])
def test_mosek_dual(self):
    """Make sure Mosek's dual result matches other solvers."""
    if cvx.MOSEK in cvx.installed_solvers():
        constraints = [self.x == 0]
        # BUG FIX: the constraints were previously never passed to the
        # Problem, so their dual values were never populated by the solves.
        prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), constraints)
        prob.solve(solver=cvx.MOSEK)
        duals_mosek = [x.dual_value for x in constraints]
        prob.solve(solver=cvx.ECOS)
        duals_ecos = [x.dual_value for x in constraints]
        self.assertItemsAlmostEqual(duals_mosek, duals_ecos)

        # Example from http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
        objective = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
        constraints = [2 * self.x[0] + self.x[1] <= 3,
                       self.x[0] + 2 * self.x[1] <= 3,
                       self.x[0] >= 0,
                       self.x[1] >= 0]
        prob = cvx.Problem(objective, constraints)
        prob.solve(solver=cvx.MOSEK)
        duals_mosek = [x.dual_value for x in constraints]
        prob.solve(solver=cvx.ECOS)
        duals_ecos = [x.dual_value for x in constraints]
        self.assertItemsAlmostEqual(duals_mosek, duals_ecos)
    else:
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.MOSEK)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.MOSEK)
def test_mosek_sdp(self):
    """Solve a small equilibration-style SDP with Mosek."""
    # TODO: should work with PSD (>>, <<).
    if cvx.MOSEK in cvx.installed_solvers():
        # Test optimality gap for equilibration.
        n = 3
        Art = np.random.randn(n, n)
        t = cvx.Variable()
        d = cvx.Variable(n)
        D = cvx.diag(d)
        # NOTE: removed unused locals (m, A, Ainv) and the legacy-API
        # variable Z = cvx.Variable(n, n); none of them appeared in the
        # problem below, and the old two-positional-ints Variable signature
        # is not accepted by cvxpy >= 1.0.
        constr = [Art * D * Art.T - np.eye(n) == cvx.Variable((n, n), PSD=True),
                  cvx.Variable((n, n), PSD=True) == t * np.eye(n) - Art * D * Art.T,
                  d >= 0]
        prob = cvx.Problem(cvx.Minimize(t), constr)
        prob.solve(solver=cvx.MOSEK)
    else:
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.MOSEK)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.MOSEK)
def test_mosek_params(self):
    """Check handling of the mosek_params solve argument."""
    if cvx.MOSEK not in cvx.installed_solvers():
        # Nothing to check without Mosek (the original test was a no-op too).
        return

    import numpy as np
    import numpy.random as rnd
    import mosek

    n, m = 10, 4
    A = rnd.randn(m, n)
    x = rnd.randn(n)
    y = A.dot(x)

    # Solve a simple basis pursuit problem for testing purposes.
    z = cvx.Variable(n)
    problem = cvx.Problem(cvx.Minimize(cvx.norm1(z)), [A * z == y])

    # A string value for a double parameter must be rejected.
    invalid_mosek_params = {"dparam.basis_tol_x": "1e-8"}
    with self.assertRaises(ValueError):
        problem.solve(solver=cvx.MOSEK, mosek_params=invalid_mosek_params)

    # Unknown keyword arguments must be rejected as well.
    with self.assertRaises(ValueError):
        problem.solve(solver=cvx.MOSEK, invalid_kwarg=None)

    # Parameters may be given as mosek enum members or as string names.
    mosek_params = {
        mosek.dparam.basis_tol_x: 1e-8,
        "MSK_IPAR_INTPNT_MAX_ITERATIONS": 20,
    }
    problem.solve(solver=cvx.MOSEK, mosek_params=mosek_params)
def test_gurobi_warm_start(self):
    """Make sure that warm starting Gurobi behaves as expected.

    Note: This only checks output, not whether or not Gurobi is warm
    starting internally.
    """
    if cvx.GUROBI in cvx.installed_solvers():
        import numpy as np

        A = cvx.Parameter((2, 2))
        b = cvx.Parameter(2)
        h = cvx.Parameter(2)
        c = cvx.Parameter(2)
        # FIX: np.matrix is deprecated; use plain ndarrays for the values.
        A.value = np.array([[1, 0], [0, 0]])
        b.value = np.array([1, 0])
        h.value = np.array([2, 2])
        c.value = np.array([1, 1])

        objective = cvx.Maximize(c[0] * self.x[0] + c[1] * self.x[1])
        constraints = [self.x[0] <= h[0],
                       self.x[1] <= h[1],
                       A * self.x == b]
        prob = cvx.Problem(objective, constraints)
        result = prob.solve(solver=cvx.GUROBI, warm_start=True)
        self.assertEqual(result, 3)
        self.assertItemsAlmostEqual(self.x.value, [1, 2])
        # (Removed the unused orig_objective / orig_x bindings.)

        # Change A and b from the original values.
        A.value = np.array([[0, 0], [0, 1]])  # <----- Changed
        b.value = np.array([0, 1])  # <----- Changed
        h.value = np.array([2, 2])
        c.value = np.array([1, 1])
        # Without setting update_eq_constrs = False, the results should
        # change to the correct answer.
        result = prob.solve(solver=cvx.GUROBI, warm_start=True)
        self.assertEqual(result, 3)
        self.assertItemsAlmostEqual(self.x.value, [2, 1])

        # Change h from the original values.
        A.value = np.array([[1, 0], [0, 0]])
        b.value = np.array([1, 0])
        h.value = np.array([1, 1])  # <----- Changed
        c.value = np.array([1, 1])
        # Without setting update_ineq_constrs = False, the results should
        # change to the correct answer.
        result = prob.solve(solver=cvx.GUROBI, warm_start=True)
        self.assertEqual(result, 2)
        self.assertItemsAlmostEqual(self.x.value, [1, 1])

        # Change c from the original values.
        A.value = np.array([[1, 0], [0, 0]])
        b.value = np.array([1, 0])
        h.value = np.array([2, 2])
        c.value = np.array([2, 1])  # <----- Changed
        # Without setting update_objective = False, the results should
        # change to the correct answer.
        result = prob.solve(solver=cvx.GUROBI, warm_start=True)
        self.assertEqual(result, 4)
        self.assertItemsAlmostEqual(self.x.value, [1, 2])
    else:
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.GUROBI, warm_start=True)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.GUROBI)
def test_xpress(self):
    """Test a basic LP with Xpress."""
    if cvx.XPRESS not in cvx.installed_solvers():
        # Without Xpress installed, solving must raise the standard error.
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.XPRESS)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.XPRESS)
        return

    # Minimum-norm problem pinned to the origin.
    prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
    prob.solve(solver=cvx.XPRESS)
    self.assertItemsAlmostEqual(self.x.value, [0, 0])

    # Example from
    # http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
    obj = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
    cons = [2 * self.x[0] + self.x[1] <= 3,
            self.x[0] + 2 * self.x[1] <= 3,
            self.x[0] >= 0,
            self.x[1] >= 0]
    prob = cvx.Problem(obj, cons)
    prob.solve(solver=cvx.XPRESS)
    self.assertAlmostEqual(prob.value, -9)
    self.assertItemsAlmostEqual(self.x.value, [1, 1])

    # Variables must not pick up an implicit lower bound of zero.
    prob = cvx.Problem(cvx.Minimize(self.x[0]),
                       [self.x[0] >= -100, self.x[0] <= -10, self.x[1] == 1])
    prob.solve(solver=cvx.XPRESS)
    self.assertItemsAlmostEqual(self.x.value, [-100, 1])
def test_xpress_socp(self):
    """Test a basic SOCP with Xpress."""
    if cvx.XPRESS not in cvx.installed_solvers():
        # Without Xpress installed, solving must raise the standard error.
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.XPRESS)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.XPRESS)
        return

    # Trivial SOCP: the norm is minimized at the origin.
    prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 2)), [self.x == 0])
    prob.solve(solver=cvx.XPRESS)
    self.assertAlmostEqual(prob.value, 0)
    self.assertItemsAlmostEqual(self.x.value, [0, 0])

    # Example (with one squared constraint) adapted from
    # http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
    obj = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
    cons = [2 * self.x[0] + self.x[1] <= 3,
            (self.x[0] + 2 * self.x[1])**2 <= 9,
            self.x[0] >= 0,
            self.x[1] >= 0]
    prob = cvx.Problem(obj, cons)
    prob.solve(solver=cvx.XPRESS)
    self.assertAlmostEqual(prob.value, -9)
    self.assertItemsAlmostEqual(self.x.value, [1, 1])

    # Variables must not pick up an implicit lower bound of zero.
    prob = cvx.Problem(cvx.Minimize(self.x[0]),
                       [self.x[0] >= -100, self.x[0] <= -10, self.x[1] == 1])
    prob.solve(solver=cvx.XPRESS)
    self.assertItemsAlmostEqual(self.x.value, [-100, 1])
def test_xpress_dual(self):
    """Make sure Xpress' dual result matches other solvers."""
    if cvx.XPRESS in cvx.installed_solvers():
        constraints = [self.x == 0]
        # BUG FIX: the constraints were previously never passed to the
        # Problem, so their dual values were never populated by the solves.
        prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), constraints)
        prob.solve(solver=cvx.XPRESS)
        duals_xpress = [x.dual_value for x in constraints]
        prob.solve(solver=cvx.ECOS)
        duals_ecos = [x.dual_value for x in constraints]
        self.assertItemsAlmostEqual(duals_xpress, duals_ecos)

        # Example from http://cvxopt.org/userguide/coneprog.html?highlight=solvers.lp#cvxopt.solvers.lp
        objective = cvx.Minimize(-4 * self.x[0] - 5 * self.x[1])
        constraints = [2 * self.x[0] + self.x[1] <= 3,
                       self.x[0] + 2 * self.x[1] <= 3,
                       self.x[0] >= 0,
                       self.x[1] >= 0]
        prob = cvx.Problem(objective, constraints)
        prob.solve(solver=cvx.XPRESS)
        duals_xpress = [x.dual_value for x in constraints]
        prob.solve(solver=cvx.ECOS)
        duals_ecos = [x.dual_value for x in constraints]
        self.assertItemsAlmostEqual(duals_xpress, duals_ecos)
    else:
        with self.assertRaises(Exception) as cm:
            prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
            prob.solve(solver=cvx.XPRESS)
        self.assertEqual(str(cm.exception),
                         "The solver %s is not installed." % cvx.XPRESS)
def test_installed_solvers(self):
    """Test the list of installed solvers."""
    from cvxpy.reductions.solvers.defines import (SOLVER_MAP_CONIC, SOLVER_MAP_QP,
                                                  INSTALLED_SOLVERS)
    prob = cvx.Problem(cvx.Minimize(cvx.norm(self.x, 1)), [self.x == 0])
    # The conic and QP solver maps get identical treatment: installed
    # solvers must solve the trivial problem, missing ones must raise.
    for solver_map in (SOLVER_MAP_CONIC, SOLVER_MAP_QP):
        for solver in solver_map.keys():
            if solver in INSTALLED_SOLVERS:
                prob.solve(solver=solver)
                self.assertItemsAlmostEqual(self.x.value, [0, 0])
            else:
                with self.assertRaises(Exception) as cm:
                    prob.solve(solver=solver)
                self.assertEqual(str(cm.exception),
                                 "The solver %s is not installed." % solver)
| 46.847662 | 173 | 0.551771 | 3,956 | 31,060 | 4.281092 | 0.081901 | 0.068788 | 0.035073 | 0.064832 | 0.817312 | 0.802137 | 0.775803 | 0.754842 | 0.746162 | 0.7327 | 0 | 0.027285 | 0.319156 | 31,060 | 662 | 174 | 46.918429 | 0.773585 | 0.170766 | 0 | 0.774468 | 0 | 0 | 0.020744 | 0.001174 | 0 | 0 | 0 | 0.004532 | 0.221277 | 1 | 0.042553 | false | 0 | 0.017021 | 0 | 0.061702 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a5b3487eba41d4d820a0950c8882faaf68951d9b | 147 | py | Python | Python_ch5_10/rectangle.py | ninhnguyen01/Python_Book | e5e372f1895b06e908cd0dd07dc68a260c34d7ad | [
"Apache-2.0"
] | null | null | null | Python_ch5_10/rectangle.py | ninhnguyen01/Python_Book | e5e372f1895b06e908cd0dd07dc68a260c34d7ad | [
"Apache-2.0"
] | null | null | null | Python_ch5_10/rectangle.py | ninhnguyen01/Python_Book | e5e372f1895b06e908cd0dd07dc68a260c34d7ad | [
"Apache-2.0"
] | null | null | null | # Rectangle Module (5.10 program)
def area(width, length):
    """Return the area of a rectangle with the given side lengths."""
    return length * width
def perimeter(width, length):
    """Return the perimeter of a rectangle with the given side lengths."""
    return (width + length) * 2
3c56e382ea99bc95637381a721c6966d95920cbc | 5,935 | py | Python | moco_wrapper/util/generator/item_generator.py | nethad/moco-wrapper | 012f9aab6e9fa60e3ccdf7254f0366b108651899 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | moco_wrapper/util/generator/item_generator.py | nethad/moco-wrapper | 012f9aab6e9fa60e3ccdf7254f0366b108651899 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | moco_wrapper/util/generator/item_generator.py | nethad/moco-wrapper | 012f9aab6e9fa60e3ccdf7254f0366b108651899 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | class ItemGenerator(object):
def generate_title(
self,
title
):
"""generate an invoice item of type "title"
:param title: title the item should have
:returns: the item
"""
return {
"type": "title",
"title": title
}
def generate_description(
self,
description
):
"""generate an invoice item of type "description"
:param description: description the item should have
:returns: the item
"""
return {
"type" : "description",
"description": description
}
def generate_pagebreak(self):
return {
"type": "page-break"
}
def generate_subtotal(self, title):
return {
"title": title,
"type": "subtotal"
}
def generate_separator(
self,
):
"""generate an invoice item of type "separator"
:returns: the item
"""
return {
"type": "separator"
}
class OfferItemGenerator(ItemGenerator):
    """Builds item dictionaries for offers; positions may be marked optional."""

    def generate_item(
        self,
        title: str,
        quantity: float = None,
        unit: str = None,
        unit_price: float = None,
        net_total: float = None,
        optional: bool = False
    ):
        """Generate an offer item of type "item".

        (Docstring typos fixed; the ``unit`` annotation was corrected from
        ``int`` to ``str`` for consistency with the sibling methods and
        with ``InvoiceItemGenerator``.)

        :param title: title of the item
        :param quantity: quantity of the supplied item
        :param unit: unit name of the supplied item
        :param unit_price: unit price of the supplied item
        :param net_total: net total sum (either this is supplied or unit,
            unit_price, and quantity)
        :param optional: whether the item is an optional item or not
            (default False)
        :returns: the item
        """
        data = {
            "type": "item",
            "title": title
        }

        # Only include keys whose value was supplied.  Note that "optional"
        # defaults to False (not None), so it is always present.
        for key, value in (
            ("quantity", quantity),
            ("unit", unit),
            ("unit_price", unit_price),
            ("net_total", net_total),
            ("optional", optional)
        ):
            if value is not None:
                data[key] = value

        return data

    def generate_detail_postion(
        self,
        title: str,
        quantity: float,
        unit: str,
        unit_price: float,
        optional: bool = False
    ):
        """Generate a detailed position item for an offer item list.

        Use this when the position can be split into units (for example
        hours: the unit, unit_price, and quantity describe a single hour).

        :param title: title of the position item
        :param quantity: how many of the things (i.e. how many hours)
        :param unit: what is the thing (i.e. hours)
        :param unit_price: price of a single thing (i.e. price of a single hour)
        :param optional: if the position is optional or not (default False)
        :returns: the item
        """
        return self.generate_item(title, quantity=quantity, unit=unit,
                                  unit_price=unit_price, optional=optional)

    def generate_lump_position(
        self,
        title: str,
        net_total: float,
        optional: bool = False
    ):
        """Generate a general (lump-sum) position item for an offer item list.

        Use this if the position cannot (or you do not want to) be split
        into units.

        :param title: title of the position
        :param net_total: total price of the position
        :param optional: if the position is optional or not (default False)
        :returns: the item
        """
        return self.generate_item(title, net_total=net_total, optional=optional)
class InvoiceItemGenerator(ItemGenerator):
    """Builds item dictionaries for invoices (no optional positions)."""

    def generate_item(
        self,
        title: str,
        quantity: float = None,
        unit: str = None,
        unit_price: float = None,
        net_total: float = None
    ):
        """Generate an invoice item of type "item".

        :param title: title of the item
        :param quantity: quantity of the supplied item
        :param unit: unit name of the supplied item
        :param unit_price: unit price of the supplied item
        :param net_total: net total sum (either this is supplied or unit,
            unit_price, and quantity)
        :returns: the item
        """
        data = {
            "type": "item",
            "title": title
        }

        # Only include keys whose value was supplied.
        for key, value in (
            ("quantity", quantity),
            ("unit", unit),
            ("unit_price", unit_price),
            ("net_total", net_total),
        ):
            if value is not None:
                data[key] = value

        return data

    def generate_detail_postion(
        self,
        title: str,
        quantity: float,
        unit: str,
        unit_price: float
    ):
        """Generate a detailed position item for an invoice item list.

        Use this when the position can be split into units (for example
        hours: the unit, unit_price, and quantity describe a single hour).
        (Removed the stale ``:param optional:`` docs — invoices have no
        optional positions.)

        :param title: title of the position item
        :param quantity: how many of the things (i.e. how many hours)
        :param unit: what is the thing (i.e. hours)
        :param unit_price: price of a single thing (i.e. price of a single hour)
        :returns: the item
        """
        return self.generate_item(title, quantity=quantity, unit=unit,
                                  unit_price=unit_price)

    def generate_lump_position(
        self,
        title: str,
        net_total: float
    ):
        """Generate a general (lump-sum) position item for an invoice item list.

        Use this if the position cannot (or you do not want to) be split
        into units.

        :param title: title of the position
        :param net_total: total price of the position
        :returns: the item
        """
        return self.generate_item(title, net_total=net_total)
| 29.974747 | 203 | 0.571019 | 715 | 5,935 | 4.662937 | 0.13007 | 0.059388 | 0.031194 | 0.030594 | 0.859028 | 0.85183 | 0.806239 | 0.806239 | 0.806239 | 0.758248 | 0 | 0 | 0.349284 | 5,935 | 197 | 204 | 30.126904 | 0.863283 | 0.431677 | 0 | 0.648148 | 1 | 0 | 0.061517 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.101852 | false | 0 | 0 | 0.018519 | 0.231481 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
592ee03b2b2da8d3c8eaac76f11afd69db6c1086 | 4,490 | py | Python | tests/geographic/local/test_get_layer_field_historical.py | PEM-Humboldt/regi0 | 0d64587d5d87f57cddfc7a67bb8baf74cd70adf2 | [
"MIT"
] | null | null | null | tests/geographic/local/test_get_layer_field_historical.py | PEM-Humboldt/regi0 | 0d64587d5d87f57cddfc7a67bb8baf74cd70adf2 | [
"MIT"
] | 15 | 2022-02-03T11:38:37.000Z | 2022-03-09T23:23:04.000Z | tests/geographic/local/test_get_layer_field_historical.py | PEM-Humboldt/regi0 | 0d64587d5d87f57cddfc7a67bb8baf74cd70adf2 | [
"MIT"
] | null | null | null | """
Test cases for the regi0.geographic.local.get_layer_field_historical function.
"""
import numpy as np
import pandas as pd
from regi0.geographic.local import get_layer_field_historical
def test_direction_backward(records, data_path):
    """Values resolved with direction="backward" match the expected series."""
    layer = data_path.joinpath("gpkg/admin1.gpkg")
    result = get_layer_field_historical(
        records,
        layer,
        date_col="eventDate",
        field="dptos",
        direction="backward",
    )
    values = (
        ["BOYACA"] * 4
        + ["SANTANDER"] * 6
        + ["BOYACA", np.nan, np.nan, "BOYACA", "BOYACA"]
        + [np.nan] * 7
    )
    expected = pd.Series(values)
    pd.testing.assert_series_equal(result, expected, check_names=False)
def test_direction_nearest(records, data_path):
    """Values resolved with direction="nearest" match the expected series."""
    layer = data_path.joinpath("gpkg/admin1.gpkg")
    result = get_layer_field_historical(
        records,
        layer,
        date_col="eventDate",
        field="dptos",
        direction="nearest",
    )
    values = (
        ["BOYACA"] * 4
        + ["SANTANDER"] * 6
        + ["BOYACA", "BOYACA", np.nan, "BOYACA", "BOYACA"]
        + [np.nan, np.nan, "SANTANDER"]
        + [np.nan] * 4
    )
    expected = pd.Series(values)
    pd.testing.assert_series_equal(result, expected, check_names=False)
def test_direction_forward(records, data_path):
    """Values resolved with direction="forward" match the expected series."""
    layer = data_path.joinpath("gpkg/admin1.gpkg")
    result = get_layer_field_historical(
        records,
        layer,
        date_col="eventDate",
        field="dptos",
        direction="forward",
    )
    values = (
        ["BOYACA"] * 4
        + ["SANTANDER"] * 3
        + ["BOYACA", np.nan, np.nan, np.nan, "BOYACA"]
        + [np.nan, np.nan, "BOYACA", np.nan, np.nan, "SANTANDER"]
        + [np.nan] * 4
    )
    expected = pd.Series(values)
    pd.testing.assert_series_equal(result, expected, check_names=False)
def test_source(records, data_path):
    """With return_source=True the per-record source layer names are returned."""
    # FIX: the field-value series returned alongside the sources was bound
    # to an unused local; it is now explicitly discarded.
    _, source = get_layer_field_historical(
        records,
        data_path.joinpath("gpkg/admin1.gpkg"),
        date_col="eventDate",
        field="dptos",
        direction="forward",
        return_source=True,
    )
    values = (
        ["admin1_2011"] * 4
        + ["admin1_2003"] * 4
        + [np.nan, np.nan, np.nan, "admin1_1973"]
        + [np.nan, np.nan, "admin1_2011", np.nan, np.nan, "admin1_1973"]
        + [np.nan] * 4
    )
    expected = pd.Series(values)
    pd.testing.assert_series_equal(source, expected, check_names=False)
def test_folder(records, data_path):
    """Passing a folder of layers (shapefiles) works like a single file."""
    result = get_layer_field_historical(
        records,
        data_path.joinpath("shp/"),
        date_col="eventDate",
        field="dptos",
        direction="nearest",
    )
    values = (
        ["BOYACA"] * 4
        + ["SANTANDER"] * 6
        + ["BOYACA", "BOYACA", np.nan, "BOYACA", "BOYACA"]
        + [np.nan, np.nan, "SANTANDER"]
        + [np.nan] * 4
    )
    expected = pd.Series(values)
    pd.testing.assert_series_equal(result, expected, check_names=False)
| 23.14433 | 78 | 0.452116 | 390 | 4,490 | 5.023077 | 0.138462 | 0.114855 | 0.103624 | 0.148035 | 0.85707 | 0.854007 | 0.818785 | 0.795304 | 0.778458 | 0.765186 | 0 | 0.024073 | 0.435635 | 4,490 | 193 | 79 | 23.264249 | 0.749013 | 0.017372 | 0 | 0.815642 | 0 | 0 | 0.156903 | 0 | 0 | 0 | 0 | 0 | 0.027933 | 1 | 0.027933 | false | 0 | 0.01676 | 0 | 0.044693 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3cc7ffeb35daf9f49f500126829c9ccae0a9c7c1 | 155 | py | Python | datacode/models/variables/compare.py | nickderobertis/data-code | b4649a1702ccd6b68995e58cc53d74544ff34294 | [
"MIT"
] | null | null | null | datacode/models/variables/compare.py | nickderobertis/data-code | b4649a1702ccd6b68995e58cc53d74544ff34294 | [
"MIT"
] | 44 | 2020-02-01T03:28:59.000Z | 2022-03-12T00:35:17.000Z | datacode/models/variables/compare.py | nickderobertis/data-code | b4649a1702ccd6b68995e58cc53d74544ff34294 | [
"MIT"
] | 1 | 2021-08-09T11:10:57.000Z | 2021-08-09T11:10:57.000Z | from typing import Callable
def functions_are_equal(func: Callable, func2: Callable) -> bool:
    """Return True when the two callables compile to identical raw bytecode.

    Only ``__code__.co_code`` is compared, so functions that differ solely
    in their constants or referenced names may still compare equal.
    """
    first = func.__code__.co_code
    second = func2.__code__.co_code
    return first == second
| 25.833333 | 65 | 0.774194 | 22 | 155 | 4.909091 | 0.636364 | 0.111111 | 0.185185 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014925 | 0.135484 | 155 | 5 | 66 | 31 | 0.791045 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
59d876f9dc90d7eae5dc21d79b8133d2c9627334 | 188 | py | Python | Qanalysis/__init__.py | eunjongkim/Qanalysis | 3034bc7085bea3d7db26ad80ddc09586dbcb2e1a | [
"MIT"
] | 1 | 2019-12-18T08:25:33.000Z | 2019-12-18T08:25:33.000Z | Qanalysis/__init__.py | eunjongkim/Qanalysis | 3034bc7085bea3d7db26ad80ddc09586dbcb2e1a | [
"MIT"
] | null | null | null | Qanalysis/__init__.py | eunjongkim/Qanalysis | 3034bc7085bea3d7db26ad80ddc09586dbcb2e1a | [
"MIT"
] | null | null | null | # from .frequency_domain import SingleSidedS11Fit, WaveguideCoupledS21Fit
from .time_domain import *
from .frequency_domain import *
from .readout import *
from .helper_functions import *
| 31.333333 | 73 | 0.824468 | 21 | 188 | 7.190476 | 0.47619 | 0.238411 | 0.251656 | 0.331126 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024096 | 0.117021 | 188 | 5 | 74 | 37.6 | 0.885542 | 0.37766 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
abc5d7e104251f72558b72545e7794efd4db51c1 | 212 | py | Python | mjmpc/value_functions/__init__.py | mohakbhardwaj/mjmpc | 097e8d9bdaf0b3a15afa39030b2f53b00dfa25de | [
"Apache-2.0"
] | 2 | 2021-08-15T22:23:50.000Z | 2021-12-03T13:09:13.000Z | mjmpc/value_functions/__init__.py | mohakbhardwaj/mjmpc | 097e8d9bdaf0b3a15afa39030b2f53b00dfa25de | [
"Apache-2.0"
] | null | null | null | mjmpc/value_functions/__init__.py | mohakbhardwaj/mjmpc | 097e8d9bdaf0b3a15afa39030b2f53b00dfa25de | [
"Apache-2.0"
] | 1 | 2022-02-18T10:22:49.000Z | 2022-02-18T10:22:49.000Z | from .linear_val_func import LinearVF
from .quadratic_val_func import QuadraticVF
from .linear_time_varying_val_func import LinearTimeVaryingVF
from .quadratic_time_varying_val_func import QuadraticTimeVaryingVF
| 42.4 | 67 | 0.90566 | 28 | 212 | 6.428571 | 0.428571 | 0.155556 | 0.288889 | 0.2 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075472 | 212 | 4 | 68 | 53 | 0.918367 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
abc7e278907ff4d28b3db632a7986e7b14e7ab61 | 578 | py | Python | venv/Lib/site-packages/tensorflow/_api/v2/math/special/__init__.py | rexliu3/StockTradingBotCloud | 46b732b9c05f73bc0e856a3c4a16854b6d12e18e | [
"MIT"
] | null | null | null | venv/Lib/site-packages/tensorflow/_api/v2/math/special/__init__.py | rexliu3/StockTradingBotCloud | 46b732b9c05f73bc0e856a3c4a16854b6d12e18e | [
"MIT"
] | null | null | null | venv/Lib/site-packages/tensorflow/_api/v2/math/special/__init__.py | rexliu3/StockTradingBotCloud | 46b732b9c05f73bc0e856a3c4a16854b6d12e18e | [
"MIT"
] | 1 | 2020-06-28T11:47:47.000Z | 2020-06-28T11:47:47.000Z | # This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.math.special namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.ops.special_math_ops import dawsn
from tensorflow.python.ops.special_math_ops import expint
from tensorflow.python.ops.special_math_ops import fresnel_cos
from tensorflow.python.ops.special_math_ops import fresnel_sin
from tensorflow.python.ops.special_math_ops import spence
del _print_function
| 34 | 82 | 0.84083 | 89 | 578 | 5.191011 | 0.426966 | 0.207792 | 0.21645 | 0.248918 | 0.495671 | 0.495671 | 0.495671 | 0.495671 | 0.21645 | 0 | 0 | 0 | 0.096886 | 578 | 16 | 83 | 36.125 | 0.885057 | 0.290657 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.875 | 0 | 0.875 | 0.25 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
abc8938d53f4014edf28871ba11962eecde9ed68 | 1,239 | py | Python | v1/teaching.py | mmcinnestaylor/personal-homepage | c678db3e1110262035a8fe65a19832b3dec0c49f | [
"MIT"
] | null | null | null | v1/teaching.py | mmcinnestaylor/personal-homepage | c678db3e1110262035a8fe65a19832b3dec0c49f | [
"MIT"
] | null | null | null | v1/teaching.py | mmcinnestaylor/personal-homepage | c678db3e1110262035a8fe65a19832b3dec0c49f | [
"MIT"
] | null | null | null | teaching_appointments = {
'2021':
[
{
'course':'CGS 2100 Microcomputer Applications for Business and Economics',
'semester':'Spring',
'position':'Undergraduate Teaching Assistant',
'website': None
},
],
'2020':
[
{
'course':'COP 3363 Programming I for Majors',
'semester':'Spring',
'position':'Undergraduate Teaching Assistant',
'website': None
},
{
'course':'CGS 2100 Microcomputer Applications for Business and Economics',
'semester':'Fall',
'position':'Undergraduate Teaching Assistant',
'website': None
}
],
'2019':
[
{
'course':'COP 3014 Programming I for Non-Majors',
'semester':'Spring',
'position':'Undergraduate Teaching Assistant',
'website': None
},
{
'course':'COP 3014 Programming I for Non-Majors',
'semester':'Fall',
'position':'Undergraduate Teaching Assistant',
'website': None
}
],
'2018':
[
{
'course':'CGS 2100 Microcomputer Applications for Business and Economics',
'semester':'Spring',
'position':'Undergraduate Teaching Assistant',
'website': None
},
{
'course':'COP 3014 Programming I for Non-Majors',
'semester':'Fall',
'position':'Undergraduate Teaching Assistant',
'website': None
}
]
}
| 21.736842 | 77 | 0.645682 | 120 | 1,239 | 6.658333 | 0.25 | 0.18398 | 0.254068 | 0.332916 | 0.9199 | 0.9199 | 0.9199 | 0.9199 | 0.853567 | 0.814768 | 0 | 0.044266 | 0.19774 | 1,239 | 56 | 78 | 22.125 | 0.759557 | 0 | 0 | 0.535714 | 0 | 0 | 0.652946 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
f9fa143d13298eaa56815c87e223774c0ba2dad5 | 7,697 | py | Python | no5/no5.py | shopetan/ksnctf | f4b7724c956a5dc0590c83ff6f5fb6a249b81371 | [
"MIT"
] | null | null | null | no5/no5.py | shopetan/ksnctf | f4b7724c956a5dc0590c83ff6f5fb6a249b81371 | [
"MIT"
] | null | null | null | no5/no5.py | shopetan/ksnctf | f4b7724c956a5dc0590c83ff6f5fb6a249b81371 | [
"MIT"
] | null | null | null | import codecs
import base64
Basic = base64.b64decode("Vm0wd2QyUXlVWGxWV0d4V1YwZDRWMVl3WkRSV01WbDNXa1JTV0ZKdGVGWlZNakExVmpBeFYySkVUbGhoTWsweFZtcEdZV015U2tWVQpiR2hvVFZWd1ZWWnRjRWRUTWxKSVZtdFdVZ3BpVlZwWVZtMTRjMDB4V25GUmJVWlVUV3hLU1ZadGRHdFhRWEJwVW01Q1VGZFhNSGhpCk1WWlhWMjVHVW1KVldtRldha0Y0VGxaVmVXUkdaRmRWV0VKd1ZXcEtiMlJzV2tkWGJHUnJDazFXY0ZoV01qVlRZV3hLV0ZWdFJsZGgKYTFwTVZURmFZV1JIVWtkYVJscGhUVEJLZDFadGVHRmtNV1JYVjI1U1RsWkdTbkZEYXpGRlVXcFNWMDFxVmxSWlYzaExWMFpXYzFacwpWbGNLVFRKb1RWWlVSbUZaVjFKSVZXdHNWV0pYYUZkV01GWkxWbFprV0dWSGRHbE5iRXA2VmpKMGExbFdUa2xSYmtwRVlYcEdlbFl5CmRHOVdNREZ4Vm14U1YxSXphR2hWYWtaUFl6RmFjd3BXYkdOTFZGUkJNRTFHV2toa1IwWm9UV3MxTUZWdGRHdFpWa2w1WVVaT1YwMUcKV2t4V2JGcHJWMGRTU0U5V1NrNVdiSEJKVmpKMFlXSXhVbk5VYTJob1UwVktSVmxZY0VkWFJsbDVDbVZIT1ZkTlJFWjRWbTE0VTFZeApTWHBoUjJoV1lXdGFVRmw2Um1GamQzQlhZa2RPVEZaR1VrdGlNVkpYVjJ4V1UySlZXbUZXYWtaTFUxWmFXR1JIT1doTlZXdzFXVlZhCmExWXdNVWNLVjJ0NFYySkdjR2hWYWtaaFpFWktkR1JGTlZkTlZYQmFWbTF3U2sxV1ZYbFNiazVVWWtkNFYxbHRkRXRTVm14WlkwVmsKV0ZKc1ZqVkRiVlpJVDFab1UwMUdXVEJYVkVKdldWWmtjd3BYYTFwWVlUTlNhRlZyVm1GamJIQkdWMnQwYW1RelFtaFZha3ByVkVaawpWMVZyVGxkTlJGWklWMnRvVDFkSFNsWlhiR1JWVmpOT05GUnJXbXRqYlVaSFZHMW9UbFpZUVhoV1ZtUTBDbUV4V1hkTlZXTkxWakowCk5GWXlTa2RqU0VwWFRVZFNXRlV3V2t0ak1WWnlUbFprVGxaWVFtRldiVEYzVXpBeFNGSllhRmhpYkVwVVZqQmtVMVZHV25SbFIwWlQKVm0xNFdsa3dWbXNLVjBaS2RHUkVUa1JpUjFJd1ZERmFiMVV3TVVkWFZFSllWa1ZLZGxWNlJscGxVWEJUWW10d2NWVnNhRzlXTVd4WQpaRWhrVmxKc1ZqUldNbmhQWVcxUmVsRnNiRnBpUjFGM1ZrVmFZUXBrUjFKSFdrWndWMkpJUWxsV2FrbzBWakZWZVZOc1dsaGlWVnBZCldWZDBZVlJHVlhoWGJVWllVakZLU1ZReFpHOVViRnBZWkhwR1dGWnNXbkpEYlVsNFYyeGtXR0V4YkV4V1ZFb3dDazVHV1hsVGEyUnEKVTBWd1dGUlZaRk5YUmxWM1YydDBhazFXV25sVWJGcHJWR3hhV1ZGdFJsZGlXRUpNVkZWa1NtVkdWbGxoUjJ4VFlsWktWbGRXVWtkawpNVnBYWWtoT1YySlZXbFFLVm0weE5HVldXWGxPVjNOTFZtcEtTMUl4WkhGUmExSm9aV3hhWVZZeWRHRmhNVkp6VTJ0YVdHRnNTbGhaCmJGSkdUVVpXVlZKc2NHdGtNMEpQVm14a2IxWXhiRlZVYlRsWVVtMTRlZ3BaVldNMVlXMUtTVkZyYUZkTmJrMHhXVmN4VW1Wc1JuVlMKYkZwb1lUSTVNMVpyVm1GWlVYQllVbFJHUmxWdGVFdFViVVY1WkhwQ1YyRnJhM2hXVkVwSFl6Rk9jMkZHV21sU01VcG9DbGRYZEdGawpNa1pIVmxoa1dHSklRbk5XYk
ZKWFYwWmtjbGR0ZEZkTlJFWktWVmQ0ZDFkR1duTlhiV2hFWWtaR05GWXhhR3RVYkZwWVZHdDRWMkZyCmIzZERhelZIVjFoc1ZHRXlVbkVLVlRCV2QxZEdVbFphUms1WFVteFdNMVl5ZERCaE1VbDRVMnRrVldKR2NISldSM2hoVjFaR2RGSnMKWkdsWFJVcE5Wa1pXWVdNeFpFZFViR3hwVW1zMVdWVnFTbTlXYkZweFVXMTBWZ3BOVjFKWVdXdG9VMkV4VGtoVmJGRkxWbTB3ZUU1RwpaSE5oTTJSWFlsaE9URlpxUW1GVE1sSklWV3RXVldFeFNuQlZha1pLWkRGYVJWSnRSbWhOVmtwNlZUSjBZVmRIUm5OVGJHaGFDbUpHClNrZFVWVnBYVmxaS2RHUkdUbXROTUVwYVYxY3hlazFYVGxkV2JrNW9VbnBzV0ZSV1pEUmxaM0JhVmxkTk1WWnRlRXRrVmtaeVlVWmsKVG1Kc1JqTlhWbU40VlcxV2MxSnVUbWdLVW01Q2IxUlhlRXRWVmxweVZtMUdhR1F6UWxsVmFrWkxVMVpSZUZkcmRHaFdiSEI2V1RCUwpZVll5Um5KaE0yaFdZV3RhV0ZwRldrOWpNV1J6WVVkc1UwMXRhRzlEYkdSWVpFZEdhd3BOYTFwSVZqSTFSMVV5U2taT1dFWlZWbTFTClZGUlZXbGRrUjFaSVVteGFUbUV6UWt0V1ZscHZZVEZrUjFkdVRsaGlWMmhGV1d0YVIwNUdXWGhoUjNSVllrWndXVlpIZERSV01rWnkKQ21JelpFUmhlbFpJV1d0YWExWkhSWGhqUm10TFYxZDRhMkl4WkVkVmJGcGhVbXMxVjFWdGVHRk5SbXQ2WTBaa1dGSnJiRE5aTUZacgpWbGRLUjJOSVNsZFNNMmhvVmpGYVIyTnRVa2NLV2tkc1YxSldiRFpXYkdoM1VXMVdSMVJyWkZSaVIzaHZWV3BDWVZaR1duRlRiVGxYCllrZFNXVnBGWkRCVWQzQlRZa2QzTUZkWGRHOVZNa1owVm01S1dHSkdSa3hXYlRCM1pVVTFTQXBXYms1WVlteEtVRlpxVGs5VVJscHgKVVcxR1ZFMXJNVFZWTW5SWFZqSkZlRk51UWxkaVdGSXpWVEo0WVZKV1NuUlNiV2hPVm10d05sWlVTakJaVm1SSFdrVm9hRkp0YUdGRApiVVY1Q2xWck9XRldWbkJ5Vm1wR2EyUkhVa2hqUjNST1RVVndZVll4V2xOU01sRjRXa1prYVZKc1dsWlphMVV4WWpGV2RHVkhSbXhWCldFSllXV3hTUjFOR2JGaE5WWFJVVWpGSk1sVXllR0VLWVZaa1NHRkliRmhXYlU0MFZsY3hWMk14U25WVWJXZExWVzAxUTJJeFVsaGwKUlhSV1ZteHdlVlp0TVVkaFIxRjZVV3hzVm1GcldreFZNVnBYWkVkV1NHUkdWbWxTV0VKSlZtcEtNQXBqTVZsNVVtNUthV1ZyU21GWgpWM1IzVTBac05sSnJPV3BOYTFwSVZqSXhjMVV3TVZaalJtaEVZa1p3TUZrd1ZUVldVWEJPVWtaYVNWWnNZekZUTVdSSVUydHNVbUpyCk5WaFphMXBMQ2xkR1duRlNiRXBzVW0xU01GcEZXbXRVYkVwR1YydDBWMVp0VVhkYVJFWmFaVlpPY21GR1dsZFNWWEJHVjFkNFYxWXcKTlVkWFdHaG9Vak5TVmxsclduZFhWbHBJWkVSU1YwMXJXbmtLUTIxSmVscEZVbWhsYkZwSlZtcEdiMkV4VW5OYVJXUllZbFJvVEZacwpaSGRUTWtsNFdrVmtWbUpHY0ZsWmEyUlRWVVpXZEdWSVpHeGlSbXcxV2tWa01HRkZNVlppUkZKV1RXNVNjZ3BXYkdSTFVqSk9TVk5zCmNGZGlTRUpSVmxjeE5GUXlUblJWYTJOTFYydGtjMWRHU2xaalJUVlhZVEZ3V0ZsVVJrOWtSbH
B5V2taa2FWSXphSFpXVjNCRFdWWmEKVjFadVRtaFNWVFZYQ2xWdGVIZFdiRlpZVFZSQ1ZXUXpRbFJaYTJRelRVWmtXR1JIT1ZSTlZtdzFWV3hvZDFadFNrZFNhM2hYWWxSQwpNMXBWVlRGVFFYQlhZa2RTV1ZsdGVFdFZSbHB6VlcwNVZWSnNjSGtLVmxkMGExWkZNWEpOVkZKWVlUSm9TRlpYTVVabFJrNTFVV3hrCmFWSnJjRmxXVkVaaFdWZE9jMk5GVmxaaVYyaFBWbTEwZDA1c1duRlRhbEpwVFZaYVNGWkhkRzlpUmtwMFlVZEdSQXBpVlhCSVEyeE8KY2xwR1ZsZFdia0paVm0xNFlWTXlUWGxVYTJoc1VteHdXVlZzVm5kV01WbDRXa1JDV2xadGVGaFdNblJyWVZaS2MxZHNXbHBpUmtwNgpWakZhVjJSSFZrWmtSbWhTQ2xkRlNsbFdSbVIzVmpKT2MxZHVTbGhoTTFKeVZXcEdTazFzVlhsbFIwVkxXVlphYTFJeFRuVmlSbWhYCllsVTBNRlpzWTNoV01rMTRVMjVXVm1KWFpFeFdWekUwWkRKSmVHSkdWbFFLVmtaYVQxUldXbmRsVmxwMFRWVmtXR0pHV2xwVlYzaHoKVm0xS1IxTnJhRmRpV0doWVZqQlZlRlpXUm5OV2JXeFRZbXRHTkZac1dtdE9RWEJxVFZac05WVnROVXRoVlRCNVZXMUdWd3BoTVZwbwpXVlZhZDFKc1pISmtSbWhYVFVoQ1NWWnFTWGhqTWtaR1RWWnNVbUpIYUVWWmExcDNUVVpTVm1GSE9WZGFNMEpQVm0wMVExTldXbkZTCmJUbHBUVmRTU1ZVeWVHRlhSMHBIQ2xkc1pGcFdNMUpvUTJ4U1IxWllhRmhpUjFKeVZXcEdZVk5XVm5SbFIwWlZZbFZXTmxWWGREQlcKTVZwMFZXcE9XRlpzY0dGYVZscExaRlpPZEdGRk5VNWlWMmhIVmpGa01GWnRWa1lLVGxWa1dGZEhlSE5WYWs1VFYxWldjVkZzWkU5UwpiWGg1Vm0xd1UxWXdNVmRqUldOTFVUSXhTbVZzY0VaVmJXaHNZa1p3U2xadGNFZGlNazE0Vmxob1lWSlhhRzlWTUZaWFZFWmFjd3BhClNHUlVUV3RzTkZZeGFHOWhWa3AwWVVoS1ZtRnJTbWhaTVZwelkyeHdSVlJyTldsU2JHOTNWa2Q0YTAxR1dYaFRia3BwVWtaS1YxUlgKTlZOalozQlhZa2RTVEZWcVNrOVRWazV6Q2xwRk5WTmlhMHBPVm0wd2QyVkdVWGhUYmxKV1lUSk9URll5ZEd0ak1WbDRVMnhrYVZKRwpjRmhaYTFwTFZFWndXRTFXWkZOTlYxSmFXVlZhYjJGV1NYcGhTR1JYVm5wRk1GVjZTa29LWlVaV2MyRkhlRk5YUmtwWlEyeHNjbHBHClRsaFdia0pIVjJ0U1EyRkdWbGxSYXpsWFlsUkZlbFJWV210WFIxSklUMVphVG1FeFdUQldhMlF3WWpGYWRGTnJaRk5oTTJoWVdXeFMKUXdwTk1YQldWbFJHVkZGWVFsaFpiWE14VjFac2RHVkZkRlpTYkhCNFZrZHpOVlpXU25OalJFRkxWMnRXYTFJeFpITlhXR1JPVmtaSwpWMVJYY0ZkVFJscDBUVlZhYkZKck5URlZWM2hoQ21GV1pFaGFNM0JYVWpOb2FGZFdXa3RXTVU1MVZXeE9hVll5YUZCV2JURXdaREExCmMxZHVSbFJoYkVwd1ZGWmFZVk5HV2toa1J6bHBVbXR3TUZsVlpFZFNRWEJwVmxoQ1NWRXllRThLWTJ4d1IxWnNaRmRpYTBwMlZtMHgKTkZsV1RYbFVXR3hWWVRKb2MxVnRlSGRYVmxaelZtNWtWMkpHYkRSWFZFNXZWR3hKZUZKcVZsZFNNMmh5Vm1wS1MyTnJOVmhQVmxwcApZbXRLTmdwV01WcGhXVm
RTUms1V1dsVmlSMmhYUTJ4a1JsTnRPVmRXTTJoeVZsUkdUMUl5U2tkaFJUVlhWMFpLVmxadE1UQlpWMVpYCldraEtXR0pVYUV4WFZsWldaVVpaZVZScmJHbFNiVkp3Q2xWdGRIZFVWbHB6V1ROb1YwMXJNVFJWTWpWWFZsZEtXR1JGZUZkV2VrRjQKVlZSS1NtVkdWbk5oUjNkTFZXeG9VMVl4V25SbFNHUnNWbXh3TUZSV1ZtdGhSa2w0VW1wYVZsWXphSG9LVm0weFIyTnNaSFJoUmxwTwpVbTVDYUZkc1dsWmxSbHBYVW01T1YySlhlRmhXTUZaTFUxWmFkR05GWkZaa00wSlRWRlphYzA1V1ZuUk9WWFJvVmxSQ05WWlhlRzlYClozQlhUVEZLYndwV2JYQkhaREZaZUZwSVNsQldNMEp3Vm14YWQxTldXbkZUV0docVRWWldOVlV5TlV0V1IwcElZVVZXV21FeGNETlUKVlZweVpERmFWVlpzWkdGTk1FcFFWbGQwVjFOck1VZGFSbFpTQ21KVlduQlVWM1IzVTBaVmVVNVdUbGRpVlhCSlEyMVdSMXBHY0ZkTgpNVXB2VVRJeFIxSXhXbGxhUm1ocFYwWktlRmRYZEd0Vk1sWnpXa2hLWVZKNmJGaFVWM1JYVG14V1dFMVZaRmNLVFZad01GWkhjRk5XCmJVWnlWMjFHWVZac2NFeFdNV1JMVWpGa2MyRkdUazVXV0VKSVZtcEdZV0l5VVhoWFdHZExWa2QwYTFZeFpFaGxTRXBXWVdzMVZGbHEKUm5OamJGcDFXa1pTVXdwaVdHZzFWbTB4ZDFVeFdYZE5WbHBxVTBjNVRGVlVTalJoTWsxNFZtNUtWbUpYZUZoV2ExWldaREZhYzFWcgpkRTVTTUZZMVZXMDFUMVpIUlhsVmJrWldZa1pLZGxaRldtRmpkM0JoQ2xKRlNtOVVWVkpYVTBaVmVXVkhkRnBXYXpWSVZqSTFRMVpXCldrWmpSbEpYVm14d2FGbDZSbUZXVmtwMFpFWmthVkp1UWtwV2JYaGhZakpGZUZkcmFGWlhSM2hSVld0a05GSldaSFVLWWpOa1VGVlkKUWtWWGJtOTNUMVZPYmxCVU1Fc0sK")
# The payload assigned above was already base64-decoded once; the blob is
# base64-encoded 16 times in total, so apply the remaining 15 rounds in a
# loop instead of 15 copy-pasted statements.
for _ in range(15):
    Basic = base64.b64decode(Basic)
# The innermost layer is uuencoded; decode it to recover the plaintext.
Basic = codecs.decode(Basic, "uu")
# Parenthesized print works under both Python 2 and Python 3 for a single
# argument (the original `print Basic` statement was Python-2-only).
print(Basic)
| 334.652174 | 7,140 | 0.98285 | 75 | 7,697 | 100.866667 | 0.12 | 0.023265 | 0.0423 | 0.04957 | 0.050231 | 0.050231 | 0.050231 | 0.050231 | 0.050231 | 0.050231 | 0 | 0.088898 | 0.007665 | 7,697 | 22 | 7,141 | 349.863636 | 0.901545 | 0 | 0 | 0.75 | 0 | 0 | 0.924256 | 0.923996 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0.1 | null | null | 0.05 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e6348f582600b4332b649d1996fd2afa215fba96 | 8,459 | py | Python | tests/test_telegram_bot.py | juanitodread/gorrion | 1f2c16b7402c237dfb4b47f0fa0afeb3bff7bd19 | [
"Apache-2.0"
] | 1 | 2020-09-18T17:53:03.000Z | 2020-09-18T17:53:03.000Z | tests/test_telegram_bot.py | juanitodread/gorrion | 1f2c16b7402c237dfb4b47f0fa0afeb3bff7bd19 | [
"Apache-2.0"
] | 6 | 2020-10-27T03:31:41.000Z | 2021-09-16T18:58:44.000Z | tests/test_telegram_bot.py | juanitodread/gorrion | 1f2c16b7402c237dfb4b47f0fa0afeb3bff7bd19 | [
"Apache-2.0"
] | null | null | null | from unittest.mock import patch, MagicMock
from src.clients.spotify import SpotifyApiError
from src.clients.twitter import PublishedTweet
from src.telegram_bot import TelegramBot
class TestTelegramBot:
    """Unit tests for ``TelegramBot.process_event`` command dispatching.

    Each test stubs ``Update.de_json`` so the bot sees a fake incoming
    Telegram message, then asserts on the messages sent back through the
    mocked ``Bot``.
    """

    # Chat id carried by every stubbed incoming message.
    _CHAT_ID = '123'

    # Help text the bot sends after /start and after an unknown command.
    _COMMANDS_TEXT = (
        'Supported commands are: \n\n'
        '/start\n'
        '/playing\n'
        '/lyric\n'
        '/album\n'
        '/tracks\n'
        '/about'
    )

    @classmethod
    def _stub_incoming_message(cls, update_mock, text):
        """Make ``Update.de_json`` return a fake message with *text* from chat 123."""
        update = MagicMock()
        update.message.text = text
        update.message.chat.id = cls._CHAT_ID
        update_mock.de_json.return_value = update

    @classmethod
    def _assert_sent(cls, bot_mock, text):
        """Assert that *text* was sent to the stubbed chat at least once."""
        bot_mock.return_value.send_message.assert_any_call(
            chat_id=cls._CHAT_ID,
            text=text,
        )

    @patch('src.telegram_bot.Bot')
    def test_constructor(self, bot_mock):
        telegram_bot = TelegramBot()

        assert telegram_bot._bot is not None
        bot_mock.assert_called_once_with(token=None)

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    def test_process_event_when_invalid_command(self, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, 'invalid-command')

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'Invalid command')
        self._assert_sent(bot_mock, self._COMMANDS_TEXT)

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    def test_process_start_command(self, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/start')

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'Welcome to Gorrion Bot 🐦🤖')
        self._assert_sent(bot_mock, self._COMMANDS_TEXT)

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    @patch('src.telegram_bot.Gorrion')
    def test_process_playing_command(self, gorrion_mock, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/playing')
        gorrion_mock.return_value.playing.return_value.tweet = 'tweet-message'

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'tweet-message')

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    @patch('src.telegram_bot.Gorrion')
    def test_process_playing_command_when_error(self, gorrion_mock, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/playing')
        gorrion_mock.return_value.playing.side_effect = SpotifyApiError('error')

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'error')

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    @patch('src.telegram_bot.Gorrion')
    def test_process_playing_with_lyrics_command(self, gorrion_mock, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/lyric')
        gorrion_mock.return_value.playing_with_lyrics.return_value = [
            PublishedTweet(id_='1', tweet='tweet1', entity=None),
            PublishedTweet(id_='2', tweet='lyric1', entity=None),
        ]

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'tweet1')
        self._assert_sent(bot_mock, 'lyric1')

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    @patch('src.telegram_bot.Gorrion')
    def test_process_playing_with_lyrics_command_when_error(self, gorrion_mock, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/lyric')
        gorrion_mock.return_value.playing_with_lyrics.side_effect = SpotifyApiError('error')

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'error')

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    @patch('src.telegram_bot.Gorrion')
    def test_process_playing_album_command(self, gorrion_mock, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/album')
        gorrion_mock.return_value.playing_album.return_value = PublishedTweet(id_='1', tweet='tweet1', entity=None)

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'tweet1')

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    @patch('src.telegram_bot.Gorrion')
    def test_process_playing_album_command_with_error(self, gorrion_mock, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/album')
        gorrion_mock.return_value.playing_album.side_effect = SpotifyApiError('error')

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'error')

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    @patch('src.telegram_bot.Gorrion')
    def test_process_playing_album_with_tracks_command(self, gorrion_mock, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/tracks')
        gorrion_mock.return_value.playing_album_with_tracks.return_value = [
            PublishedTweet(id_='1', tweet='album-tweet', entity=None),
            PublishedTweet(id_='2', tweet='tracks-tweet', entity=None),
        ]

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'album-tweet')
        self._assert_sent(bot_mock, 'tracks-tweet')

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    @patch('src.telegram_bot.Gorrion')
    def test_process_playing_album_with_tracks_command_with_error(self, gorrion_mock, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/tracks')
        gorrion_mock.return_value.playing_album_with_tracks.side_effect = SpotifyApiError('error')

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'error')

    @patch('src.telegram_bot.Update')
    @patch('src.telegram_bot.Bot')
    def test_process_about_command(self, bot_mock, update_mock):
        self._stub_incoming_message(update_mock, '/about')

        TelegramBot().process_event({})

        self._assert_sent(bot_mock, 'Made with ❤️ by @juanitodread')
| 34.526531 | 115 | 0.632817 | 997 | 8,459 | 5.049147 | 0.075226 | 0.122368 | 0.088995 | 0.117004 | 0.903457 | 0.903457 | 0.886174 | 0.880413 | 0.880413 | 0.862932 | 0 | 0.014125 | 0.255113 | 8,459 | 244 | 116 | 34.668033 | 0.784161 | 0 | 0 | 0.710145 | 0 | 0 | 0.144225 | 0.052607 | 0 | 0 | 0 | 0 | 0.082126 | 1 | 0.057971 | false | 0 | 0.019324 | 0 | 0.082126 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e653e35082cae6144e135f84fd39945765e87b50 | 27,450 | py | Python | eoflow/models/segmentation_task/segmentation_unets.py | JDESLOIRES/eo-flow | def495e9292809656b906cfd6b8e7389ff9cea61 | [
"MIT"
] | null | null | null | eoflow/models/segmentation_task/segmentation_unets.py | JDESLOIRES/eo-flow | def495e9292809656b906cfd6b8e7389ff9cea61 | [
"MIT"
] | null | null | null | eoflow/models/segmentation_task/segmentation_unets.py | JDESLOIRES/eo-flow | def495e9292809656b906cfd6b8e7389ff9cea61 | [
"MIT"
] | null | null | null | import logging
import tensorflow as tf
from marshmallow import fields
from eoflow.models.layers import Conv2D, Deconv2D, CropAndConcat, Conv3D, MaxPool3D, Reduce3DTo2D, ResConv2D, PyramidPoolingModule
from .segmentation_base import BaseSegmentationModel
# NOTE(review): this configures the *root* logger at import time, which
# affects every application that imports this module; consider a module-level
# `logging.getLogger(__name__)` instead — confirm no caller relies on this.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(levelname)s %(message)s')
class FCNModel(BaseSegmentationModel):
    """ Implementation of a vanilla Fully-Convolutional-Network (aka U-net) """

    class FCNModelSchema(BaseSegmentationModel._Schema):
        # Marshmallow schema describing the architecture hyper-parameters;
        # values are read at build time through ``self.config``.
        n_layers = fields.Int(required=True, description='Number of layers of the FCN model', example=10)
        keep_prob = fields.Float(required=True, description='Keep probability used in dropout layers.', example=0.5)
        features_root = fields.Int(required=True, description='Number of features at the root level.', example=32)
        conv_size = fields.Int(missing=3, description='Size of the convolution kernels.')
        deconv_size = fields.Int(missing=2, description='Size of the deconvolution kernels.')
        conv_stride = fields.Int(missing=1, description='Stride used in convolutions.')
        dilation_rate = fields.List(fields.Int, missing=1, description='Dilation rate used in convolutions.')
        add_dropout = fields.Bool(missing=False, description='Add dropout to layers.')
        add_batch_norm = fields.Bool(missing=True, description='Add batch normalization to layers.')
        bias_init = fields.Float(missing=0.0, description='Bias initialization value.')
        use_bias = fields.Bool(missing=True, description='Add bias parameters to convolutional layer.')
        padding = fields.String(missing='VALID', description='Padding type used in convolutions.')
        pool_size = fields.Int(missing=2, description='Kernel size used in max pooling.')
        pool_stride = fields.Int(missing=2, description='Stride used in max pooling.')
        class_weights = fields.List(fields.Float, missing=None, description='Class weights used in training.')

    def build(self, inputs_shape):
        """Builds the net for input x.

        Constructs the full encoder/decoder Keras model and stores it on
        ``self.net``; ``inputs_shape`` includes the batch dimension, which is
        stripped before creating the Input layer.
        """
        x = tf.keras.layers.Input(inputs_shape[1:])
        # config.keep_prob is a keep probability; Keras dropout wants a drop rate
        dropout_rate = 1 - self.config.keep_prob
        # Encoding path
        # the number of features of the convolutional kernels is proportional to the square of the level
        # for instance, starting with 32 features at the first level (layer=0), there will be 64 features at layer=1 and
        # 128 features at layer=2
        net = x
        # outputs of each encoder level, saved for the skip connections
        connection_outputs = []
        for layer in range(self.config.n_layers):
            # compute number of features as a function of network depth level
            features = 2 ** layer * self.config.features_root
            # bank of two convolutional filters
            conv = Conv2D(
                filters=features,
                kernel_size=self.config.conv_size,
                strides=self.config.conv_stride,
                dilation=self.config.dilation_rate,
                add_dropout=self.config.add_dropout,
                dropout_rate=dropout_rate,
                batch_normalization=self.config.add_batch_norm,
                padding=self.config.padding,
                use_bias=self.config.use_bias,
                num_repetitions=2)(net)
            connection_outputs.append(conv)
            # max pooling operation
            net = tf.keras.layers.MaxPool2D(
                pool_size=self.config.pool_size,
                strides=self.config.pool_stride,
                padding='SAME')(conv)
        # bank of 2 convolutional filters at bottom of U-net.
        bottom = Conv2D(
            filters=2 ** self.config.n_layers * self.config.features_root,
            kernel_size=self.config.conv_size,
            strides=self.config.conv_stride,
            dilation=self.config.dilation_rate,
            add_dropout=self.config.add_dropout,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            use_bias=self.config.use_bias,
            num_repetitions=2,
            padding=self.config.padding)(net)
        net = bottom
        # Decoding path
        # the decoding path mirrors the encoding path in terms of number of features per convolutional filter
        for layer in range(self.config.n_layers):
            # find corresponding level in decoding branch
            conterpart_layer = self.config.n_layers - 1 - layer
            # get same number of features as counterpart layer
            features = 2 ** conterpart_layer * self.config.features_root
            deconv = Deconv2D(
                filters=features,
                kernel_size=self.config.deconv_size,
                batch_normalization=self.config.add_batch_norm)(net)
            # # skip connections to concatenate features from encoding path
            cc = CropAndConcat()(connection_outputs[conterpart_layer],
                                 deconv)
            # bank of 2 convolutional filters
            net = Conv2D(
                filters=features,
                kernel_size=self.config.conv_size,
                strides=self.config.conv_stride,
                dilation=self.config.dilation_rate,
                add_dropout=self.config.add_dropout,
                dropout_rate=dropout_rate,
                batch_normalization=self.config.add_batch_norm,
                use_bias=self.config.use_bias,
                num_repetitions=2,
                padding=self.config.padding)(cc)
        # final 1x1 convolution corresponding to pixel-wise linear combination of feature channels
        logits = tf.keras.layers.Conv2D(
            filters=self.config.n_classes,
            kernel_size=1)(net)
        logits = tf.keras.layers.Softmax()(logits)
        self.net = tf.keras.Model(inputs=x, outputs=logits)

    def call(self, inputs, training=None):
        """Run the built network on `inputs` (per-pixel softmax probabilities)."""
        return self.net(inputs, training)
class TFCNModel(BaseSegmentationModel):
    """ Implementation of a Temporal Fully-Convolutional-Network """

    class TFCNModelSchema(BaseSegmentationModel._Schema):
        # Marshmallow schema describing the architecture hyper-parameters;
        # values are read at build time through ``self.config``.
        n_layers = fields.Int(required=True, description='Number of layers of the FCN model', example=10)
        keep_prob = fields.Float(required=True, description='Keep probability used in dropout layers.', example=0.5)
        features_root = fields.Int(required=True, description='Number of features at the root level.', example=32)
        conv_size = fields.Int(missing=3, description='Size of the convolution kernels.')
        deconv_size = fields.Int(missing=2, description='Size of the deconvolution kernels.')
        conv_size_reduce = fields.Int(missing=3, description='Size of the kernel for time reduction.')
        conv_stride = fields.Int(missing=1, description='Stride used in convolutions.')
        add_dropout = fields.Bool(missing=False, description='Add dropout to layers.')
        add_batch_norm = fields.Bool(missing=True, description='Add batch normalization to layers.')
        bias_init = fields.Float(missing=0.0, description='Bias initialization value.')
        use_bias = fields.Bool(missing=True, description='Add bias parameters to convolutional layer.')
        padding = fields.String(missing='VALID', description='Padding type used in convolutions.')
        single_encoding_conv = fields.Bool(missing=False, description="Whether to apply 1 or 2 banks of conv filters.")
        pool_size = fields.Int(missing=2, description='Kernel size used in max pooling.')
        pool_stride = fields.Int(missing=2, description='Stride used in max pooling.')
        pool_time = fields.Bool(missing=False, description='Operate pooling over time dimension.')
        class_weights = fields.List(fields.Float, missing=None, description='Class weights used in training.')

    def build(self, inputs_shape):
        """Builds the temporal FCN for input shape `inputs_shape`.

        Like FCNModel.build but with 3D (spatio-temporal) convolutions on the
        encoding path; the temporal dimension is collapsed with Reduce3DTo2D
        before the 2D decoding path. The built model is stored on ``self.net``.
        """
        x = tf.keras.layers.Input(inputs_shape[1:])
        # config.keep_prob is a keep probability; Keras dropout wants a drop rate
        dropout_rate = 1 - self.config.keep_prob
        num_repetitions = 1 if self.config.single_encoding_conv else 2
        # encoding path
        net = x
        # outputs of each encoder level, saved for the skip connections
        connection_outputs = []
        for layer in range(self.config.n_layers):
            # compute number of features as a function of network depth level
            features = 2 ** layer * self.config.features_root
            # bank of one 3d convolutional filter; convolution is done along the temporal as well as spatial directions
            conv = Conv3D(
                features,
                kernel_size=self.config.conv_size,
                strides=self.config.conv_stride,
                add_dropout=self.config.add_dropout,
                dropout_rate=dropout_rate,
                batch_normalization=self.config.add_batch_norm,
                num_repetitions=num_repetitions,
                use_bias=self.config.use_bias,
                padding=self.config.padding)(net)
            connection_outputs.append(conv)
            # max pooling operation
            net = MaxPool3D(
                kernel_size=self.config.pool_size,
                strides=self.config.pool_stride,
                pool_time=self.config.pool_time)(conv)
        # Bank of 1 3d convolutional filter at bottom of FCN
        bottom = Conv3D(
            2 ** self.config.n_layers * self.config.features_root,
            kernel_size=self.config.conv_size,
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            num_repetitions=num_repetitions,
            padding=self.config.padding,
            use_bias=self.config.use_bias,
            convolve_time=(not self.config.pool_time))(net)
        # Reduce temporal dimension
        bottom = Reduce3DTo2D(
            2 ** self.config.n_layers * self.config.features_root,
            kernel_size=self.config.conv_size_reduce,
            stride=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            dropout_rate=dropout_rate)(bottom)
        net = bottom
        # decoding path
        for layer in range(self.config.n_layers):
            # find corresponding level in decoding branch
            conterpart_layer = self.config.n_layers - 1 - layer
            # get same number of features as counterpart layer
            features = 2 ** conterpart_layer * self.config.features_root
            # transposed convolution to upsample tensors
            deconv = Deconv2D(
                filters=features,
                kernel_size=self.config.deconv_size,
                batch_normalization=self.config.add_batch_norm)(net)
            # skip connection with linear combination along time
            reduced = Reduce3DTo2D(
                features,
                kernel_size=self.config.conv_size_reduce,
                stride=self.config.conv_stride,
                add_dropout=self.config.add_dropout,
                dropout_rate=dropout_rate)(connection_outputs[conterpart_layer])
            # crop and concatenate
            cc = CropAndConcat()(reduced, deconv)
            # bank of 2 convolutional layers as in standard FCN
            net = Conv2D(
                features,
                kernel_size=self.config.conv_size,
                strides=self.config.conv_stride,
                add_dropout=self.config.add_dropout,
                dropout_rate=dropout_rate,
                batch_normalization=self.config.add_batch_norm,
                padding=self.config.padding,
                use_bias=self.config.use_bias,
                num_repetitions=2)(cc)
        # final 1x1 convolution corresponding to pixel-wise linear combination of feature channels
        logits = tf.keras.layers.Conv2D(
            filters=self.config.n_classes,
            kernel_size=1)(net)
        logits = tf.keras.layers.Softmax()(logits)
        self.net = tf.keras.Model(inputs=x, outputs=logits)

    def call(self, inputs, training=None):
        """Run the built network on `inputs` (per-pixel softmax probabilities)."""
        return self.net(inputs, training)
class ResUnetA(FCNModel):
    """
    ResUnetA

    https://github.com/feevos/resuneta/tree/145be5519ee4bec9a8cce9e887808b8df011f520/models

    NOTE: The input to this network is a dictionary specifying input features and three output target images. This
    might require some modification to the functions used to automate training and evaluation. Get in touch through
    issues if this happens.

    TODO: build architecture from parameters as for FCn and TFCN
    """

    def build(self, inputs_shape):
        """Builds the net for input x.

        The architecture is hard-wired as 30 numbered "blocks": an encoder of
        alternating multi-dilation residual convolutions (``ResConv2D``) and
        strided-convolution downsampling, a pyramid-pooling bridge, a mirrored
        decoder with skip connections, and three conditioned output heads
        (distance -> boundary -> extent), each conditioned on the previous
        head's logits.

        ``inputs_shape`` is a dict; only ``inputs_shape['features']`` is used
        here (its leading batch dimension is stripped for the Input layer).
        """
        x = tf.keras.layers.Input(shape=inputs_shape['features'][1:], name='features')

        dropout_rate = 1 - self.config.keep_prob

        # block 1: 1x1 stem convolution lifting input to `features_root` channels
        initial_conv = Conv2D(
            filters=self.config.features_root,
            kernel_size=1,  # 1x1 kernel
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            dropout_rate=dropout_rate,
            use_bias=self.config.use_bias,
            batch_normalization=True,
            padding=self.config.padding,
            num_repetitions=1)(x)

        # --- encoder: ResConv2D blocks interleaved with strided-conv downsampling ---
        # block 2
        resconv_1 = ResConv2D(
            filters=self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1, 3, 15, 31],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            dropout_rate=dropout_rate,
            use_bias=self.config.use_bias,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=4)(initial_conv)

        # block 3: downsample (strided conv acting as pooling)
        pool_1 = Conv2D(
            filters=2 * self.config.features_root,
            kernel_size=self.config.pool_size,
            strides=self.config.pool_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding='SAME',
            num_repetitions=1)(resconv_1)

        # block 4
        resconv_2 = ResConv2D(
            filters=2 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1, 3, 15, 31],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=4)(pool_1)

        # block 5
        pool_2 = Conv2D(
            filters=4 * self.config.features_root,
            kernel_size=self.config.pool_size,
            strides=self.config.pool_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding='SAME',
            num_repetitions=1)(resconv_2)

        # block 6: dilation set shrinks as spatial resolution drops
        resconv_3 = ResConv2D(
            filters=4 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1, 3, 15],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=3)(pool_2)

        # block 7
        pool_3 = Conv2D(
            filters=8 * self.config.features_root,
            kernel_size=self.config.pool_size,
            strides=self.config.pool_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding='SAME',
            num_repetitions=1)(resconv_3)

        # block 8
        resconv_4 = ResConv2D(
            filters=8 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1, 3, 15],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=3)(pool_3)

        # block 9
        pool_4 = Conv2D(
            filters=16 * self.config.features_root,
            kernel_size=self.config.pool_size,
            strides=self.config.pool_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding='SAME',
            num_repetitions=1)(resconv_4)

        # block 10: deepest encoder levels use a single dilation rate
        resconv_5 = ResConv2D(
            filters=16 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=1)(pool_4)

        # block 11
        pool_5 = Conv2D(
            filters=32 * self.config.features_root,
            kernel_size=self.config.pool_size,
            strides=self.config.pool_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding='SAME',
            num_repetitions=1)(resconv_5)

        # block 12
        resconv_6 = ResConv2D(
            filters=32 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=1)(pool_5)

        # block 13: pyramid-pooling bridge at the bottleneck
        ppm1 = PyramidPoolingModule(filters=32 * self.config.features_root,
                                    batch_normalization=True)(resconv_6)

        # --- decoder: upsample, concatenate encoder skip, 1x1 fuse, ResConv2D ---
        # block 14
        deconv_1 = Deconv2D(
            filters=32 * self.config.features_root,
            kernel_size=self.config.deconv_size,
            batch_normalization=self.config.add_batch_norm)(ppm1)

        # block 15
        concat_1 = CropAndConcat()(resconv_5, deconv_1)
        concat_1 = Conv2D(
            filters=16 * self.config.features_root,
            kernel_size=1,  # 1x1 kernel
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=True,  # maybe
            padding=self.config.padding,
            num_repetitions=1)(concat_1)

        # block 16
        resconv_7 = ResConv2D(
            filters=16 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=1)(concat_1)

        # block 17
        deconv_2 = Deconv2D(
            filters=16 * self.config.features_root,
            kernel_size=self.config.deconv_size,
            batch_normalization=self.config.add_batch_norm)(resconv_7)

        # block 18
        concat_2 = CropAndConcat()(resconv_4, deconv_2)
        concat_2 = Conv2D(
            filters=8 * self.config.features_root,
            kernel_size=1,  # 1x1 kernel
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=True,  # maybe
            padding=self.config.padding,
            num_repetitions=1)(concat_2)

        # block 19
        resconv_8 = ResConv2D(
            filters=8 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1, 3, 15],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=3)(concat_2)

        # block 20
        deconv_3 = Deconv2D(
            filters=8 * self.config.features_root,
            kernel_size=self.config.deconv_size,
            batch_normalization=self.config.add_batch_norm)(resconv_8)

        # block 21
        concat_3 = CropAndConcat()(resconv_3, deconv_3)
        concat_3 = Conv2D(
            filters=4 * self.config.features_root,
            kernel_size=1,  # 1x1 kernel
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=True,
            padding=self.config.padding,
            num_repetitions=1)(concat_3)

        # block 22
        resconv_9 = ResConv2D(
            filters=4 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1, 3, 15],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=3)(concat_3)

        # block 23
        deconv_4 = Deconv2D(
            filters=4 * self.config.features_root,
            kernel_size=self.config.deconv_size,
            batch_normalization=self.config.add_batch_norm)(resconv_9)

        # block 24
        concat_4 = CropAndConcat()(resconv_2, deconv_4)
        concat_4 = Conv2D(
            filters=2 * self.config.features_root,
            kernel_size=1,  # 1x1 kernel
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=True,
            padding=self.config.padding,
            num_repetitions=1)(concat_4)

        # block 25
        resconv_10 = ResConv2D(
            filters=2 * self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1, 3, 15, 31],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=4)(concat_4)

        # block 26
        deconv_5 = Deconv2D(
            filters=2 * self.config.features_root,
            kernel_size=self.config.deconv_size,
            batch_normalization=self.config.add_batch_norm)(resconv_10)

        # block 27
        concat_5 = CropAndConcat()(resconv_1, deconv_5)
        concat_5 = Conv2D(
            filters=self.config.features_root,
            kernel_size=1,  # 1x1 kernel
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=True,
            padding=self.config.padding,
            num_repetitions=1)(concat_5)

        # block 28
        resconv_11 = ResConv2D(
            filters=self.config.features_root,
            kernel_size=self.config.conv_size,
            dilation=[1, 3, 15, 31],
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            padding=self.config.padding,
            num_parallel=4)(concat_5)

        # block 29: final skip back to the stem features
        concat_6 = CropAndConcat()(initial_conv, resconv_11)
        concat_6 = Conv2D(
            filters=self.config.features_root,
            kernel_size=1,  # 1x1 kernel
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            use_bias=self.config.use_bias,
            dropout_rate=dropout_rate,
            batch_normalization=True,
            padding=self.config.padding,
            num_repetitions=1)(concat_6)

        # block 30: second pyramid pooling on the fused full-resolution features
        ppm2 = PyramidPoolingModule(filters=self.config.features_root,
                                    batch_normalization=True)(concat_6)

        # conditioned multi-tasking: distance head first, its logits feed the
        # boundary head, whose logits in turn feed the extent head
        # first get distance
        # NOTE(review): the distance head reads `concat_6` (pre-pyramid-pooling)
        # while the later heads read `ppm2` — confirm against the reference repo.
        distance_conv = Conv2D(
            filters=self.config.features_root,
            kernel_size=self.config.conv_size,
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            num_repetitions=2,
            padding=self.config.padding)(concat_6)  # in last layer we take the combined features
        logits_distance = tf.keras.layers.Conv2D(filters=self.config.n_classes, kernel_size=1)(distance_conv)
        logits_distance = tf.keras.layers.Softmax(name='distance')(logits_distance)

        # concatenate distance logits to features
        dcc = CropAndConcat()(ppm2, logits_distance)
        boundary_conv = Conv2D(
            filters=self.config.features_root,
            kernel_size=self.config.conv_size,
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            num_repetitions=1,
            padding=self.config.padding)(dcc)
        logits_boundary = tf.keras.layers.Conv2D(filters=self.config.n_classes, kernel_size=1)(boundary_conv)
        logits_boundary = tf.keras.layers.Softmax(name='boundary')(logits_boundary)

        # condition the extent head on the boundary logits as well
        bdcc = CropAndConcat()(dcc, logits_boundary)
        extent_conv = Conv2D(
            filters=self.config.features_root,
            kernel_size=self.config.conv_size,
            strides=self.config.conv_stride,
            add_dropout=self.config.add_dropout,
            dropout_rate=dropout_rate,
            batch_normalization=self.config.add_batch_norm,
            num_repetitions=2,
            padding=self.config.padding)(bdcc)
        logits_extent = tf.keras.layers.Conv2D(filters=self.config.n_classes, kernel_size=1)(extent_conv)
        logits_extent = tf.keras.layers.Softmax(name='extent')(logits_extent)

        self.net = tf.keras.Model(inputs=x, outputs=[logits_extent, logits_boundary, logits_distance])

    def call(self, inputs, training=True):
        # NOTE(review): `training` defaults to True here whereas the sibling
        # model's call() defaults to None — confirm this is intentional.
        # `training` is also passed positionally into the Keras model call.
        return self.net(inputs, training)
| 42.101227 | 130 | 0.628561 | 3,208 | 27,450 | 5.168017 | 0.089152 | 0.155015 | 0.051752 | 0.053079 | 0.804874 | 0.778636 | 0.776645 | 0.769045 | 0.766813 | 0.753061 | 0 | 0.020992 | 0.28674 | 27,450 | 651 | 131 | 42.165899 | 0.825783 | 0.091767 | 0 | 0.75102 | 0 | 0 | 0.047177 | 0 | 0 | 0 | 0 | 0.001536 | 0 | 1 | 0.012245 | false | 0 | 0.010204 | 0.006122 | 0.038776 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e66af010d9211d43a11247fe37d1de7638e339a3 | 519 | py | Python | 03/03/replace.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | null | null | null | 03/03/replace.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | 39 | 2017-07-31T22:54:01.000Z | 2017-08-31T00:19:03.000Z | 03/03/replace.py | pylangstudy/201708 | 126b1af96a1d1f57522d5a1d435b58597bea2e57 | [
"CC0-1.0"
] | null | null | null | s = b'abcabc'; print(s, s.replace(b'a', b'A'))
s = b'abcabc'; print(s, s.replace(b'ab', b'12'))
s = b'abcabc'; print(s, s.replace(b'a', b'A', 1))
s = b'abcabc'; print(s, s.replace(b'z', b'Z'))
s = bytearray(b'abcabc'); print(s, s.replace(bytearray(b'a'), bytearray(b'A')))
s = bytearray(b'abcabc'); print(s, s.replace(bytearray(b'ab'), bytearray(b'12')))
s = bytearray(b'abcabc'); print(s, s.replace(bytearray(b'a'), bytearray(b'A'), 1))
s = bytearray(b'abcabc'); print(s, s.replace(bytearray(b'z'), bytearray(b'Z')))
| 51.9 | 82 | 0.620424 | 102 | 519 | 3.156863 | 0.107843 | 0.372671 | 0.298137 | 0.322981 | 0.888199 | 0.888199 | 0.888199 | 0.888199 | 0.745342 | 0.745342 | 0 | 0.012821 | 0.098266 | 519 | 9 | 83 | 57.666667 | 0.675214 | 0 | 0 | 0 | 0 | 0 | 0.131021 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 11 |
050331413ba9196db3d09c15af2679a97ba1dbc3 | 44 | py | Python | stepik/3559/66578/step_4/script.py | tshemake/Software-Development | e6dff82dffa49e66dc5fa5650af775437ebb6a9b | [
"Unlicense"
] | null | null | null | stepik/3559/66578/step_4/script.py | tshemake/Software-Development | e6dff82dffa49e66dc5fa5650af775437ebb6a9b | [
"Unlicense"
] | null | null | null | stepik/3559/66578/step_4/script.py | tshemake/Software-Development | e6dff82dffa49e66dc5fa5650af775437ebb6a9b | [
"Unlicense"
] | null | null | null | print(77777777777777777 * 77777777777777777) | 44 | 44 | 0.886364 | 3 | 44 | 13 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.809524 | 0.045455 | 44 | 1 | 44 | 44 | 0.119048 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
05098a06560138d452a5f2a0b763f3e68a76ea29 | 1,977 | py | Python | API/server/python-flask-server-generated/swagger_server/controllers/model_flow_chart_edge_controller.py | zhuofusong/machine-fault-diagnosis | 4c35885e3fbb3c552f526019313a8eae9df28905 | [
"MIT"
] | 2 | 2020-04-30T01:06:55.000Z | 2020-06-08T04:11:28.000Z | API/server/python-flask-server-generated/swagger_server/controllers/model_flow_chart_edge_controller.py | zhuofusong/machine-fault-diagnosis | 4c35885e3fbb3c552f526019313a8eae9df28905 | [
"MIT"
] | 5 | 2020-04-13T14:13:53.000Z | 2021-08-24T17:16:30.000Z | API/server/python-flask-server-generated/swagger_server/controllers/model_flow_chart_edge_controller.py | zhuofusong/machine-fault-diagnosis | 4c35885e3fbb3c552f526019313a8eae9df28905 | [
"MIT"
] | null | null | null | import connexion
import six
from swagger_server.models.model_flow_chart_edge_meta import ModelFlowChartEdgeMeta # noqa: E501
from swagger_server import util
def model_flow_node_model_flow_id_edge_id_delete(model_flow_id, edge_id): # noqa: E501
"""delete an edge's information in a model flow chart
delete an edge's information in a model flow chart # noqa: E501
:param model_flow_id: model flow chart id
:type model_flow_id: str
:param edge_id: edge's id in a model flow chart
:type edge_id: str
:rtype: object
"""
return 'do some magic!'
def model_flow_node_model_flow_id_edge_id_get(model_flow_id, edge_id): # noqa: E501
"""retrieve an edge's information in a model flow chart
retrieve an edge's information in a model flow chart # noqa: E501
:param model_flow_id: model flow chart id
:type model_flow_id: str
:param edge_id: edge's id in a model flow chart
:type edge_id: str
:rtype: List[ModelFlowChartEdgeMeta]
"""
return 'do some magic!'
def model_flow_node_model_flow_id_edge_id_post(model_flow_id, edge_id): # noqa: E501
"""create an edge's information in a model flow chart
create an edge's information in a model flow chart # noqa: E501
:param model_flow_id: model flow chart id
:type model_flow_id: str
:param edge_id: edge's id in a model flow chart
:type edge_id: str
:rtype: List[ModelFlowChartEdgeMeta]
"""
return 'do some magic!'
def model_flow_node_model_flow_id_edge_id_put(model_flow_id, edge_id): # noqa: E501
"""update an edge's information in a model flow chart
update an edge's information in a model flow chart # noqa: E501
:param model_flow_id: model flow chart id
:type model_flow_id: str
:param edge_id: edge's id in a model flow chart
:type edge_id: str
:rtype: List[ModelFlowChartEdgeMeta]
"""
return 'do some magic!'
| 29.954545 | 97 | 0.717248 | 329 | 1,977 | 4.085106 | 0.12766 | 0.247768 | 0.177083 | 0.107143 | 0.885417 | 0.885417 | 0.885417 | 0.811012 | 0.811012 | 0.786458 | 0 | 0.032443 | 0.204856 | 1,977 | 65 | 98 | 30.415385 | 0.822519 | 0.633789 | 0 | 0.333333 | 0 | 0 | 0.096055 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 10 |
05604a2d51a48b907978b669911655516081aca0 | 64,907 | py | Python | source/cogs/economy.py | DarkJoij/Eagle-py | 9091dfea680c2dcc18a3f04fc7f63b3bc47e0b70 | [
"MIT"
] | 3 | 2021-08-17T13:07:42.000Z | 2021-09-05T07:15:55.000Z | source/cogs/economy.py | DarkJoij/Eagle-py | 9091dfea680c2dcc18a3f04fc7f63b3bc47e0b70 | [
"MIT"
] | null | null | null | source/cogs/economy.py | DarkJoij/Eagle-py | 9091dfea680c2dcc18a3f04fc7f63b3bc47e0b70 | [
"MIT"
] | 2 | 2021-09-01T10:40:08.000Z | 2021-09-20T15:13:28.000Z | import discord
from discord.ext import commands, tasks
from discord.ext.commands.core import bot_has_guild_permissions
import random
import json
base_color=0x3498db
su_color=0x2ecc71
err_color=0xe74c3c
sale = ['скидка', 'sale', 'распродажа']
oneuse = ['одноразовая', 'one_use']
class economy(commands.Cog):
def __init__(self, Bot):
self.Bot = Bot
self.sender.start()
@commands.command(
aliases = ['награда', 'бонус']
)
@commands.cooldown(1, 43200, commands.BucketType.user)
async def reward(
self,
ctx
):
with open('databases/economy_data/economy_db.json', 'r') as f:
eco = json.load(f)
with open('databases/server_settings/mass_db.json', 'r') as f:
server = json.load(f)
tru = discord.utils.get(self.Bot.emojis, name='ctrue')
### CHEKING
if not str(ctx.author.id) in eco["users"]["money"]:
eco["users"]["money"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["bank"]:
eco["users"]["bank"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["crypto"]["userbalance"]:
eco["users"]["crypto"]["userbalance"][str(ctx.author.id)] = 0
### CHEKING
eco["users"]["money"][str(ctx.author.id)] += 250
with open('databases/economy_data/economy_db.json', 'w') as f:
json.dump(eco, f)
embed = discord.Embed(
title=f'{tru} Успешно!',
description=f'''Вы успешно собрали часовую награду.
Получено **250₽** на карманный баланс.
Ваш баланс: **{eco["users"]["money"][str(ctx.author.id)]}₽**''',
colour=su_color
)
embed.set_footer(text=f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
@commands.command(
aliases = ['работа', 'работать', 'working', 'work_reward', 'зарплата', 'плата']
)
@commands.cooldown(1, 43200, commands.BucketType.user)
async def work(
self,
ctx
):
with open('databases/economy_data/economy_db.json', 'r') as f:
eco = json.load(f)
with open('databases/server_settings/mass_db.json', 'r') as f:
server = json.load(f)
tru = discord.utils.get(self.Bot.emojis, name='ctrue')
### CHEKING
if not str(ctx.author.id) in eco["users"]["money"]:
eco["users"]["money"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["bank"]:
eco["users"]["bank"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["crypto"]["userbalance"]:
eco["users"]["crypto"]["userbalance"][str(ctx.author.id)] = 0
### CHEKING
profession = random.choice(['стримером', 'фотографом', 'водителем грузовика', 'курьером', 'ловцом змей'])
moneyvalue = random.choice([500, 600, 650, 700, 750, 800, 850, 900, 1000])
eco["users"]["money"][str(ctx.author.id)] += moneyvalue
with open('databases/economy_data/economy_db.json', 'w') as f:
json.dump(eco, f)
embed = discord.Embed(
title=f'{tru} Успешно!',
description=f'''Вы поработали __{profession}__ и получили **{moneyvalue}₽** на карманный счет.
Ваш баланс: **{eco["users"]["money"][str(ctx.author.id)]}₽**''',
colour=su_color
)
embed.set_footer(text=f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
@commands.command(
aliases = ['премиум', 'prem', 'прем']
)
@commands.cooldown(1, 86400, commands.BucketType.user)
async def premium(
self,
ctx
):
with open('databases/economy_data/economy_db.json', 'r') as f:
eco = json.load(f)
with open('databases/server_settings/mass_db.json', 'r') as f:
server = json.load(f)
with open('databases/users_settings/user_db.json', 'r') as f:
usr = json.load(f)
tru = discord.utils.get(self.Bot.emojis, name='ctrue')
err = discord.utils.get(self.Bot.emojis, name='cfalse')
### CHEKING
if not str(ctx.author.id) in eco["users"]["money"]:
eco["users"]["money"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["bank"]:
eco["users"]["bank"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["crypto"]["userbalance"]:
eco["users"]["crypto"]["userbalance"][str(ctx.author.id)] = 0
### CHEKING
if str(ctx.author.id) in usr["items"]["premium"]:
eco["users"]["money"][str(ctx.author.id)] += 2500
eco["users"]["bank"][str(ctx.author.id)] += 250
with open('databases/economy_data/economy_db.json', 'w') as f:
json.dump(eco, f)
embed = discord.Embed(
title=f'{tru} Успешно!',
description=f'''Вы успешно собрали премиум-награду.
Получено **2500₽** на карманный баланс **250₽** на банковский счет.
Ваш баланс: **{eco["users"]["money"][str(ctx.author.id)]}₽**''',
colour=su_color
)
embed.set_footer(text=f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
else:
embed = discord.Embed(
title=f'{err} Ошибка!',
description=f'''На вашем аккаунте нет премиум статуса.''',
colour=err_color
)
embed.set_footer(text=f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
@commands.command(
aliases = ['баланс', 'деньги', 'wallet', 'кошелек', 'кошелёк', 'purse', 'бал', 'bal', 'b', 'б']
)
async def balance(
self,
ctx,
member: discord.Member = None
):
if member == None:
member = ctx.author
with open('databases/economy_data/economy_db.json', 'r') as f:
eco = json.load(f)
with open('databases/server_settings/mass_db.json', 'r') as f:
server = json.load(f)
bnk = discord.utils.get(self.Bot.emojis, name='bank')
### CHEKING
if not str(member.id) in eco["users"]["money"]:
eco["users"]["money"][str(member.id)] = 25
if not str(member.id) in eco["users"]["bank"]:
eco["users"]["bank"][str(member.id)] = 25
if not str(member.id) in eco["users"]["crypto"]["userbalance"]:
eco["users"]["crypto"]["userbalance"][str(member.id)] = 0
### CHEKING
with open('databases/economy_data/economy_db.json', 'w') as f:
json.dump(eco, f)
embed = discord.Embed(
description=f''':money_with_wings: Карманный баланс: **{round(eco["users"]["money"][str(member.id)], 1)}₽**
{bnk} Банковский баланс: **{round(eco["users"]["bank"][str(member.id)], 1)}₽**
:coin: Крипто-баланс: **{eco["users"]["crypto"]["userbalance"][str(member.id)]}EC**
Для пополнения кошелька используйте
команды **!работа** и **!премиум** для премиум пользователей.''',
colour=base_color
)
embed.set_author(
icon_url=member.avatar_url,
name=f'Баланс и состояние {member}',
url='https://discord.gg/dVbPMUKFnh'
)
embed.set_footer(
text=f'Вызвал: {ctx.author}',
icon_url=ctx.author.avatar_url
)
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
@commands.command(
aliases = ['перевод', 'перевести', 'заплатить', 'pay']
)
async def transfer(
self,
ctx,
summ: int = None,
member: discord.Member = None
):
with open('databases/economy_data/economy_db.json', 'r') as f:
eco = json.load(f)
with open('databases/server_settings/mass_db.json', 'r') as f:
server = json.load(f)
err = discord.utils.get(self.Bot.emojis, name='cfalse')
tru = discord.utils.get(self.Bot.emojis, name='ctrue')
### CHEKING1
if not str(ctx.author.id) in eco["users"]["money"]:
eco["users"]["money"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["bank"]:
eco["users"]["bank"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["crypto"]["userbalance"]:
eco["users"]["crypto"]["userbalance"][str(ctx.author.id)] = 0
### CHEKING1
### CHEKING2
if not str(member.id) in eco["users"]["money"]:
eco["users"]["money"][str(member.id)] = 25
if not str(member.id) in eco["users"]["bank"]:
eco["users"]["bank"][str(member.id)] = 25
if not str(member.id) in eco["users"]["crypto"]["userbalance"]:
eco["users"]["crypto"]["userbalance"][str(member.id)] = 0
### CHEKING2
if summ == None and member == None:
embed = discord.Embed(
title=f'{err} Вы не указали параметр:',
description = """> Сумма
> Получатель""",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
if not summ == None and member == None:
embed = discord.Embed(
title=f'{err} Вы не указали параметр:',
description = "> Получатель",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
elif member == ctx.author:
embed = discord.Embed(
title=f'{err} Ошибка!',
description = "Вы не можете совершить перевод самому себе.",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
elif summ > eco["users"]["money"][str(ctx.author.id)]:
embed = discord.Embed(
title=f'{err} Ошибка!',
description = "На Вашем краманном балансе недостаточно средств для совершения перевода в банк.",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
elif summ < 1 or summ > 1000000000:
embed = discord.Embed(
title=f'{err} Ошибка!',
description = "Вы не можете перевести сумму меньше 1₽ и больше 1.000.000.000₽",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
else:
eco["users"]["money"][str(ctx.author.id)] -= summ
eco["users"]["money"][str(member.id)] += round(summ*0.99, 1)
with open('databases/economy_data/economy_db.json', 'w') as f:
json.dump(eco, f)
embed = discord.Embed(
title=f'{tru} Успешно!',
description = f"Совершен перевод {member.mention} на сумму **{round(summ*0.99, 1)}₽**.",
colour=su_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
@commands.command(
aliases = ['депозит', 'в_банк', 'вбанк']
)
async def deposit(
self,
ctx,
summ: int = None
):
with open('databases/economy_data/economy_db.json', 'r') as f:
eco = json.load(f)
with open('databases/server_settings/mass_db.json', 'r') as f:
server = json.load(f)
tru = discord.utils.get(self.Bot.emojis, name='ctrue')
err = discord.utils.get(self.Bot.emojis, name='cfalse')
### CHEKING1
if not str(ctx.author.id) in eco["users"]["money"]:
eco["users"]["money"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["bank"]:
eco["users"]["bank"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["crypto"]["userbalance"]:
eco["users"]["crypto"]["userbalance"][str(ctx.author.id)] = 0
### CHEKING1
if summ == None:
embed = discord.Embed(
title=f'{err} Вы не указали параметр:',
description = "> Сумма",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
elif summ < 1 or summ > 1000000000:
embed = discord.Embed(
title=f'{err} Ошибка!',
description = "Вы не можете перевести сумму меньше 1₽ и больше 1.000.000.000₽",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
elif summ > eco["users"]["money"][str(ctx.author.id)]:
embed = discord.Embed(
title=f'{err} Ошибка!',
description = "На Вашем краманном балансе недостаточно средств для совершения перевода в банк.",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
else:
eco["users"]["money"][str(ctx.author.id)] -= summ
eco["users"]["bank"][str(ctx.author.id)] += summ
with open('databases/economy_data/economy_db.json', 'w') as f:
json.dump(eco, f)
embed = discord.Embed(
title=f'{tru} Успешно!',
description = f"Переведено **{(summ)}₽** на Ваш банковский счёт.",
colour=su_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
@commands.command(
aliases = ['снять', 'обналичить']
)
async def withdraw(
self,
ctx,
summ: int = None
):
with open('databases/economy_data/economy_db.json', 'r') as f:
eco = json.load(f)
with open('databases/server_settings/mass_db.json', 'r') as f:
server = json.load(f)
err = discord.utils.get(self.Bot.emojis, name='cfalse')
tru = discord.utils.get(self.Bot.emojis, name='ctrue')
### CHEKING1
if not str(ctx.author.id) in eco["users"]["money"]:
eco["users"]["money"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["bank"]:
eco["users"]["bank"][str(ctx.author.id)] = 25
if not str(ctx.author.id) in eco["users"]["crypto"]["userbalance"]:
eco["users"]["crypto"]["userbalance"][str(ctx.author.id)] = 0
### CHEKING1
if summ == None:
embed = discord.Embed(
title=f'{err} Вы не указали параметр:',
description = "> Сумма",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
elif summ < 1 or summ > 1000000000:
embed = discord.Embed(
title=f'{err} Ошибка!',
description = "Вы не можете перевести сумму меньше 1₽ и больше 1.000.000.000₽",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
elif summ > eco["users"]["bank"][str(ctx.author.id)]:
embed = discord.Embed(
title=f'{err} Ошибка!',
description = "На Вашем банковском счёте недостаточно средств для совершения обналичивания.",
colour=err_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
else:
eco["users"]["money"][str(ctx.author.id)] += summ
eco["users"]["bank"][str(ctx.author.id)] -= summ
with open('databases/economy_data/economy_db.json', 'w') as f:
json.dump(eco, f)
embed = discord.Embed(
title=f'{tru} Успешно!',
description = f"**{summ}₽** снято на Ваш карманный кошелек.",
colour=su_color
)
embed.set_footer(text = f'Пользователь: {ctx.author}')
if str(ctx.guild.id) in server["reply"]:
await ctx.send(embed=embed)
else:
await ctx.reply(embed=embed, mention_author=False)
@commands.command(
    aliases=['кено', 'fifteen', 'пятнадцать']
)
async def keno(
    self,
    ctx,
    type: int = None,
    summ: int = None
):
    """Number-guessing gamble: bet ``summ`` roubles on a number 1-10.

    An exact hit pays x2, a miss within +-2 pays x1.25, anything else
    loses the stake.  ``type`` shadows the builtin but is kept because
    discord.py exposes parameter names to users.

    :param ctx: command invocation context.
    :param type: the number the player bets on (1-10).
    :param summ: the stake in pocket roubles.
    """
    with open('databases/economy_data/economy_db.json', 'r') as f:
        eco = json.load(f)
    with open('databases/server_settings/mass_db.json', 'r') as f:
        server = json.load(f)
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    tru = discord.utils.get(self.Bot.emojis, name='ctrue')
    uid = str(ctx.author.id)
    # Lazily create default economy records for first-time users.
    if uid not in eco["users"]["money"]:
        eco["users"]["money"][uid] = 25
    if uid not in eco["users"]["bank"]:
        eco["users"]["bank"][uid] = 25
    if uid not in eco["users"]["crypto"]["userbalance"]:
        eco["users"]["crypto"]["userbalance"][uid] = 0

    async def _respond(embed):
        # Per-server toggle: plain send vs. reply without a ping.
        if str(ctx.guild.id) in server["reply"]:
            await ctx.send(embed=embed)
        else:
            await ctx.reply(embed=embed, mention_author=False)

    if type is None and summ is None:
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description="> Число\n> Сумма",
            colour=err_color
        )
        embed.set_footer(text=f'Пользователь: {ctx.author}')
        await _respond(embed)
    elif type is None:
        # BUGFIX: previously a missing number with a present stake fell
        # through to ``type < 1`` below and raised TypeError (None < int).
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description="> Число",
            colour=err_color
        )
        embed.set_footer(text=f'Пользователь: {ctx.author}')
        await _respond(embed)
    elif summ is None:
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description="> Сумма",
            colour=err_color
        )
        embed.set_footer(text=f'Пользователь: {ctx.author}')
        await _respond(embed)
    elif summ < 1 or summ > 1000000000:
        embed = discord.Embed(
            title=f'{err} Ошибка!',
            description="Вы не можете поставить сумму меньше 1₽ и больше 1.000.000.000₽",
            colour=err_color
        )
        embed.set_footer(text=f'Пользователь: {ctx.author}')
        await _respond(embed)
    elif summ > eco["users"]["money"][uid]:
        embed = discord.Embed(
            title=f'{err} Ошибка!',
            description="На Вашем краманном балансе недостаточно средств для совершения перевода в банк.",
            colour=err_color
        )
        embed.set_footer(text=f'Пользователь: {ctx.author}')
        await _respond(embed)
    elif type < 1 or type > 10:
        embed = discord.Embed(
            title=f'{err} Ошибка!',
            # BUGFIX: message now matches the actual 1..10 validation
            # (previously claimed 0..10) and fixes the "ввсети" typo.
            description="Неправильно введено значение угадываемого числа. Вы можете ввести число не меньше 1 и не больше 10.",
            colour=err_color
        )
        embed.set_footer(text=f'Пользователь: {ctx.author}')
        await _respond(embed)
    else:
        hitnum = random.randint(1, 10)
        if type == hitnum:
            # Exact hit: double payout.
            prize = round(summ * 2)
            eco["users"]["money"][uid] += prize
            with open('databases/economy_data/economy_db.json', 'w') as f:
                json.dump(eco, f)
            embed = discord.Embed(
                title=f'{tru} Вы точно угадали число!',
                description=(
                    f"Ваш выигрыш составляет: **+{prize}₽** (Коофициент Х2)\n"
                    f"На барабане число: **{hitnum}**.\n"
                    f':money_with_wings: Карманный баланс: **{eco["users"]["money"][uid]}₽**'
                ),
                colour=su_color
            )
            embed.set_footer(text=f'Пользователь: {ctx.author}')
            await _respond(embed)
        elif abs(type - hitnum) <= 2:
            # Near miss (within +-2): smaller x1.25 payout.
            prize = round(summ * 1.25)
            eco["users"]["money"][uid] += prize
            with open('databases/economy_data/economy_db.json', 'w') as f:
                json.dump(eco, f)
            embed = discord.Embed(
                title=f'{tru} Вы почти угадали число!',
                description=(
                    f"Ваш выигрыш составляет: **+{prize}₽** (Коофициент Х1.25)\n"
                    f"На барабане число: **{hitnum}**.\n"
                    f':money_with_wings: Карманный баланс: **{eco["users"]["money"][uid]}₽**'
                ),
                colour=su_color
            )
            embed.set_footer(text=f'Пользователь: {ctx.author}')
            await _respond(embed)
        else:
            # Miss: the stake is lost.
            eco["users"]["money"][uid] -= summ
            with open('databases/economy_data/economy_db.json', 'w') as f:
                json.dump(eco, f)
            embed = discord.Embed(
                title=f'{err} Вы не угадали число.',
                description=(
                    f"Итого: **-{summ}₽**\n"
                    f"На барабане число: **{hitnum}**.\n"
                    f':money_with_wings: Карманный баланс: **{eco["users"]["money"][uid]}₽**'
                ),
                colour=err_color
            )
            embed.set_footer(text=f'Пользователь: {ctx.author}')
            await _respond(embed)
@commands.command(
    aliases=['крипто', 'криптовалюта', 'крипта', 'есоин', 'ecoin']
)
async def crypto(
    self,
    ctx
):
    """Show the current EagleCoin rate and the caller's crypto balance.

    :param ctx: command invocation context.
    """
    with open('databases/economy_data/economy_db.json', 'r') as f:
        eco = json.load(f)
    with open('databases/server_settings/mass_db.json', 'r') as f:
        server = json.load(f)
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    uid = str(ctx.author.id)
    # Lazily create default economy records for first-time users.
    if uid not in eco["users"]["money"]:
        eco["users"]["money"][uid] = 25
    if uid not in eco["users"]["bank"]:
        eco["users"]["bank"][uid] = 25
    if uid not in eco["users"]["crypto"]["userbalance"]:
        eco["users"]["crypto"]["userbalance"][uid] = 0
    if eco["users"]["crypto"]["cryptovalue"] > 1000000:
        mark = discord.utils.get(self.Bot.emojis, name='idle')
        text = "Тяжелодоступные"
    else:
        # BUGFIX: was ``elif ... < 1000000``, which left ``mark``/``text``
        # unbound (NameError) when the rate was exactly 1 000 000.
        mark = discord.utils.get(self.Bot.emojis, name='online1')
        text = "Легкодоступные"
    embed = discord.Embed(
        title='Курс, цена и состояние криптовалюты Eternal EagleCoin.',
        description=(
            f'Цена за 1 единицу актива: **{eco["users"]["crypto"]["cryptovalue"]}₽**\n'
            f'Состояние активов: {mark} **{text}**.\n'
            f'Ваш крипто-баланс: **{eco["users"]["crypto"]["userbalance"][uid]}EC**\n'
            'Для операции активами использвуйте команды **!крипто купить**, **!крипто продать**, указывая сумму для покупки активов или кол-во активов для их продажи.'
        ),
        colour=base_color
    )
    embed.set_footer(text=f'Пользователь: {ctx.author}')
    if str(ctx.guild.id) in server["reply"]:
        await ctx.send(embed=embed)
    else:
        await ctx.reply(embed=embed, mention_author=False)
@commands.command(
    aliases=['крипто_купить', 'к_купить', 'крипто_к']
)
async def crypto_buy(
    self,
    ctx,
    var: int = None
):
    """Buy EagleCoin assets for ``var`` pocket roubles at the current rate.

    :param ctx: command invocation context.
    :param var: amount of roubles to spend (1 .. 1 000 000 000).
    """
    with open('databases/economy_data/economy_db.json', 'r') as f:
        eco = json.load(f)
    with open('databases/server_settings/mass_db.json', 'r') as f:
        server = json.load(f)
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    tru = discord.utils.get(self.Bot.emojis, name='ctrue')
    uid = str(ctx.author.id)
    # Lazily create default economy records for first-time users.
    if uid not in eco["users"]["money"]:
        eco["users"]["money"][uid] = 25
    if uid not in eco["users"]["bank"]:
        eco["users"]["bank"][uid] = 25
    if uid not in eco["users"]["crypto"]["userbalance"]:
        eco["users"]["crypto"]["userbalance"][uid] = 0
    if var is None:
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description="> Сумма",
            colour=err_color
        )
    elif var < 1 or var > 1000000000:
        embed = discord.Embed(
            title=f'{err} Ошибка!',
            description="Вы не можете вложить сумму меньше 1₽ и больше 1.000.000.000₽",
            colour=err_color
        )
    elif var > eco["users"]["money"][uid]:
        embed = discord.Embed(
            title=f'{err} Ошибка!',
            description="На Вашем краманном балансе недостаточно средств для совершения совершения операции.",
            colour=err_color
        )
    else:
        # Amount of coins acquired at the current exchange rate.
        bought = var / eco["users"]["crypto"]["cryptovalue"]
        eco["users"]["money"][uid] -= var
        eco["users"]["crypto"]["userbalance"][uid] += bought
        with open('databases/economy_data/economy_db.json', 'w') as f:
            json.dump(eco, f)
        embed = discord.Embed(
            title=f'{tru} Успешно!',
            # BUGFIX: previously reported the user's *total* crypto balance
            # as the purchased amount; show the coins actually bought.
            description=f'Вы успешно приобрели **{bought}** активов EC на сумму {var}₽',
            colour=su_color
        )
    embed.set_footer(text=f'Пользователь: {ctx.author}')
    if str(ctx.guild.id) in server["reply"]:
        await ctx.send(embed=embed)
    else:
        await ctx.reply(embed=embed, mention_author=False)
@commands.command(
    aliases=['крипто_продать', 'крипто_п', 'к_продать']
)
async def crypto_sell(
    self,
    ctx,
    var: float = None
):
    """Sell ``var`` EagleCoin assets back into pocket roubles.

    :param ctx: command invocation context.
    :param var: number of assets to sell (0 .. 10000).
    """
    with open('databases/economy_data/economy_db.json', 'r') as fh:
        eco = json.load(fh)
    with open('databases/server_settings/mass_db.json', 'r') as fh:
        server = json.load(fh)
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    tru = discord.utils.get(self.Bot.emojis, name='ctrue')
    uid = str(ctx.author.id)
    users = eco["users"]
    # First-touch defaults for accounts that have never been seen.
    if uid not in users["money"]:
        users["money"][uid] = 25
    if uid not in users["bank"]:
        users["bank"][uid] = 25
    if uid not in users["crypto"]["userbalance"]:
        users["crypto"]["userbalance"][uid] = 0
    if var is None:
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description="> Кол-во активов",
            colour=err_color
        )
    elif var < 0 or var > 10000:
        embed = discord.Embed(
            title=f'{err} Ошибка!',
            description="Вы не можете продать число активов меньше 0 и больше 10000.",
            colour=err_color
        )
    elif var > users["crypto"]["userbalance"][uid]:
        embed = discord.Embed(
            title=f'{err} Ошибка!',
            description="На Вашем краманном крипто-балансе недостаточно активов для совершения совершения операции.",
            colour=err_color
        )
    else:
        # Convert assets to roubles at the current rate, 1-decimal rounding.
        proceeds = round(var * users["crypto"]["cryptovalue"], 1)
        users["crypto"]["userbalance"][uid] -= var
        users["money"][uid] += proceeds
        with open('databases/economy_data/economy_db.json', 'w') as fh:
            json.dump(eco, fh)
        embed = discord.Embed(
            title=f'{tru} Успешно!',
            description=f'Вы успешно продали {var} активов EC на сумму **{proceeds}₽**',
            colour=su_color
        )
    embed.set_footer(text=f'Пользователь: {ctx.author}')
    if str(ctx.guild.id) in server["reply"]:
        await ctx.send(embed=embed)
    else:
        await ctx.reply(embed=embed, mention_author=False)
@commands.command(
    aliases = ['роль+', 'лот+', 'role+', 'lot+']
)
@commands.has_guild_permissions(administrator=True)
async def shop_addrole(
    self,
    ctx,
    role: discord.Role = None,
    cost: int = None,
    tp = None,
    salevar: int = None
):
    """Admin command: add a role lot to this guild's shop.

    ``tp`` selects the lot kind: ``None`` -> regular lot, a value found in
    the module-level ``sale`` collection -> discounted lot (``salevar`` is
    the discount percent), a value found in ``oneuse`` -> one-use lot.
    (``sale``/``oneuse`` are defined elsewhere in this file — presumably
    keyword alias lists; verify against the module header.)
    """
    with open('databases/economy_data/economy_db.json', 'r') as f:
        eco = json.load(f)
    with open('databases/server_settings/mass_db.json', 'r') as f:
        server = json.load(f)
    # Make sure every per-guild shop bucket exists before any lookups.
    if not str(ctx.guild.id) in eco["tech"]["shop"]["base"]:
        eco["tech"]["shop"]["base"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["sale"]["roleid"]:
        eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["sale"]["salevalue"]:
        eco["tech"]["shop"]["sale"]["salevalue"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["oneuse"]:
        eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)] = {}
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    tru = discord.utils.get(self.Bot.emojis, name='ctrue')
    if role == None and cost == None and tp == None:
        # Nothing supplied: list every parameter the command expects.
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description = """> Роль
> Сумма
> Тип лота [Опционально]
> Скидка [Опционально]""",
            colour=err_color
        )
        embed.set_footer(text = f'Пользователь: {ctx.author}')
        if str(ctx.guild.id) in server["reply"]:
            await ctx.send(embed=embed)
        else:
            await ctx.reply(embed=embed, mention_author=False)
    elif not role == None and cost == None and tp == None:
        # Role given but no price: prompt for the remaining parameters.
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description = """> Сумма
> Тип лота [Опционально]
> Скидка [Опционально]""",
            colour=err_color
        )
        embed.set_footer(text = f'Пользователь: {ctx.author}')
        if str(ctx.guild.id) in server["reply"]:
            await ctx.send(embed=embed)
        else:
            await ctx.reply(embed=embed, mention_author=False)
    else:
        # NOTE(review): if only ``tp`` was supplied, ``cost`` is still None
        # and the comparison below raises TypeError; a missing ``role``
        # likewise crashes later at ``role.id`` — confirm and guard.
        if cost < 0 or cost > 100000000:
            embed = discord.Embed(
                title=f'{err} Ошибка!',
                description = "Вы не можете ввести стоимость роли меньше 0₽ и больше 100.000.000₽.",
                colour=err_color
            )
            embed.set_footer(text = f'Пользователь: {ctx.author}')
            if str(ctx.guild.id) in server["reply"]:
                await ctx.send(embed=embed)
            else:
                await ctx.reply(embed=embed, mention_author=False)
        else:
            # NOTE(review): ``bot_has_guild_permissions(...)`` is the
            # commands-extension check *factory*; it returns a decorator
            # object, which is always truthy, so this branch is always
            # taken.  The runtime check intended here is probably
            # ``ctx.guild.me.guild_permissions.manage_roles`` — confirm.
            if bot_has_guild_permissions(manage_roles=True):
                if tp == None:
                    # Regular lot: add only if the role is in no bucket yet.
                    if not str(role.id) in eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)] and not str(role.id) in eco["tech"]["shop"]["base"][str(ctx.guild.id)] and not str(role.id) in eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)]:
                        eco["tech"]["shop"]["base"][str(ctx.guild.id)][str(role.id)] = cost
                        with open('databases/economy_data/economy_db.json', 'w') as f:
                            json.dump(eco, f)
                        embed = discord.Embed(
                            title=f'{tru} Успешно!',
                            description = f'Роль {role.mention} стоимостью **{cost}** добавлена в __обыкновенный__ лот.',
                            colour=su_color
                        )
                        embed.set_footer(text = f'Добавлено: {ctx.author}')
                        if str(ctx.guild.id) in server["reply"]:
                            await ctx.send(embed=embed)
                        else:
                            await ctx.reply(embed=embed, mention_author=False)
                    else:
                        embed = discord.Embed(
                            title=f'{err} Ошибка!',
                            description = f"Данная роль ({role.mention}) уже присутствует в магазине.",
                            colour=err_color
                        )
                        if str(ctx.guild.id) in server["reply"]:
                            await ctx.send(embed=embed)
                        else:
                            await ctx.reply(embed=embed, mention_author=False)
                elif tp in sale:
                    # Discounted lot: validate the discount percent first.
                    # NOTE(review): ``salevar`` defaults to None, so omitting
                    # the discount makes this comparison raise TypeError.
                    if salevar < 0 or salevar > 100:
                        embed = discord.Embed(
                            title=f'{err} Ошибка!',
                            description = "Размер скидки не может быть меньше 0 и больше 100.",
                            colour=err_color
                        )
                        if str(ctx.guild.id) in server["reply"]:
                            await ctx.send(embed=embed)
                        else:
                            await ctx.reply(embed=embed, mention_author=False)
                    else:
                        if not str(role.id) in eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)] and not str(role.id) in eco["tech"]["shop"]["base"][str(ctx.guild.id)] and not str(role.id) in eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)]:
                            # Price and discount live in two parallel dicts
                            # keyed by guild id then role id.
                            eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)][str(role.id)] = cost
                            eco["tech"]["shop"]["sale"]["salevalue"][str(ctx.guild.id)][str(role.id)] = salevar
                            with open('databases/economy_data/economy_db.json', 'w') as f:
                                json.dump(eco, f)
                            embed = discord.Embed(
                                title=f'{tru} Успешно!',
                                description = f'Роль {role.mention} стоимостью **{cost}** добавлена в __скидочный__ лот.',
                                colour=su_color
                            )
                            embed.set_footer(text = f'Добавлено: {ctx.author}')
                            if str(ctx.guild.id) in server["reply"]:
                                await ctx.send(embed=embed)
                            else:
                                await ctx.reply(embed=embed, mention_author=False)
                        else:
                            embed = discord.Embed(
                                title=f'{err} Ошибка!',
                                description = f"Данная роль ({role.mention}) уже присутствует в магазине.",
                                colour=err_color
                            )
                            if str(ctx.guild.id) in server["reply"]:
                                await ctx.send(embed=embed)
                            else:
                                await ctx.reply(embed=embed, mention_author=False)
                elif tp in oneuse:
                    # One-use lot: the entry is removed after first purchase
                    # (see the buy command's "DELETE!" handling).
                    if not str(role.id) in eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)] and not str(role.id) in eco["tech"]["shop"]["base"][str(ctx.guild.id)] and not str(role.id) in eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)]:
                        eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)][str(role.id)] = cost
                        with open('databases/economy_data/economy_db.json', 'w') as f:
                            json.dump(eco, f)
                        embed = discord.Embed(
                            title=f'{tru} Успешно!',
                            description = f'Роль {role.mention} стоимостью **{cost}** добавлена в __одноразовый__ лот.',
                            colour=su_color
                        )
                        embed.set_footer(text = f'Добавлено: {ctx.author}')
                        if str(ctx.guild.id) in server["reply"]:
                            await ctx.send(embed=embed)
                        else:
                            await ctx.reply(embed=embed, mention_author=False)
                    else:
                        embed = discord.Embed(
                            title=f'{err} Ошибка!',
                            description = f"Данная роль ({role.mention}) уже присутствует в магазине.",
                            colour=err_color
                        )
                        if str(ctx.guild.id) in server["reply"]:
                            await ctx.send(embed=embed)
                        else:
                            await ctx.reply(embed=embed, mention_author=False)
            else:
                e = discord.Embed(
                    title=f'{err} Невозможно совершить операцию',
                    description=f'У меня нет права "Управлять ролями" на этом сервере. Без него я не смогу выдавать роли при покупке. Для разрешения проблемы выдайте мне данное право или обратитесь к создателю сервера.',
                    color=err_color
                )
                if str(ctx.guild.id) in server["reply"]:
                    await ctx.send(embed=e)
                else:
                    await ctx.reply(embed=e, mention_author=False)
@commands.command(
    aliases = ['роль-', 'лот-', 'role-', 'lot-']
)
@commands.has_guild_permissions(administrator=True)
async def shop_remrole(
    self,
    ctx,
    role: discord.Role = None
):
    """Admin command: remove a role lot from every shop bucket it is in.

    The role is looked up in the regular, discounted and one-use buckets
    and deleted from each one that contains it; a success embed is sent
    per bucket removed.
    """
    with open('databases/economy_data/economy_db.json', 'r') as f:
        eco = json.load(f)
    with open('databases/server_settings/mass_db.json', 'r') as f:
        server = json.load(f)
    # Make sure every per-guild shop bucket exists before any lookups.
    if not str(ctx.guild.id) in eco["tech"]["shop"]["base"]:
        eco["tech"]["shop"]["base"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["sale"]["roleid"]:
        eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["sale"]["salevalue"]:
        eco["tech"]["shop"]["sale"]["salevalue"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["oneuse"]:
        eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)] = {}
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    tru = discord.utils.get(self.Bot.emojis, name='ctrue')
    if role == None:
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description = "> Роль",
            colour=err_color
        )
        embed.set_footer(text = f'Пользователь: {ctx.author}')
        if str(ctx.guild.id) in server["reply"]:
            await ctx.send(embed=embed)
        else:
            await ctx.reply(embed=embed, mention_author=False)
    else:
        # NOTE(review): ``bot_has_guild_permissions(...)`` is the check
        # *factory* from the commands extension; it returns a (truthy)
        # decorator, so this branch is always taken.  The intended runtime
        # check is probably ``ctx.guild.me.guild_permissions.manage_roles``.
        if bot_has_guild_permissions(manage_roles=True):
            ###BASE###
            if str(role.id) in eco["tech"]["shop"]["base"][str(ctx.guild.id)] or str(role.id) in eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)] or str(role.id) in eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)]:
                if str(role.id) in eco["tech"]["shop"]["base"][str(ctx.guild.id)]:
                    del eco["tech"]["shop"]["base"][str(ctx.guild.id)][str(role.id)]
                    with open('databases/economy_data/economy_db.json', 'w') as f:
                        json.dump(eco, f)
                    embed = discord.Embed(
                        title=f'{tru} Успешно!',
                        description = f'Роль {role.mention} удалена из __обыкновенного__ лота.',
                        colour=su_color
                    )
                    embed.set_footer(text = f'Удалено: {ctx.author}')
                    if str(ctx.guild.id) in server["reply"]:
                        await ctx.send(embed=embed)
                    else:
                        await ctx.reply(embed=embed, mention_author=False)
                else:
                    pass
                ###SALE###
                if str(role.id) in eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)]:
                    # Discounted lots keep price and discount in parallel
                    # dicts — both entries must go.
                    del eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)][str(role.id)]
                    del eco["tech"]["shop"]["sale"]["salevalue"][str(ctx.guild.id)][str(role.id)]
                    with open('databases/economy_data/economy_db.json', 'w') as f:
                        json.dump(eco, f)
                    embed = discord.Embed(
                        title=f'{tru} Успешно!',
                        description = f'Роль {role.mention} убрана из __скидочного__ лотa.',
                        colour=su_color
                    )
                    # NOTE(review): footer says "Добавлено" on a removal —
                    # probably meant "Удалено"; confirm.
                    embed.set_footer(text = f'Добавлено: {ctx.author}')
                    if str(ctx.guild.id) in server["reply"]:
                        await ctx.send(embed=embed)
                    else:
                        await ctx.reply(embed=embed, mention_author=False)
                else:
                    pass
                ###ONEUSE###
                if str(role.id) in eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)]:
                    del eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)][str(role.id)]
                    with open('databases/economy_data/economy_db.json', 'w') as f:
                        json.dump(eco, f)
                    embed = discord.Embed(
                        title=f'{tru} Успешно!',
                        # NOTE(review): this success message is truncated —
                        # it only says "Роль {mention} " with no verb;
                        # compare the sibling branches above.
                        description = f'Роль {role.mention} ',
                        colour=su_color
                    )
                    embed.set_footer(text = f'Добавлено: {ctx.author}')
                    if str(ctx.guild.id) in server["reply"]:
                        await ctx.send(embed=embed)
                    else:
                        await ctx.reply(embed=embed, mention_author=False)
                else:
                    pass
            else:
                embed = discord.Embed(
                    title=f'{err} Ошибка!',
                    description = f"Данной роли ({role.mention}) нет в магазине.",
                    colour=err_color
                )
                if str(ctx.guild.id) in server["reply"]:
                    await ctx.send(embed=embed)
                else:
                    await ctx.reply(embed=embed, mention_author=False)
        else:
            e = discord.Embed(
                title=f'{err} Невозможно совершить операцию',
                description=f'У меня нет права "Управлять ролями" на этом сервере. Без него я не смогу выдавать роли при покупке. Для разрешения проблемы выдайте мне данное право или обратитесь к создателю сервера.',
                color=err_color
            )
            if str(ctx.guild.id) in server["reply"]:
                await ctx.send(embed=e)
            else:
                await ctx.reply(embed=e, mention_author=False)
@commands.command(
    aliases=['магазин', 'магазин_ролей', 'role_shop']
)
async def shop(
    self,
    ctx
):
    """Render this guild's role shop: regular, discounted and one-use lots.

    :param ctx: command invocation context.
    """
    with open('databases/economy_data/economy_db.json', 'r') as fh:
        eco = json.load(fh)
    with open('databases/server_settings/mass_db.json', 'r') as fh:
        server = json.load(fh)
    gid = str(ctx.guild.id)
    shop_root = eco["tech"]["shop"]
    # Make sure every per-guild shop bucket exists before reading it
    # (in-memory only; this command never writes the database back).
    for bucket in (shop_root["base"], shop_root["sale"]["roleid"],
                   shop_root["sale"]["salevalue"], shop_root["oneuse"]):
        if gid not in bucket:
            bucket[gid] = {}
    wt = discord.utils.get(self.Bot.emojis, name='cnone')
    embed = discord.Embed(
        title=f'{wt} Магазин ролей сервера {ctx.guild.name}:',
        colour=base_color
    )
    base_lots = shop_root["base"][gid]
    sale_lots = shop_root["sale"]["roleid"][gid]
    oneuse_lots = shop_root["oneuse"][gid]
    if not base_lots and not sale_lots and not oneuse_lots:
        embed.set_footer(text='В магазине нет ни одной роли.')
    else:
        embed.set_footer(text='Покупка роли -> !купить [@роль]')
        for role in base_lots:
            embed.add_field(
                name="Базовая роль",
                value=f" `Роль:` <@&{role}>\n`Стоимость:` **{base_lots[role]}₽**",
                inline=True
            )
        for role in sale_lots:
            discount = shop_root["sale"]["salevalue"][gid][role]
            embed.add_field(
                name=f":tada: Роль с скидкой {discount}%",
                value=f" `Роль:` <@&{role}>\n`Стоимость:` **{sale_lots[role]}₽**",
                inline=True
            )
        for role in oneuse_lots:
            embed.add_field(
                name="Одноразовая роль",
                value=f" Роль: <@&{role}>\n`Стоимость:` **{oneuse_lots[role]}₽**",
                inline=True
            )
    if gid in server["reply"]:
        await ctx.send(embed=embed)
    else:
        await ctx.reply(embed=embed, mention_author=False)
@commands.command(
    aliases = ['купить', 'приобрести', 'купить_роль', 'приобрести_роль', 'buy_role', 'role_buy']
)
async def buy(
    self,
    ctx,
    role: discord.Role = None
):
    """Buy a shop role with pocket money and grant it to the caller.

    One-use lots are deleted from the shop after a successful purchase
    (the ``mark == 'DELETE!'`` path below).
    """
    with open('databases/economy_data/economy_db.json', 'r') as f:
        eco = json.load(f)
    with open('databases/server_settings/mass_db.json', 'r') as f:
        server = json.load(f)
    # Make sure every per-guild shop bucket exists before any lookups.
    if not str(ctx.guild.id) in eco["tech"]["shop"]["base"]:
        eco["tech"]["shop"]["base"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["sale"]["roleid"]:
        eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["sale"]["salevalue"]:
        eco["tech"]["shop"]["sale"]["salevalue"][str(ctx.guild.id)] = {}
    if not str(ctx.guild.id) in eco["tech"]["shop"]["oneuse"]:
        eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)] = {}
    # First-touch defaults for users that have never been seen.
    if not str(ctx.author.id) in eco["users"]["money"]:
        eco["users"]["money"][str(ctx.author.id)] = 25
    if not str(ctx.author.id) in eco["users"]["bank"]:
        eco["users"]["bank"][str(ctx.author.id)] = 25
    if not str(ctx.author.id) in eco["users"]["crypto"]["userbalance"]:
        eco["users"]["crypto"]["userbalance"][str(ctx.author.id)] = 0
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    tru = discord.utils.get(self.Bot.emojis, name='ctrue')
    if role == None:
        embed = discord.Embed(
            title=f'{err} Вы не указали параметр:',
            description = "> Роль",
            colour=err_color
        )
        embed.set_footer(text = f'Пользователь: {ctx.author}')
        if str(ctx.guild.id) in server["reply"]:
            await ctx.send(embed=embed)
        else:
            await ctx.reply(embed=embed, mention_author=False)
    else:
        # NOTE(review): ``bot_has_guild_permissions(...)`` is the check
        # *factory*; it returns a (truthy) decorator, so this branch is
        # always taken.  The intended runtime check is probably
        # ``ctx.guild.me.guild_permissions.manage_roles`` — confirm.
        if bot_has_guild_permissions(manage_roles=True):
            if not str(role.id) in eco["tech"]["shop"]["base"][str(ctx.guild.id)] and not str(role.id) in eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)] and not str(role.id) in eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)]:
                embed = discord.Embed(
                    title=f'{err} Ошибка!',
                    description = f"Данной роли ({role.mention}) нет в магазине.",
                    colour=err_color
                )
                if str(ctx.guild.id) in server["reply"]:
                    await ctx.send(embed=embed)
                else:
                    await ctx.reply(embed=embed, mention_author=False)
            else:
                # The role is in exactly one bucket; record its price
                # (``cst``), source dict (``shopath``) and whether the lot
                # must be deleted after purchase (``mark``).  One of the
                # branches is guaranteed to match because membership was
                # checked above, so ``cst``/``mark`` are always bound here.
                if str(role.id) in eco["tech"]["shop"]["base"][str(ctx.guild.id)]:
                    cst = eco["tech"]["shop"]["base"][str(ctx.guild.id)][str(role.id)]
                    shopath = eco["tech"]["shop"]["base"][str(ctx.guild.id)]
                    mark = "PASS"
                elif str(role.id) in eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)]:
                    cst = eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)][str(role.id)]
                    shopath = eco["tech"]["shop"]["sale"]["roleid"][str(ctx.guild.id)]
                    mark = "PASS"
                elif str(role.id) in eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)]:
                    cst = eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)][str(role.id)]
                    shopath = eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)]
                    mark = "DELETE!"
                if eco["users"]["money"][str(ctx.author.id)] >= cst:
                    if not role in ctx.author.roles:
                        # NOTE(review): this loop is effectively a direct
                        # lookup of str(role.id) in ``shopath``.
                        for i in shopath:
                            if i == str(role.id):
                                ###USER###
                                try:
                                    buy_role = discord.utils.get(ctx.guild.roles, id=int(i))
                                    await ctx.author.add_roles(buy_role)
                                    eco["users"]["money"][str(ctx.author.id)] -= cst
                                    ###ROLE CHECKER###
                                    # One-use lots disappear after purchase.
                                    if mark == 'DELETE!':
                                        del eco["tech"]["shop"]["oneuse"][str(ctx.guild.id)][str(role.id)]
                                    with open('databases/economy_data/economy_db.json', 'w') as f:
                                        json.dump(eco, f)
                                    embed = discord.Embed(
                                        title=f'{tru} Успешно!',
                                        description = f'Вы успешно приобрели роль {role.mention}. Списано **{cst}₽**',
                                        colour=su_color
                                    )
                                    embed.set_footer(text = f'Добавлено: {ctx.author}')
                                    if str(ctx.guild.id) in server["reply"]:
                                        await ctx.send(embed=embed)
                                    else:
                                        await ctx.reply(embed=embed, mention_author=False)
                                except Exception:
                                    # NOTE(review): failures (e.g. role
                                    # hierarchy / missing permission on
                                    # add_roles) are swallowed with no user
                                    # feedback — confirm this is intended.
                                    pass
                            else:
                                pass
                    else:
                        embed = discord.Embed(
                            title=f'{err} Ошибка!',
                            description = f"{ctx.author.mention}, у Вас уже есть данная роль.",
                            colour=err_color
                        )
                        embed.set_footer(text = f'Пользователь: {ctx.author}')
                        if str(ctx.guild.id) in server["reply"]:
                            await ctx.send(embed=embed)
                        else:
                            await ctx.reply(embed=embed, mention_author=False)
                else:
                    embed = discord.Embed(
                        title=f'{err} Ошибка!',
                        description = "На Вашем краманном балансе недостаточно средств для совершения совершения операции.",
                        colour=err_color
                    )
                    embed.set_footer(text = f'Пользователь: {ctx.author}')
                    if str(ctx.guild.id) in server["reply"]:
                        await ctx.send(embed=embed)
                    else:
                        await ctx.reply(embed=embed, mention_author=False)
        else:
            e = discord.Embed(
                title=f'{err} Невозможно совершить операцию',
                description=f'У меня нет права "Управлять ролями" на этом сервере. Без него я не смогу выдавать роли при покупке. Для разрешения проблемы выдайте мне данное право или обратитесь к создателю сервера.',
                color=err_color
            )
            if str(ctx.guild.id) in server["reply"]:
                await ctx.send(embed=e)
            else:
                await ctx.reply(embed=e, mention_author=False)
###CD ERRORS###
@work.error
async def cd_work_error(
    self,
    ctx,
    error
):
    """Report the remaining cooldown when ``work`` is invoked too soon."""
    with open('databases/server_settings/mass_db.json', 'r') as fh:
        server = json.load(fh)
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    if not isinstance(error, commands.CommandOnCooldown):
        return
    # Break the remaining seconds into h / m / s for display.
    hours, remainder = divmod(error.retry_after, 60 ** 2)
    minutes, seconds = divmod(remainder, 60)
    e = discord.Embed(
        title=f"{err} Ошибка!",
        description=(
            "Недавно вы уже работали. Вы не можете работать менее, чем раз в 12 часов.\n"
            f"Попробуйте снова через `{round(hours)} ч, {round(minutes)} мин, {round(seconds)} сек`"
        ),
        color=err_color
    )
    if str(ctx.guild.id) in server["reply"]:
        await ctx.send(embed=e)
    else:
        await ctx.reply(embed=e, mention_author=False)
@premium.error
async def cd_premium_error(
    self,
    ctx,
    error
):
    """Report the remaining cooldown when ``premium`` is claimed too soon."""
    with open('databases/server_settings/mass_db.json', 'r') as fh:
        server = json.load(fh)
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    if not isinstance(error, commands.CommandOnCooldown):
        return
    # Break the remaining seconds into h / m / s for display.
    hours, remainder = divmod(error.retry_after, 60 ** 2)
    minutes, seconds = divmod(remainder, 60)
    e = discord.Embed(
        title=f"{err} Ошибка!",
        description=(
            "Недавно вы уже забирали премиум-награду. Вы не можете собирать её менее, чем раз в 24 часа.\n"
            f"Попробуйте снова через `{round(hours)} ч, {round(minutes)} мин, {round(seconds)} сек`"
        ),
        color=err_color
    )
    if str(ctx.guild.id) in server["reply"]:
        await ctx.send(embed=e)
    else:
        await ctx.reply(embed=e, mention_author=False)
@shop_addrole.error
async def perm_addrole_error(
    self,
    ctx,
    error
):
    """Explain that adding shop lots requires the Administrator permission."""
    with open('databases/server_settings/mass_db.json', 'r') as fh:
        server = json.load(fh)
    if not isinstance(error, commands.MissingPermissions):
        return
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    e = discord.Embed(
        title=f"{err} Ошибка!",
        description='Недостаточно прав для выполнения команды. Для просмотра и управленния магазином сервера Вам нужно право "Администратор".',
        color=err_color
    )
    if str(ctx.guild.id) in server["reply"]:
        await ctx.send(embed=e)
    else:
        await ctx.reply(embed=e, mention_author=False)
@shop_remrole.error
async def perm_remrole_error(
    self,
    ctx,
    error
):
    """Explain that removing shop lots requires the Administrator permission."""
    with open('databases/server_settings/mass_db.json', 'r') as fh:
        server = json.load(fh)
    if not isinstance(error, commands.MissingPermissions):
        return
    err = discord.utils.get(self.Bot.emojis, name='cfalse')
    e = discord.Embed(
        title=f"{err} Ошибка!",
        description='Недостаточно прав для выполнения команды. Для просмотра и управленния магазином сервера Вам нужно право "Администратор".',
        color=err_color
    )
    if str(ctx.guild.id) in server["reply"]:
        await ctx.send(embed=e)
    else:
        await ctx.reply(embed=e, mention_author=False)
###TASKS LOOP###
@tasks.loop(
    hours=12,
    reconnect=True
)
async def sender(self):
    """Every 12 hours: nudge the crypto rate up or down by a random amount
    and announce the new rate in the hard-coded news channel.

    NOTE: the rate can in principle drop to zero or below over time —
    presumably bounded in practice; verify before relying on it.
    """
    with open('databases/economy_data/economy_db.json', 'r') as fh:
        eco = json.load(fh)
    channel = self.Bot.get_channel(866662493048799242)
    # Keep the two random draws in the original order (value, then sign)
    # so the RNG stream is unchanged.
    delta = random.randint(10, 30000)
    if random.choice(['plus', 'minus']) == 'plus':
        eco["users"]["crypto"]["cryptovalue"] += delta
        phrase = "Произошёл скачок в цене"
    else:
        eco["users"]["crypto"]["cryptovalue"] -= delta
        phrase = "Произошло падение в цене"
    with open('databases/economy_data/economy_db.json', 'w') as fh:
        json.dump(eco, fh)
    rate = eco["users"]["crypto"]["cryptovalue"]
    embed = discord.Embed(
        title='Курс криптовалюты успешно изменен!',
        description=f"{phrase} на **{delta}₽**. Текущий курс валюты: {rate}.",
        colour=base_color
    )
    await channel.send(embed=embed)
@sender.before_loop
async def before_sender(self):
    # Delay the first crypto-rate tick until the bot's cache is ready,
    # otherwise get_channel() inside sender() could return None.
    await self.Bot.wait_until_ready()
def setup(Bot):
    """Extension entry point: register the economy cog on *Bot*."""
    Bot.add_cog(economy(Bot))
| 48.365872 | 252 | 0.501841 | 7,392 | 64,907 | 4.356331 | 0.063041 | 0.044904 | 0.048506 | 0.057326 | 0.885628 | 0.877213 | 0.872772 | 0.864574 | 0.850599 | 0.835973 | 0 | 0.009766 | 0.353198 | 64,907 | 1,341 | 253 | 48.401939 | 0.756348 | 0.005207 | 0 | 0.750592 | 0 | 0.014996 | 0.22489 | 0.048691 | 0 | 0 | 0.000373 | 0 | 0 | 1 | 0.001579 | false | 0.005525 | 0.003946 | 0 | 0.006314 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
058cf65d82a2aaf320e2d1e63df91679d96775aa | 16,997 | py | Python | capstoneproject/tests/test_models/test_db_queries/test_words_and_features.py | jwillikers/content-rating | 25ccdb02edd18d0118d60fce609b60baadef257b | [
"Apache-2.0"
] | 1 | 2021-02-03T20:55:33.000Z | 2021-02-03T20:55:33.000Z | capstoneproject/tests/test_models/test_db_queries/test_words_and_features.py | jwillikers/content-rating | 25ccdb02edd18d0118d60fce609b60baadef257b | [
"Apache-2.0"
] | null | null | null | capstoneproject/tests/test_models/test_db_queries/test_words_and_features.py | jwillikers/content-rating | 25ccdb02edd18d0118d60fce609b60baadef257b | [
"Apache-2.0"
] | null | null | null | from django.test import TestCase
from django.contrib.auth.models import User
from capstoneproject.models.db_queries.words_and_features \
import words_and_features
from capstoneproject.models.models.category import Category
from capstoneproject.models.models.word import Word
from capstoneproject.models.models.word_feature import WordFeature
from capstoneproject.models.models.user_storage import UserStorage
class WordsAndFeaturesTestClass(TestCase):
    """Tests for the words_and_features query helper.

    Fixture summary (created once in setUpClass):
      word1 -> feature1 (cat1, strength=True,  weight=1)
      word2 -> feature2 (cat2, strength=False, weight=3)
      word3 -> feature1 and feature2
      word4 -> feature4 (cat1, strength=True,  weight=2), owned by no user
      word5 -> feature5 (cat1, strength=False, weight=1), owned by user2 only
      user1 owns word1-word3; user2 owns word1-word3 and word5.
    """

    @classmethod
    def setUpClass(cls):
        # Two categories so the category_id filter has something to split on.
        cls.cat1 = Category.categories.create(
            name='test_category1', weight=1, default=True)
        cls.cat2 = Category.categories.create(
            name='test_category2', weight=2, default=True)
        # Features vary in category, strength and default flag.
        cls.feature1 = WordFeature.word_features.create(
            default=True, category=cls.cat1, strength=True, weight=1)
        cls.feature2 = WordFeature.word_features.create(
            default=True, category=cls.cat2, strength=False, weight=3)
        cls.feature4 = WordFeature.word_features.create(
            default=True, category=cls.cat1, strength=True, weight=2)
        cls.feature5 = WordFeature.word_features.create(
            default=False, category=cls.cat1, strength=False, weight=1)
        cls.word1 = Word.words.create(name='word1', default=True)
        cls.word1.word_features.add(cls.feature1.id)
        cls.word2 = Word.words.create(name='word2', default=True)
        cls.word2.word_features.add(cls.feature2.id)
        cls.word3 = Word.words.create(name='word3', default=True)
        cls.word3.word_features.add(cls.feature1.id)
        cls.word3.word_features.add(cls.feature2.id)
        cls.word4 = Word.words.create(name='word4', default=True)
        cls.word4.word_features.add(cls.feature4.id)
        cls.word5 = Word.words.create(name='word5', default=False)
        cls.word5.word_features.add(cls.feature5.id)
        cls.word1.save()
        cls.word2.save()
        cls.word3.save()
        cls.word4.save()
        cls.word5.save()
        cls.user1 = User.objects.create_user(
            username='user1', password='12345')
        cls.user1.save()
        cls.user2 = User.objects.create_user(
            username='user2', password='12346')
        cls.user2.save()
        # user1 owns words 1-3 with features 1 and 2.
        cls.user_storage1 = UserStorage.user_storage.get(user=cls.user1)
        cls.user_storage1.words.add(cls.word1)
        cls.user_storage1.words.add(cls.word2)
        cls.user_storage1.words.add(cls.word3)
        cls.user_storage1.categories.add(cls.cat1)
        cls.user_storage1.categories.add(cls.cat2)
        cls.user_storage1.word_features.add(cls.feature1)
        cls.user_storage1.word_features.add(cls.feature2)
        cls.user_storage1.save()
        # user2 additionally owns word5/feature5.
        cls.user_storage2 = UserStorage.user_storage.get(user=cls.user2)
        cls.user_storage2.words.add(cls.word1)
        cls.user_storage2.words.add(cls.word2)
        cls.user_storage2.words.add(cls.word3)
        cls.user_storage2.words.add(cls.word5)
        cls.user_storage2.categories.add(cls.cat1)
        cls.user_storage2.categories.add(cls.cat2)
        cls.user_storage2.word_features.add(cls.feature1)
        cls.user_storage2.word_features.add(cls.feature2)
        cls.user_storage2.word_features.add(cls.feature5)
        cls.user_storage2.save()

    @classmethod
    def tearDownClass(cls):
        # Remove all fixture rows so other test classes start clean.
        Category.categories.all().delete()
        Word.words.all().delete()
        WordFeature.word_features.all().delete()
        UserStorage.user_storage.all().delete()
        User.objects.all().delete()

    def _entry(self, word, feature):
        """Expected result row for *word* carrying *feature*."""
        return {'word_id': word.id,
                'word': word.name,
                'category_id': feature.category.id,
                'strength': feature.strength,
                'weight': feature.weight}

    def _word4_raw_entry(self):
        # NOTE(review): 'category_id' here holds the Category *object*, not
        # its id, exactly as in the original assertions. Such a dict can
        # never equal a real result row, so assertNotIn checks built from it
        # pass vacuously. Preserved to keep test behaviour unchanged; if the
        # checks are meant to be meaningful, use self.feature4.category.id —
        # but note test_arg_strength would then need word4 to be expected IN
        # the strength=True results, since feature4.strength is True.
        return {'word_id': self.word4.id,
                'word': self.word4.name,
                'category_id': self.feature4.category,
                'strength': self.feature4.strength,
                'weight': self.feature4.weight}

    def _assert_result_shape(self, results):
        """Common sanity checks: results is a non-empty list of dicts."""
        self.assertIsInstance(results, list, msg='result is not a list')
        self.assertGreaterEqual(len(results), 1, msg='result list is empty')
        self.assertIsInstance(
            results[0], dict,
            msg='result is not a list of dictionaries')

    def test_args_none(self):
        """No filters: every word/feature pair is returned."""
        results = words_and_features()
        self._assert_result_shape(results)
        self.assertIn(self._entry(self.word1, self.feature1),
                      results, msg='missing word1')
        self.assertIn(self._entry(self.word2, self.feature2),
                      results, msg='missing word2')
        self.assertIn(self._entry(self.word3, self.feature1),
                      results, msg='missing word3, feature1')
        self.assertIn(self._entry(self.word3, self.feature2),
                      results, msg='missing word3, feature2')
        self.assertIn(self._entry(self.word4, self.feature4),
                      results, msg='missing word4')
        self.assertIn(self._entry(self.word5, self.feature5),
                      results, msg='missing word5')

    def test_arg_user_id(self):
        """user_id filter: only rows for words owned by user1."""
        results = words_and_features(user_id=self.user1.id)
        self._assert_result_shape(results)
        self.assertIn(self._entry(self.word1, self.feature1),
                      results, msg='missing word1')
        self.assertIn(self._entry(self.word2, self.feature2),
                      results, msg='missing word2')
        self.assertIn(self._entry(self.word3, self.feature1),
                      results, msg='missing word3, feature1')
        self.assertIn(self._entry(self.word3, self.feature2),
                      results, msg='missing word3, feature2')
        self.assertNotIn(
            self._word4_raw_entry(),
            results, msg='contains word4 which is not linked to this user')
        self.assertNotIn(
            self._entry(self.word5, self.feature5),
            results, msg='should not contain word5 which belongs to user2')

    def test_arg_category_id(self):
        """category_id filter: only cat1 features are returned."""
        results = words_and_features(category_id=self.cat1.id)
        self._assert_result_shape(results)
        self.assertIn(self._entry(self.word1, self.feature1),
                      results, msg='missing word1')
        self.assertNotIn(
            self._entry(self.word2, self.feature2),
            results,
            msg='contains category 2 word when it should not')
        self.assertIn(self._entry(self.word3, self.feature1),
                      results, msg='missing word3, feature1')
        self.assertNotIn(
            self._entry(self.word3, self.feature2),
            results,
            msg='contains category 2 feature in word 3 when it should not')
        self.assertIn(self._entry(self.word4, self.feature4),
                      results, msg='missing word4')
        self.assertIn(self._entry(self.word5, self.feature5),
                      results,
                      msg='missing word5')

    def test_arg_strength(self):
        """strength filter: only strength=True features are returned."""
        results = words_and_features(strength=True)
        self._assert_result_shape(results)
        self.assertIn(self._entry(self.word1, self.feature1),
                      results, msg='missing word1')
        self.assertNotIn(
            self._entry(self.word2, self.feature2),
            results,
            msg='contains word2 of Strength = False when it should not')
        self.assertIn(self._entry(self.word3, self.feature1),
                      results, msg='missing word3, feature1')
        self.assertNotIn(
            self._entry(self.word3, self.feature2),
            results,
            msg='''contains Strength = False feature in word3 when it should not
            ''')
        self.assertNotIn(
            self._word4_raw_entry(),
            results,
            msg='''contains word4 with Strength = False''')
        self.assertNotIn(
            self._entry(self.word5, self.feature5),
            results,
            msg='contains word5 of Strength = False when it should not')

    def test_arg_user_id_and_category_id(self):
        """Combined user and category filters."""
        results = words_and_features(
            user_id=self.user1.id,
            category_id=self.cat1.id)
        self._assert_result_shape(results)
        self.assertNotIn(
            self._word4_raw_entry(),
            results, msg='contains word4 which is not linked to this user')
        self.assertIn(self._entry(self.word1, self.feature1),
                      results, msg='missing word1')
        self.assertNotIn(
            self._entry(self.word2, self.feature2),
            results,
            msg='contains category 2 word when it should not')
        self.assertIn(self._entry(self.word3, self.feature1),
                      results, msg='missing word3, feature1')
        self.assertNotIn(
            self._entry(self.word3, self.feature2),
            results,
            msg='contains category 2 feature in word 3 when it should not')
        # The original asserted the word4 row twice; the duplicate is kept.
        self.assertNotIn(
            self._word4_raw_entry(),
            results, msg='contains word4 which is not linked to this user')
        self.assertNotIn(
            self._entry(self.word5, self.feature5),
            results,
            msg='contains word5 that does not belong to user1')

    def test_args_all(self):
        """All three filters combined."""
        results = words_and_features(
            user_id=self.user1.id,
            category_id=self.cat1.id,
            strength=self.feature1.strength)
        self._assert_result_shape(results)
        self.assertIn(self._entry(self.word1, self.feature1),
                      results, msg='missing word1')
        self.assertNotIn(
            self._entry(self.word2, self.feature2),
            results,
            msg='''contains word2 of Strength = False and category2 when it should not
            ''')
        self.assertIn(self._entry(self.word3, self.feature1),
                      results, msg='missing word3, feature1')
        self.assertNotIn(
            self._entry(self.word3, self.feature2),
            results,
            msg='''contains Strength = False and category2 feature in word3 when it should not
            ''')
        self.assertNotIn(
            self._entry(self.word5, self.feature5),
            results,
            msg='''contains word5 of Strength = False that does not belong
            to user1''')
| 42.70603 | 94 | 0.574278 | 1,884 | 16,997 | 5.097134 | 0.055202 | 0.052484 | 0.059773 | 0.067479 | 0.863793 | 0.828804 | 0.779132 | 0.738727 | 0.733417 | 0.730501 | 0 | 0.029437 | 0.302465 | 16,997 | 397 | 95 | 42.813602 | 0.780533 | 0 | 0 | 0.755155 | 0 | 0 | 0.179738 | 0 | 0 | 0 | 0 | 0 | 0.139175 | 1 | 0.020619 | false | 0.005155 | 0.018041 | 0 | 0.041237 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
059065958345c6fb4eae1733e2b0f6dc786e04a0 | 24,295 | py | Python | test_backtest_pkg/test_trading.py | andyhu4023/backtest_pkg | 00f57244307a740245c6419c8a52cb07e80f171a | [
"MIT"
] | 3 | 2020-04-22T09:27:33.000Z | 2021-02-04T14:55:13.000Z | test_backtest_pkg/test_trading.py | andyhu4023/backtest_pkg | 00f57244307a740245c6419c8a52cb07e80f171a | [
"MIT"
] | 1 | 2021-02-04T14:54:06.000Z | 2021-02-04T14:54:06.000Z | test_backtest_pkg/test_trading.py | andyhu4023/backtest_pkg | 00f57244307a740245c6419c8a52cb07e80f171a | [
"MIT"
] | 1 | 2021-12-18T10:03:13.000Z | 2021-12-18T10:03:13.000Z | import unittest
from pandas.testing import assert_frame_equal, assert_series_equal
import backtest_pkg as bt
import pandas as pd
import numpy as np
class TestTrading(unittest.TestCase):
    """Order creation/execution tests for bt.market + bt.trading_system.

    Fixture prices (open == close; high/low are +/-20%):
      ticker1: [1, 3, 2, 4, 3, 5] over 2020-01-01..2020-01-06
      ticker2: [5, 3, 4, 2, 3, 1] over the same dates
    """

    def setUp(self):
        """Fresh two-stock market and an empty trading system per test."""

        def construct_price_data(data):
            # Daily OHLC frame: high/low are +/-20% around the close.
            index = pd.date_range('2020-01-01', periods=len(data), freq='D')
            price_data = pd.DataFrame(dict(
                open=data,
                high=[i * 1.2 for i in data],
                low=[i * 0.8 for i in data],
                close=data,
                adj_close=data,
            ), index=index)
            return price_data

        ticker1 = 'ticker1'
        data1 = [1., 3., 2., 4., 3., 5.]
        ticker2 = 'ticker2'
        data2 = [5., 3., 4., 2., 3., 1.]
        self.universe = {ticker1, ticker2}
        self.price1 = construct_price_data(data1)
        self.price2 = construct_price_data(data2)
        self.market = bt.market()
        self.market.add_stock(ticker1, self.price1)
        self.market.add_stock(ticker2, self.price2)
        self.trading_system = bt.trading_system()

    # ------------------------------------------------------------------
    # Helpers shared by the single-order scenarios below.
    # ------------------------------------------------------------------
    def _empty_transaction(self):
        """Empty transaction frame with the system's initial dtypes."""
        frame = pd.DataFrame(columns=['Date', 'Ticker', 'Quantity'])
        return frame.astype({'Ticker': str, 'Quantity': float})

    def _place_order(self, order_type, ticker, share, price):
        """Create one order and verify it lands on the order book."""
        order = bt.Order(order_type, ticker, share, price)
        self.trading_system.create_order(order)
        self.assertEqual(self.trading_system.order_book, [order])
        return order

    def _assert_filled(self, order_type, ticker, share, price, date_str,
                       fill_price):
        """Place one order, execute it on *date_str*, and verify it filled
        at *fill_price*: one stock leg, one cash leg, empty order book."""
        self._place_order(order_type, ticker, share, price)
        date = pd.to_datetime(date_str)
        self.market.execute_orders(self.trading_system, date)
        transaction = pd.DataFrame(dict(
            Date=[date] * 2,
            Ticker=[ticker, 'Cash'],
            Quantity=[share, -share * fill_price],
        ))
        account = bt.Account(date, {
            ticker: share,
            'Cash': -share * fill_price,
        })
        assert_frame_equal(self.trading_system.transaction, transaction)
        self.assertEqual(self.trading_system.account, account)
        self.assertEqual(self.trading_system.order_book, [])

    def _assert_unfilled(self, order_type, ticker, share, price, date_str):
        """Place one order, execute on *date_str*, and verify nothing
        traded: empty transaction, empty positions, order still queued."""
        order = self._place_order(order_type, ticker, share, price)
        date = pd.to_datetime(date_str)
        self.market.execute_orders(self.trading_system, date)
        assert_frame_equal(self.trading_system.transaction,
                           self._empty_transaction())
        self.assertEqual(self.trading_system.account, (date, dict()))
        self.assertEqual(self.trading_system.order_book, [order])

    # ------------------------------------------------------------------
    # Scenarios
    # ------------------------------------------------------------------
    def test_set_up(self):
        """Market holds the stock data; trading system starts empty."""
        self.assertEqual(self.market.universe, self.universe)
        assert_frame_equal(self.market.price['ticker1'], self.price1)
        assert_frame_equal(self.market.price['ticker2'], self.price2)
        assert_frame_equal(self.trading_system.transaction,
                           self._empty_transaction())
        self.assertEqual(self.trading_system.account, (None, dict()))
        self.assertEqual(self.trading_system.order_book, list())

    def test_market_buy_order(self):
        # Market order fills at the day's open (ticker1 opens 3.0 on 01-02).
        self._assert_filled('market', 'ticker1', 1.0, None, '2020-01-02', 3.0)

    def test_market_sell_order(self):
        # ticker2 opens 4.0 on 01-03; negative share = sell.
        self._assert_filled('market', 'ticker2', -2.0, None, '2020-01-03', 4.0)

    def test_target_buy_order(self):
        # 3.2 is inside 01-02's 2.4..3.6 range -> fills at the target price.
        self._assert_filled('target', 'ticker1', 1.0, 3.2, '2020-01-02', 3.2)

    def test_target_sell_order(self):
        # 4.3 is inside ticker2's 3.2..4.8 range on 01-03.
        self._assert_filled('target', 'ticker2', -2.0, 4.3, '2020-01-03', 4.3)

    def test_target_fail_order(self):
        """Target orders priced outside the day's range stay on the book."""
        pending = []
        # Buy target 4.3 is above ticker1's 01-01 range (0.8..1.2) -> no fill.
        buy = bt.Order('target', 'ticker1', 3.0, 4.3)
        self.trading_system.create_order(buy)
        pending.append(buy)
        date = pd.to_datetime('2020-01-01')
        self.market.execute_orders(self.trading_system, date)
        assert_frame_equal(self.trading_system.transaction,
                           self._empty_transaction())
        self.assertEqual(self.trading_system.account, (date, dict()))
        self.assertEqual(self.trading_system.order_book, pending)
        # Sell target 1.5 is below ticker2's 01-03 range (3.2..4.8) -> no fill.
        sell = bt.Order('target', 'ticker2', -2.0, 1.5)
        self.trading_system.create_order(sell)
        pending.append(sell)
        self.assertEqual(self.trading_system.order_book, pending)
        date = pd.to_datetime('2020-01-03')
        self.market.execute_orders(self.trading_system, date)
        assert_frame_equal(self.trading_system.transaction,
                           self._empty_transaction())
        self.assertEqual(self.trading_system.account, (date, dict()))
        self.assertEqual(self.trading_system.order_book, pending)

    # limit_up orders against ticker1 on 01-02 (open 3.0, low 2.4, high 3.6):
    def test_limit_up_buy_open_order(self):
        # Trigger 3.5 is beyond the 3.0 open -> fills at the open price.
        self._assert_filled('limit_up', 'ticker1', 2.0, 3.5, '2020-01-02', 3.0)

    def test_limit_up_sell_open_order(self):
        self._assert_filled('limit_up', 'ticker1', -2.0, 3.5, '2020-01-02', 3.0)

    def test_limit_up_buy_intra_order(self):
        # Trigger 2.9 sits between low 2.4 and open 3.0 -> fills intraday
        # at the trigger price.
        self._assert_filled('limit_up', 'ticker1', 2.0, 2.9, '2020-01-02', 2.9)

    def test_limit_up_sell_intra_order(self):
        self._assert_filled('limit_up', 'ticker1', -2.0, 2.9, '2020-01-02', 2.9)

    def test_limit_up_buy_fail_order(self):
        # Trigger 2.1 is below the 2.4 low -> never reached.
        self._assert_unfilled('limit_up', 'ticker1', 2.0, 2.1, '2020-01-02')

    def test_limit_up_sell_fail_order(self):
        self._assert_unfilled('limit_up', 'ticker1', -2.0, 2.1, '2020-01-02')

    # limit_down orders against ticker2 on 01-02 (open 3.0, low 2.4, high 3.6):
    def test_limit_down_buy_open_order(self):
        # Trigger 2.5 is below the 3.0 open -> fills at the open price.
        self._assert_filled('limit_down', 'ticker2', 2.0, 2.5, '2020-01-02', 3.0)

    def test_limit_down_sell_open_order(self):
        self._assert_filled('limit_down', 'ticker2', -2.0, 2.5, '2020-01-02', 3.0)

    def test_limit_down_buy_intra_order(self):
        # Trigger 3.5 sits between open 3.0 and high 3.6 -> fills intraday
        # at the trigger price.
        self._assert_filled('limit_down', 'ticker2', 2.0, 3.5, '2020-01-02', 3.5)

    def test_limit_down_sell_intra_order(self):
        self._assert_filled('limit_down', 'ticker2', -2.0, 3.5, '2020-01-02', 3.5)

    def test_limit_down_buy_fail_order(self):
        # Trigger 4.1 is above ticker1's 3.6 high on 01-02 -> never reached.
        self._assert_unfilled('limit_down', 'ticker1', 2.0, 4.1, '2020-01-02')

    def test_limit_down_sell_fail_order(self):
        self._assert_unfilled('limit_down', 'ticker1', -2.0, 4.1, '2020-01-02')

    def test_multi_orders(self):
        """Three days, two new orders per day; unfillable orders stay queued
        and may fill on a later day."""
        date_range = pd.date_range('2020-01-01', periods=3, freq='D')
        order_info = [
            ('market', 'ticker1', 3.0, None),
            ('target', 'ticker1', 2.0, 1.1),
            ('target', 'ticker2', 2.0, 0.2),     # below every low -> queued
            ('limit_up', 'ticker1', 3.0, 2.0),   # fails on 01-02, fills 01-03
            ('limit_up', 'ticker2', -4.0, 1.8),  # never triggers -> queued
            ('limit_down', 'ticker2', 3.0, 4.2)
        ]
        # Create that day's two orders, then execute the day.
        for i, date in enumerate(date_range):
            self.trading_system.create_order(bt.Order(*order_info[i * 2]))
            self.trading_system.create_order(bt.Order(*order_info[i * 2 + 1]))
            self.market.execute_orders(self.trading_system, date)
        transaction = pd.DataFrame(dict(
            Date=pd.to_datetime(['2020-01-01'] * 4 + ['2020-01-03'] * 4),
            Ticker=['ticker1', 'Cash'] * 3 + ['ticker2', 'Cash'],
            Quantity=[3., -3., 2., -2.2, 3., -6., 3., -12.6],
        ))
        account = bt.Account(date_range[-1], {
            'ticker1': 8.,
            'ticker2': 3.,
            'Cash': -23.8,
        })
        order_book = [bt.Order(*order_info[2]), bt.Order(*order_info[4])]
        assert_frame_equal(self.trading_system.transaction, transaction)
        self.assertEqual(self.trading_system.account, account)
        self.assertEqual(self.trading_system.order_book, order_book)
class TestDataUtil(unittest.TestCase):
    """Tests for market price retrieval (`get_price`) with date/period bounds."""

    def setUp(self):
        def build_price_frame(closes):
            """Build an OHLC frame where high/low are +/-20% of the close."""
            idx = pd.date_range('2020-01-01', periods=len(closes), freq='D')
            return pd.DataFrame(
                dict(
                    open=closes,
                    high=[value * 1.2 for value in closes],
                    low=[value * 0.8 for value in closes],
                    close=closes,
                    adj_close=closes,
                ),
                index=idx,
            )

        # Setting up:
        ticker1, ticker2 = 'ticker1', 'ticker2'
        series1 = [1., 3., 2., 4., 3., 5.]
        series2 = [5., 3., 4., 2., 3., 1.]
        self.universe = {ticker1, ticker2}
        self.price1 = build_price_frame(series1)
        self.price2 = build_price_frame(series2)
        # Market initiation:
        self.market = bt.market()
        self.market.add_stock(ticker1, self.price1)
        self.market.add_stock(ticker2, self.price2)
        # Period data used by the start/end/period test variants below:
        self.start_str = '2020-01-02'
        self.end_str = '2020-01-04'
        self.period = 3
        self.expect_price_data = self.price1.loc[self.start_str:self.end_str, :]

    def test_price_whole_period(self):
        """With no bounds, get_price returns the full price history."""
        assert_frame_equal(self.market.get_price('ticker1'), self.price1)
        assert_frame_equal(self.market.get_price('ticker2'), self.price2)

    def test_price_start_end(self):
        """get_price accepts start/end as Timestamp, str, or a mix of both."""
        start = pd.to_datetime(self.start_str)
        end = pd.to_datetime(self.end_str)
        for bounds in ((start, end), (self.start_str, self.end_str), (start, self.end_str)):
            result = self.market.get_price('ticker1', *bounds)
            assert_frame_equal(result, self.expect_price_data)

    def test_price_start_period(self):
        """get_price accepts a start bound plus a period length."""
        for start in (pd.to_datetime(self.start_str), self.start_str):
            result = self.market.get_price('ticker1', start, period=self.period)
            assert_frame_equal(result, self.expect_price_data)

    def test_price_end_period(self):
        """get_price accepts an end bound plus a period length."""
        for end in (pd.to_datetime(self.end_str), self.end_str):
            result = self.market.get_price('ticker1', end_date=end, period=self.period)
            assert_frame_equal(result, self.expect_price_data)
| 42.178819 | 96 | 0.624203 | 2,999 | 24,295 | 4.852618 | 0.046015 | 0.106301 | 0.136673 | 0.101835 | 0.910671 | 0.906961 | 0.896035 | 0.876589 | 0.863258 | 0.848554 | 0 | 0.033808 | 0.257337 | 24,295 | 575 | 97 | 42.252174 | 0.772765 | 0.070591 | 0 | 0.789474 | 0 | 0 | 0.048789 | 0 | 0 | 0 | 0 | 0 | 0.197368 | 1 | 0.059211 | false | 0 | 0.010965 | 0 | 0.078947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
555f16050a63c575433690599e587d250fc40ff3 | 47 | py | Python | jdhapi/serializers/__init__.py | C2DH/jdhbackend | 37efb0c5a6e1ea2acf8aca477d052a4e33f9bf40 | [
"MIT"
] | null | null | null | jdhapi/serializers/__init__.py | C2DH/jdhbackend | 37efb0c5a6e1ea2acf8aca477d052a4e33f9bf40 | [
"MIT"
] | 59 | 2020-11-27T08:58:35.000Z | 2022-03-30T15:54:01.000Z | jdhapi/serializers/__init__.py | C2DH/jdhbackend | 37efb0c5a6e1ea2acf8aca477d052a4e33f9bf40 | [
"MIT"
] | null | null | null | from .abstract import CreateAbstractSerializer
| 23.5 | 46 | 0.893617 | 4 | 47 | 10.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 47 | 1 | 47 | 47 | 0.976744 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e9542cfb70730cad109774236c59616dd3e3d2c2 | 1,857 | py | Python | rosa/asserts.py | OctavianLee/Rosa | 596f3729db6134ff11c3acad9a773ae3b2ecd5c6 | [
"MIT"
] | null | null | null | rosa/asserts.py | OctavianLee/Rosa | 596f3729db6134ff11c3acad9a773ae3b2ecd5c6 | [
"MIT"
] | null | null | null | rosa/asserts.py | OctavianLee/Rosa | 596f3729db6134ff11c3acad9a773ae3b2ecd5c6 | [
"MIT"
] | null | null | null | """
Basic Assert Suites.
"""
# -*- coding: utf-8 -*-
def eq_(left, right, msg=None):
    """Fail unless ``left == right``.

    :param left: value on the left-hand side of the comparison.
    :param right: value on the right-hand side of the comparison.
    :param msg: optional message carried by the raised error.
    :raises AssertionError: when the two values compare unequal.
    """
    assert left == right, msg
def neq_(left, right, msg=None):
    """Fail unless ``left != right``.

    :param left: value on the left-hand side of the comparison.
    :param right: value on the right-hand side of the comparison.
    :param msg: optional message carried by the raised error.
    :raises AssertionError: when the two values compare equal.
    """
    assert left != right, msg
def in_(left, right, msg=None):
    """Fail unless ``left`` is a member of ``right``.

    :param left: candidate element.
    :param right: container checked for membership.
    :param msg: optional message carried by the raised error.
    :raises AssertionError: when ``left`` is not contained in ``right``.
    """
    assert left in right, msg
def nin_(left, right, msg=None):
    """Fail if ``left`` is a member of ``right``.

    :param left: candidate element.
    :param right: container checked for membership.
    :param msg: optional message carried by the raised error.
    :raises AssertionError: when ``left`` is contained in ``right``.
    """
    assert left not in right, msg
def is_ins(left, right, msg=None):
    """Fail unless ``left`` is an instance of ``right``.

    :param left: object whose type is checked.
    :param right: class or tuple of classes, as accepted by isinstance().
    :param msg: optional message carried by the raised error.
    :raises AssertionError: when ``left`` is not an instance of ``right``.
    """
    assert isinstance(left, right), msg
def eq_obj(left, right, msg=None):
    """Fail unless both objects carry identical attribute dictionaries.

    Compares ``__dict__`` of the two objects, i.e. value equality of all
    instance attributes rather than object identity.

    :param left: first object to compare.
    :param right: second object to compare.
    :param msg: optional message carried by the raised error.
    :raises AssertionError: when the attribute dictionaries differ.
    """
    assert left.__dict__ == right.__dict__, msg
| 22.373494 | 65 | 0.652666 | 268 | 1,857 | 4.470149 | 0.13806 | 0.058431 | 0.1202 | 0.180301 | 0.860601 | 0.860601 | 0.839733 | 0.839733 | 0.779633 | 0.684474 | 0 | 0.000717 | 0.248788 | 1,857 | 82 | 66 | 22.646341 | 0.858065 | 0.662897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.5 | false | 0 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
e9b07ff217bb0dd95364c6ad71a111c003615127 | 1,448 | py | Python | tests/test_tpg.py | stklik/tpg.now | ddc3350ec087ee46fe652be9a002757a41cc756a | [
"MIT"
] | 3 | 2017-01-25T08:56:21.000Z | 2017-02-22T16:27:16.000Z | tests/test_tpg.py | stklik/tpg.now | ddc3350ec087ee46fe652be9a002757a41cc756a | [
"MIT"
] | 3 | 2017-02-25T12:50:16.000Z | 2017-03-07T08:13:03.000Z | tests/test_tpg.py | stklik/tpg.now | ddc3350ec087ee46fe652be9a002757a41cc756a | [
"MIT"
] | null | null | null | import json
from unittest.mock import patch, MagicMock
from tpgnow.communicator import Communicator
from tpgnow.tpg import Tpg
class TestTpg(object):
    """Unit tests for Tpg.getLineColor backed by a mocked Communicator.

    The identical mock JSON payload that was duplicated in both test methods
    is now a single class-level constant installed by a shared helper.
    """

    # Colour payload returned by the mocked backend for every request.
    MOCK_COLOURS = """
        {"timestamp":"2017-02-20T15:42:49+0100",
        "colors":[
        {"lineCode":"18","hexa":"cc3399","background":"cc3399","text":"FFFFFF"},
        {"lineCode":"F","hexa":"FF9999","background":"FF9999","text":"000000"},
        {"lineCode":"Y","hexa":"FF9999","background":"FF9999","text":"000000"},
        {"lineCode":"Z","hexa":"FF9999","background":"FF9999","text":"000000"}]}
        """

    def _install_mock_communicator(self):
        """Replace the Communicator singleton with one serving MOCK_COLOURS."""
        mock = Communicator()
        mock.sendRequest = MagicMock(return_value=self.MOCK_COLOURS)
        Communicator.instance = mock

    def test_getLineColour_existingColourWorks(self):
        self._install_mock_communicator()
        assert Tpg.getLineColor("Y")["background"] == "FF9999"
        assert Tpg.getLineColor("Y")["foreground"] == "000000"

    def test_getLineColour_nonExistingColour_returnsNone(self):
        self._install_mock_communicator()
        # `is None` instead of `== None`: identity check per PEP 8.
        assert Tpg.getLineColor("B") is None
| 38.105263 | 72 | 0.65884 | 153 | 1,448 | 6.189542 | 0.326797 | 0.118268 | 0.126716 | 0.164731 | 0.711721 | 0.711721 | 0.711721 | 0.711721 | 0.711721 | 0.711721 | 0 | 0.117555 | 0.118785 | 1,448 | 37 | 73 | 39.135135 | 0.624608 | 0 | 0 | 0.6875 | 0 | 0 | 0.497928 | 0.450276 | 0 | 0 | 0 | 0 | 0.09375 | 1 | 0.0625 | false | 0 | 0.125 | 0 | 0.21875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e9b328b7733a2c82ee6936fcec7abe16015f5be2 | 13,751 | py | Python | pelion_systest_lib/cloud/libraries/device_directory.py | AnotherButler/e2e-edge-test-suite | 05d01922bc74d9ea4564a7561342ea428977ebff | [
"Apache-2.0"
] | null | null | null | pelion_systest_lib/cloud/libraries/device_directory.py | AnotherButler/e2e-edge-test-suite | 05d01922bc74d9ea4564a7561342ea428977ebff | [
"Apache-2.0"
] | 1 | 2021-07-30T20:43:56.000Z | 2021-08-06T19:40:24.000Z | pelion_systest_lib/cloud/libraries/device_directory.py | AnotherButler/e2e-edge-test-suite | 05d01922bc74d9ea4564a7561342ea428977ebff | [
"Apache-2.0"
] | 2 | 2021-07-29T15:47:25.000Z | 2022-03-07T08:38:20.000Z | # ----------------------------------------------------------------------------
# Copyright (c) 2020-2021, Pelion and affiliates.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
This module is for cloud's Device Directory API functions
"""
class DeviceDirectoryAPI:
    """Client wrapper for the cloud's Device Directory REST endpoints.

    https://www.pelion.com/docs/device-management/current/service-api-references/device-directory.html
    """

    def __init__(self, rest_api):
        """Initialize the Device Directory library.

        :param rest_api: RestAPI object used for every HTTP call
        """
        self.api_version = 'v3'
        self.cloud_api = rest_api

    def create_device(self, device_data, api_key=None, expected_status_code=None):
        """Create a device.

        :param device_data: Device data payload
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: POST /devices response
        """
        url = '/{}/devices'.format(self.api_version)
        return self.cloud_api.post(url, api_key, device_data,
                                   expected_status_code=expected_status_code)

    def delete_device(self, device_id, api_key=None, expected_status_code=None):
        """Delete the defined device.

        :param device_id: Device id
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: DELETE /devices/{device_id} response
        """
        url = '/{}/devices/{}'.format(self.api_version, device_id)
        return self.cloud_api.delete(url, api_key,
                                     expected_status_code=expected_status_code)

    def get_device(self, device_id, api_key=None, expected_status_code=None):
        """Get one device by id.

        :param device_id: Device id
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: GET /devices/{device_id} response
        """
        url = '/{}/devices/{}'.format(self.api_version, device_id)
        return self.cloud_api.get(url, api_key,
                                  expected_status_code=expected_status_code)

    def get_devices(self, query_params=None, api_key=None, expected_status_code=None):
        """Get all devices.

        :param query_params: e.g.{'limit': '1000', 'include': 'total_count'}
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: GET /devices response
        """
        url = '/{}/devices'.format(self.api_version)
        return self.cloud_api.get(url, api_key, params=query_params,
                                  expected_status_code=expected_status_code)

    def suspend_device(self, device_id, block, api_key=None, expected_status_code=None):
        """Suspend a device.

        :param device_id: Device id
        :param block: Suspension block
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: POST /devices/{device_id}/suspend response
        """
        url = '/{}/devices/{}/suspend'.format(self.api_version, device_id)
        return self.cloud_api.post(url, api_key, block,
                                   expected_status_code=expected_status_code)

    def resume_device(self, device_id, block, api_key=None, expected_status_code=None):
        """Resume a suspended device.

        :param device_id: Device id
        :param block: Suspension block
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: POST /devices/{device_id}/resume response
        """
        url = '/{}/devices/{}/resume'.format(self.api_version, device_id)
        return self.cloud_api.post(url, api_key, block,
                                   expected_status_code=expected_status_code)

    def update_device_info(self, device_id, new_device_info, api_key=None, expected_status_code=None):
        """Update the device info.

        :param device_id: Device id
        :param new_device_info: New device data payload
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: PUT /devices/{device_id} response
        """
        url = '/{}/devices/{}'.format(self.api_version, device_id)
        return self.cloud_api.put(url, api_key, new_device_info,
                                  expected_status_code=expected_status_code)

    def get_device_events(self, query_params=None, api_key=None, expected_status_code=None):
        """Get device events.

        :param query_params: e.g.{'limit': '1000', 'include': 'total_count'}
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: GET /device-events response
        """
        url = '/{}/device-events'.format(self.api_version)
        return self.cloud_api.get(url, api_key, params=query_params,
                                  expected_status_code=expected_status_code)

    def create_device_query(self, device_query_data, api_key=None, expected_status_code=None):
        """Create a device query.

        :param device_query_data: Device query payload
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: POST /device-queries response
        """
        url = '/{}/device-queries'.format(self.api_version)
        return self.cloud_api.post(url, api_key, device_query_data,
                                   expected_status_code=expected_status_code)

    def get_device_queries(self, query_params=None, api_key=None, expected_status_code=None):
        """Get device queries.

        :param query_params: e.g.{'limit': '1000', 'include': 'total_count'}
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: GET /device-queries response
        """
        url = '/{}/device-queries'.format(self.api_version)
        return self.cloud_api.get(url, api_key, params=query_params,
                                  expected_status_code=expected_status_code)

    def update_device_query(self, device_query_id, device_query_data, api_key=None, expected_status_code=None):
        """Update a device query.

        :param device_query_id: Device query id
        :param device_query_data: Device query payload
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: PUT /device-queries response
        """
        url = '/{}/device-queries/{}'.format(self.api_version, device_query_id)
        return self.cloud_api.put(url, api_key, device_query_data,
                                  expected_status_code=expected_status_code)

    def delete_device_query(self, device_query_id, api_key=None, expected_status_code=None):
        """Delete the defined device query.

        :param device_query_id: Device query id
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: DELETE /device-queries/{device_query_id} response
        """
        url = '/{}/device-queries/{}'.format(self.api_version, device_query_id)
        return self.cloud_api.delete(url, api_key,
                                     expected_status_code=expected_status_code)

    def create_device_group(self, device_group_data, api_key=None, expected_status_code=None):
        """Create a device group.

        :param device_group_data: Device group payload
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: POST /device-groups response
        """
        url = '/{}/device-groups'.format(self.api_version)
        return self.cloud_api.post(url, api_key, device_group_data,
                                   expected_status_code=expected_status_code)

    def delete_device_group(self, device_group_id, api_key=None, expected_status_code=None):
        """Delete the defined device group.

        :param device_group_id: Device group id
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: DELETE /device-groups/{device_group_id} response
        """
        url = '/{}/device-groups/{}'.format(self.api_version, device_group_id)
        return self.cloud_api.delete(url, api_key,
                                     expected_status_code=expected_status_code)

    def get_device_group(self, device_group_id, api_key=None, expected_status_code=None):
        """Get one device group by id.

        :param device_group_id: Device group id
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: GET /device-groups/{device_group_id} response
        """
        url = '/{}/device-groups/{}'.format(self.api_version, device_group_id)
        return self.cloud_api.get(url, api_key,
                                  expected_status_code=expected_status_code)

    def get_device_groups(self, query_params=None, api_key=None, expected_status_code=None):
        """Get all device groups.

        :param query_params: e.g.{'limit': '1000', 'include': 'total_count'}
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: GET /device-groups response
        """
        url = '/{}/device-groups'.format(self.api_version)
        return self.cloud_api.get(url, api_key, params=query_params,
                                  expected_status_code=expected_status_code)

    def update_device_group(self, device_group_id, device_group_data, api_key=None, expected_status_code=None):
        """Update a device group.

        :param device_group_id: Device group id
        :param device_group_data: Device group payload
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: PUT /device-groups/{device_group_id} response
        """
        url = '/{}/device-groups/{}'.format(self.api_version, device_group_id)
        return self.cloud_api.put(url, api_key, device_group_data,
                                  expected_status_code=expected_status_code)

    def add_device_to_device_group(self, device_group_id, device_id, api_key=None, expected_status_code=None):
        """Add a device to a device group.

        :param device_group_id: Device group id
        :param device_id: Device id
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: POST /device-groups/{device_group_id}/devices/add response
        """
        url = '/{}/device-groups/{}/devices/add'.format(self.api_version, device_group_id)
        return self.cloud_api.post(url, api_key, {"device_id": device_id},
                                   expected_status_code=expected_status_code)

    def get_devices_from_device_group(self, device_group_id, api_key=None, expected_status_code=None):
        """Get the devices belonging to a device group.

        :param device_group_id: Device group id
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: GET /device-groups/{device_group_id}/devices/ response
        """
        url = '/{}/device-groups/{}/devices/'.format(self.api_version, device_group_id)
        return self.cloud_api.get(url, api_key,
                                  expected_status_code=expected_status_code)

    def remove_device_from_device_group(self, device_group_id, device_id, api_key=None, expected_status_code=None):
        """Remove a device from a device group.

        :param device_group_id: Device group id
        :param device_id: Device id
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: POST /device-groups/{device_group_id}/devices/remove response
        """
        url = '/{}/device-groups/{}/devices/remove'.format(self.api_version, device_group_id)
        return self.cloud_api.post(url, api_key, {"device_id": device_id},
                                   expected_status_code=expected_status_code)

    def get_device_block_categories(self, query_params=None, api_key=None, expected_status_code=None):
        """Get device block categories.

        :param query_params: e.g.{'limit': '1000', 'include': 'total_count'}
        :param api_key: Authentication key
        :param expected_status_code: Asserts the result in the function
        :return: GET /device-block-categories response
        """
        url = '/{}/device-block-categories'.format(self.api_version)
        return self.cloud_api.get(url, api_key, params=query_params,
                                  expected_status_code=expected_status_code)
| 42.704969 | 115 | 0.67486 | 1,818 | 13,751 | 4.822332 | 0.081958 | 0.134139 | 0.172465 | 0.043116 | 0.860386 | 0.850918 | 0.829588 | 0.826166 | 0.821832 | 0.821832 | 0 | 0.003277 | 0.223329 | 13,751 | 321 | 116 | 42.838006 | 0.817603 | 0.425351 | 0 | 0.577778 | 0 | 0 | 0.066344 | 0.031434 | 0 | 0 | 0 | 0 | 0 | 1 | 0.244444 | false | 0 | 0 | 0 | 0.488889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e9c0fca03fff6232fdb18a9879de1ce01d8b7000 | 26,808 | py | Python | quay/api/repositorynotification_api.py | angeiv/python-quay | 16072f87956d8f581ac9ebccc67f6563e977cf52 | [
"MIT"
] | null | null | null | quay/api/repositorynotification_api.py | angeiv/python-quay | 16072f87956d8f581ac9ebccc67f6563e977cf52 | [
"MIT"
] | null | null | null | quay/api/repositorynotification_api.py | angeiv/python-quay | 16072f87956d8f581ac9ebccc67f6563e977cf52 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Quay Frontend
This API allows you to perform many of the operations required to work with Quay repositories, users, and organizations. You can find out more at <a href=\"https://quay.io\">Quay</a>. # noqa: E501
OpenAPI spec version: v1
Contact: support@quay.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from quay.api_client import ApiClient
class RepositorynotificationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_repo_notification(self, body, repository, **kwargs): # noqa: E501
"""create_repo_notification # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_repo_notification(body, repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param NotificationCreateRequest body: Request body contents. (required)
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_repo_notification_with_http_info(body, repository, **kwargs) # noqa: E501
else:
(data) = self.create_repo_notification_with_http_info(body, repository, **kwargs) # noqa: E501
return data
def create_repo_notification_with_http_info(self, body, repository, **kwargs):  # noqa: E501
    """create_repo_notification  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_repo_notification_with_http_info(body, repository, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param NotificationCreateRequest body: Request body contents. (required)
    :param str repository: The full path of the repository. e.g. namespace/name (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """

    # Parameters accepted by this call: the API parameters plus the
    # generic call-control kwargs recognized by api_client.call_api.
    all_params = ['body', 'repository']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the local namespace: self, body, repository and kwargs.
    params = locals()
    # Reject unknown keyword arguments, merging recognized ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_repo_notification" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_repo_notification`")  # noqa: E501
    # verify the required parameter 'repository' is set
    if ('repository' not in params or
            params['repository'] is None):
        raise ValueError("Missing the required parameter `repository` when calling `create_repo_notification`")  # noqa: E501

    collection_formats = {}

    # `repository` is substituted into the URL template passed to call_api.
    path_params = {}
    if 'repository' in params:
        path_params['repository'] = params['repository']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2_implicit']  # noqa: E501

    # Returns the deserialized response data, or the request thread
    # when async_req was passed.
    return self.api_client.call_api(
        '/api/v1/repository/{repository}/notification/', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_repo_notification(self, uuid, repository, **kwargs): # noqa: E501
"""delete_repo_notification # noqa: E501
Deletes the specified notification. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_repo_notification(uuid, repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str uuid: The UUID of the notification (required)
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_repo_notification_with_http_info(uuid, repository, **kwargs) # noqa: E501
else:
(data) = self.delete_repo_notification_with_http_info(uuid, repository, **kwargs) # noqa: E501
return data
def delete_repo_notification_with_http_info(self, uuid, repository, **kwargs):  # noqa: E501
    """delete_repo_notification  # noqa: E501

    Deletes the specified notification.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_repo_notification_with_http_info(uuid, repository, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str uuid: The UUID of the notification (required)
    :param str repository: The full path of the repository. e.g. namespace/name (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """

    # Parameters accepted by this call: the API parameters plus the
    # generic call-control kwargs recognized by api_client.call_api.
    all_params = ['uuid', 'repository']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the local namespace: self, uuid, repository and kwargs.
    params = locals()
    # Reject unknown keyword arguments, merging recognized ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_repo_notification" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'uuid' is set
    if ('uuid' not in params or
            params['uuid'] is None):
        raise ValueError("Missing the required parameter `uuid` when calling `delete_repo_notification`")  # noqa: E501
    # verify the required parameter 'repository' is set
    if ('repository' not in params or
            params['repository'] is None):
        raise ValueError("Missing the required parameter `repository` when calling `delete_repo_notification`")  # noqa: E501

    collection_formats = {}

    # `uuid` and `repository` are substituted into the URL template below.
    path_params = {}
    if 'uuid' in params:
        path_params['uuid'] = params['uuid']  # noqa: E501
    if 'repository' in params:
        path_params['repository'] = params['repository']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['*/*'])  # noqa: E501

    # Authentication setting
    auth_settings = ['oauth2_implicit']  # noqa: E501

    # Returns the deserialized response data, or the request thread
    # when async_req was passed.
    return self.api_client.call_api(
        '/api/v1/repository/{repository}/notification/{uuid}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_repo_notification(self, uuid, repository, **kwargs): # noqa: E501
"""get_repo_notification # noqa: E501
Get information for the specified notification. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_repo_notification(uuid, repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str uuid: The UUID of the notification (required)
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_repo_notification_with_http_info(uuid, repository, **kwargs) # noqa: E501
else:
(data) = self.get_repo_notification_with_http_info(uuid, repository, **kwargs) # noqa: E501
return data
def get_repo_notification_with_http_info(self, uuid, repository, **kwargs): # noqa: E501
"""get_repo_notification # noqa: E501
Get information for the specified notification. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_repo_notification_with_http_info(uuid, repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str uuid: The UUID of the notification (required)
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['uuid', 'repository'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_repo_notification" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'uuid' is set
if ('uuid' not in params or
params['uuid'] is None):
raise ValueError("Missing the required parameter `uuid` when calling `get_repo_notification`") # noqa: E501
# verify the required parameter 'repository' is set
if ('repository' not in params or
params['repository'] is None):
raise ValueError("Missing the required parameter `repository` when calling `get_repo_notification`") # noqa: E501
collection_formats = {}
path_params = {}
if 'uuid' in params:
path_params['uuid'] = params['uuid'] # noqa: E501
if 'repository' in params:
path_params['repository'] = params['repository'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_implicit'] # noqa: E501
return self.api_client.call_api(
'/api/v1/repository/{repository}/notification/{uuid}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_repo_notifications(self, repository, **kwargs): # noqa: E501
"""list_repo_notifications # noqa: E501
List the notifications for the specified repository. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_repo_notifications(repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_repo_notifications_with_http_info(repository, **kwargs) # noqa: E501
else:
(data) = self.list_repo_notifications_with_http_info(repository, **kwargs) # noqa: E501
return data
def list_repo_notifications_with_http_info(self, repository, **kwargs): # noqa: E501
"""list_repo_notifications # noqa: E501
List the notifications for the specified repository. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_repo_notifications_with_http_info(repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['repository'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_repo_notifications" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'repository' is set
if ('repository' not in params or
params['repository'] is None):
raise ValueError("Missing the required parameter `repository` when calling `list_repo_notifications`") # noqa: E501
collection_formats = {}
path_params = {}
if 'repository' in params:
path_params['repository'] = params['repository'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_implicit'] # noqa: E501
return self.api_client.call_api(
'/api/v1/repository/{repository}/notification/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def reset_repository_notification_failures(self, uuid, repository, **kwargs): # noqa: E501
"""reset_repository_notification_failures # noqa: E501
Resets repository notification to 0 failures. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.reset_repository_notification_failures(uuid, repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str uuid: The UUID of the notification (required)
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.reset_repository_notification_failures_with_http_info(uuid, repository, **kwargs) # noqa: E501
else:
(data) = self.reset_repository_notification_failures_with_http_info(uuid, repository, **kwargs) # noqa: E501
return data
def reset_repository_notification_failures_with_http_info(self, uuid, repository, **kwargs): # noqa: E501
"""reset_repository_notification_failures # noqa: E501
Resets repository notification to 0 failures. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.reset_repository_notification_failures_with_http_info(uuid, repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str uuid: The UUID of the notification (required)
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['uuid', 'repository'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method reset_repository_notification_failures" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'uuid' is set
if ('uuid' not in params or
params['uuid'] is None):
raise ValueError("Missing the required parameter `uuid` when calling `reset_repository_notification_failures`") # noqa: E501
# verify the required parameter 'repository' is set
if ('repository' not in params or
params['repository'] is None):
raise ValueError("Missing the required parameter `repository` when calling `reset_repository_notification_failures`") # noqa: E501
collection_formats = {}
path_params = {}
if 'uuid' in params:
path_params['uuid'] = params['uuid'] # noqa: E501
if 'repository' in params:
path_params['repository'] = params['repository'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_implicit'] # noqa: E501
return self.api_client.call_api(
'/api/v1/repository/{repository}/notification/{uuid}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_repo_notification(self, uuid, repository, **kwargs): # noqa: E501
"""test_repo_notification # noqa: E501
Queues a test notification for this repository. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_repo_notification(uuid, repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str uuid: The UUID of the notification (required)
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.test_repo_notification_with_http_info(uuid, repository, **kwargs) # noqa: E501
else:
(data) = self.test_repo_notification_with_http_info(uuid, repository, **kwargs) # noqa: E501
return data
def test_repo_notification_with_http_info(self, uuid, repository, **kwargs): # noqa: E501
"""test_repo_notification # noqa: E501
Queues a test notification for this repository. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_repo_notification_with_http_info(uuid, repository, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str uuid: The UUID of the notification (required)
:param str repository: The full path of the repository. e.g. namespace/name (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['uuid', 'repository'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_repo_notification" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'uuid' is set
if ('uuid' not in params or
params['uuid'] is None):
raise ValueError("Missing the required parameter `uuid` when calling `test_repo_notification`") # noqa: E501
# verify the required parameter 'repository' is set
if ('repository' not in params or
params['repository'] is None):
raise ValueError("Missing the required parameter `repository` when calling `test_repo_notification`") # noqa: E501
collection_formats = {}
path_params = {}
if 'uuid' in params:
path_params['uuid'] = params['uuid'] # noqa: E501
if 'repository' in params:
path_params['repository'] = params['repository'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2_implicit'] # noqa: E501
return self.api_client.call_api(
'/api/v1/repository/{repository}/notification/{uuid}/test', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 41.498452 | 201 | 0.620822 | 3,032 | 26,808 | 5.264182 | 0.060026 | 0.047115 | 0.030073 | 0.036088 | 0.948625 | 0.94236 | 0.937347 | 0.92394 | 0.912787 | 0.911033 | 0 | 0.01591 | 0.289578 | 26,808 | 645 | 202 | 41.562791 | 0.822158 | 0.341279 | 0 | 0.801724 | 0 | 0 | 0.203652 | 0.070457 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037356 | false | 0 | 0.011494 | 0 | 0.103448 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e9d38e989056cc6724b3e8473b1ec926b2c11a4b | 4,093 | py | Python | web_mining/html_table_parsing_test.py | gongchengshi/python_web_mining | becaf8f2e7e3ad007cda932a0f37b3f23741862b | [
"MIT"
] | null | null | null | web_mining/html_table_parsing_test.py | gongchengshi/python_web_mining | becaf8f2e7e3ad007cda932a0f37b3f23741862b | [
"MIT"
] | null | null | null | web_mining/html_table_parsing_test.py | gongchengshi/python_web_mining | becaf8f2e7e3ad007cda932a0f37b3f23741862b | [
"MIT"
] | null | null | null | from pprint import pprint
from html_table_parsing import *
s = """<table>
<tr><th>Event</th><th>Start Date</th><th>End Date</th></tr>
<tr><td>a</td><td>b</td><td>c</td></tr>
<tr><td>d</td><td>e</td><td>f</td></tr>
<tr><td>g</td><td>h</td><td>i</td></tr>
</table>
"""
s1 = """
<table border="0" cellpadding="3" cellspacing="1" width="100%">
<tr>
<td align="right" bgcolor="CCCCCC" nowrap="" valign="middle" width="15%"><font class="text"><b>Title</b></font></td>
<td align="left" bgcolor="CCCCCC" nowrap="" valign="middle" width="25%"><font class="text"><b>Contact</b></font></td>
<td align="left" bgcolor="CCCCCC" nowrap="" valign="middle" width="10%"><font class="text"><b>QC Date</b></font></td>
<td align="left" bgcolor="CCCCCC" nowrap="" valign="middle" width="15%"><font class="text"><b>Telephone</b></font></td>
<td align="left" bgcolor="CCCCCC" nowrap="" valign="middle" width="10%"><font class="text"><b>On-Site</b></font></td>
<td align="left" bgcolor="CCCCCC" colspan="2" nowrap="" valign="middle" width="25%"><font class="text"><b>E-Mail</b></font></td>
</tr>
<tr>
<td align="right" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Plant Manager</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Wade Cline</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Mar-2013</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">(713) 425-6520 </font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Yes</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width="25%"><font class='text'><a href='mailto:wcline@drkwf.com'>wcline@drkwf.com</a></font></td>
</tr>
<tr>
<td align="right" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Maintenance Manager</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Jon Doyle</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Dec-2012</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">() 425-6520 </font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Yes</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width="25%"></td>
</tr>
<tr>
<td align="right" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Utilities Manager</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Robert Kelly</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">Dec-2012</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">(713) 425-6520 103</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width=""><font class="text">No</font></td>
<td align="left" bgcolor="EEEEEE" nowrap="" valign="middle" width="25%"><font class='text'><a href='mailto:rkelly@dkrwf.com'>rkelly@dkrwf.com</a></font></td>
</tr>
</table>
"""
def test_parse_simple_table_s1():
result = parse_simple_table(s1, has_headers=True)
pprint(result)
result = parse_simple_table(s1, has_headers=False)
pprint(result)
def test_parse_simple_table():
result = parse_simple_table(s, has_headers=True)
pprint(result)
result = parse_simple_table(s, has_headers=False)
pprint(result)
def test_table_to_2d_dict():
import lxml.html
from pprint import pprint
doc = lxml.html.parse('tables.html')
for table_el in doc.xpath('//table'):
dct = table_to_2d_dict(table_el)
table = list(iter_2d_dict(dct))
pprint(table)
test_parse_simple_table_s1()
| 53.855263 | 165 | 0.622771 | 588 | 4,093 | 4.27381 | 0.178571 | 0.041385 | 0.171906 | 0.219658 | 0.79228 | 0.755273 | 0.755273 | 0.708317 | 0.695981 | 0.635893 | 0 | 0.021435 | 0.145126 | 4,093 | 75 | 166 | 54.573333 | 0.696771 | 0 | 0 | 0.34375 | 0 | 0.4375 | 0.82238 | 0.169069 | 0 | 0 | 0 | 0 | 0 | 1 | 0.046875 | false | 0 | 0.0625 | 0 | 0.109375 | 0.109375 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7597520ce94caeae8349df7d0ee5aa41c8ca2eeb | 2,868 | py | Python | tests/test_int_match.py | markpeek/tinyber-test | 79a409d764ead36d7b77e5d4c5c1a74388c46aeb | [
"Apache-2.0"
] | 13 | 2015-07-16T21:14:33.000Z | 2022-02-09T07:55:57.000Z | tests/test_int_match.py | markpeek/tinyber-test | 79a409d764ead36d7b77e5d4c5c1a74388c46aeb | [
"Apache-2.0"
] | null | null | null | tests/test_int_match.py | markpeek/tinyber-test | 79a409d764ead36d7b77e5d4c5c1a74388c46aeb | [
"Apache-2.0"
] | 10 | 2015-10-05T18:31:00.000Z | 2022-02-09T07:56:54.000Z | import unittest
from tinyber.c_nodes import int_max_size_type
class TestBasic(unittest.TestCase):
def test_int8(self):
size = "int8_t"
self.assertEqual(int_max_size_type(-2**7, 0), size)
self.assertEqual(int_max_size_type(-1, 0), size)
self.assertEqual(int_max_size_type(-1, 2**7 - 1), size)
self.assertEqual(int_max_size_type(-2**7, 2**7 - 1), size)
self.assertNotEqual(int_max_size_type(0, 2**7), size)
self.assertNotEqual(int_max_size_type(0, 2**7 - 1), size)
def test_int16(self):
size = "int16_t"
self.assertEqual(int_max_size_type(-1, 256), size)
self.assertEqual(int_max_size_type(-1, 2**15 - 1), size)
self.assertEqual(int_max_size_type(-2**15, 2**15 - 1), size)
self.assertNotEqual(int_max_size_type(0, 2**15), size)
self.assertNotEqual(int_max_size_type(0, 2**15 - 1), size)
def test_int32(self):
size = "int32_t"
self.assertEqual(int_max_size_type(-1, 2**16), size)
self.assertEqual(int_max_size_type(-1, 2**31 - 1), size)
self.assertEqual(int_max_size_type(-2**31, 2**31 - 1), size)
self.assertNotEqual(int_max_size_type(-1, 2**31), size)
self.assertNotEqual(int_max_size_type(0, 2**31), size)
def test_int64(self):
size = "int64_t"
self.assertEqual(int_max_size_type(-1, 2**32), size)
self.assertEqual(int_max_size_type(-1, 2**63 - 1), size)
self.assertEqual(int_max_size_type(-2**63, 2**63 - 1), size)
self.assertNotEqual(int_max_size_type(0, 2**63), size)
with self.assertRaises(NotImplementedError):
int_max_size_type(-1, 2**63)
with self.assertRaises(NotImplementedError):
int_max_size_type(-2**64, 0)
def test_uint8(self):
size = "uint8_t"
self.assertEqual(int_max_size_type(0, 0), size)
self.assertEqual(int_max_size_type(0, 2**8 - 1), size)
# self.assertNotEqual(int_max_size_type(0, -1), size)
self.assertNotEqual(int_max_size_type(0, 2**8), size)
def test_uint16(self):
size = "uint16_t"
self.assertEqual(int_max_size_type(0, 256), size)
self.assertEqual(int_max_size_type(0, 2**16 - 1), size)
self.assertNotEqual(int_max_size_type(0, 2**16), size)
def test_uint32(self):
size = "uint32_t"
self.assertEqual(int_max_size_type(0, 2**16), size)
self.assertEqual(int_max_size_type(0, 2**32 - 1), size)
self.assertNotEqual(int_max_size_type(0, 2**32), size)
def test_uint64(self):
size = "uint64_t"
self.assertEqual(int_max_size_type(0, 2**32), size)
self.assertEqual(int_max_size_type(0, 2**64 - 1), size)
with self.assertRaises(NotImplementedError):
int_max_size_type(0, 2**64)
if __name__ == '__main__':
unittest.main()
| 39.833333 | 68 | 0.642957 | 442 | 2,868 | 3.871041 | 0.10181 | 0.126242 | 0.210403 | 0.294565 | 0.80187 | 0.793103 | 0.793103 | 0.78083 | 0.644068 | 0.184687 | 0 | 0.078222 | 0.215481 | 2,868 | 71 | 69 | 40.394366 | 0.682222 | 0.017782 | 0 | 0.051724 | 0 | 0 | 0.023446 | 0 | 0 | 0 | 0 | 0 | 0.586207 | 1 | 0.137931 | false | 0 | 0.034483 | 0 | 0.189655 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
75a5a1b7231071fc75def8e6a57195e9f49c5da8 | 3,976 | py | Python | queue.py | edwintcloud/cs1.3_exercises | cd9cf117995a4bfc1ff8cfefac157a05179ee44f | [
"MIT"
] | null | null | null | queue.py | edwintcloud/cs1.3_exercises | cd9cf117995a4bfc1ff8cfefac157a05179ee44f | [
"MIT"
] | 3 | 2019-04-20T08:50:05.000Z | 2019-05-18T17:08:16.000Z | queue.py | edwintcloud/cs1.3_exercises | cd9cf117995a4bfc1ff8cfefac157a05179ee44f | [
"MIT"
] | null | null | null | #!python
from linkedlist import LinkedList
# Implement LinkedQueue below, then change the assignment at the bottom
# to use this Queue implementation to verify it passes all tests
class LinkedQueue(object):
def __init__(self, iterable=None):
"""Initialize this queue and enqueue the given items, if any."""
# Initialize a new linked list to store the items
self.list = LinkedList()
if iterable is not None:
for item in iterable:
self.enqueue(item)
def __repr__(self):
"""Return a string representation of this queue."""
return 'Queue({} items, front={})'.format(self.length(), self.front())
def is_empty(self):
"""Return True if this queue is empty, or False otherwise."""
return self.list.is_empty()
def length(self):
"""Return the number of items in this queue."""
return self.list.length()
def enqueue(self, item):
"""Insert the given item at the back of this queue.
Best Case: O(1) Worse Case: O(1)"""
self.list.append(item)
def front(self):
"""Return the item at the front of this queue without removing it,
or None if this queue is empty."""
# return None if queue is empty
if self.is_empty():
return None
# return first item in queue
return self.list.get_at_index(0)
def dequeue(self):
"""Remove and return the item at the front of this queue,
or raise ValueError if this queue is empty.
Best Case: O(n) Worse Case: O(n)"""
# raise value error if queue is empty
if self.list.is_empty():
raise ValueError("queue is empty")
# get first item in queue
first_item = self.front()
# remove item from queue
self.list.delete(first_item)
# return first item in queue
return first_item
# Implement ArrayQueue below, then change the assignment at the bottom
# to use this Queue implementation to verify it passes all tests
class ArrayQueue(object):
def __init__(self, iterable=None):
"""Initialize this queue and enqueue the given items, if any."""
# Initialize a new list (dynamic array) to store the items
self.list = list()
if iterable is not None:
for item in iterable:
self.enqueue(item)
def __repr__(self):
"""Return a string representation of this queue."""
return 'Queue({} items, front={})'.format(self.length(), self.front())
def is_empty(self):
"""Return True if this queue is empty, or False otherwise."""
return self.length() <= 0
def length(self):
"""Return the number of items in this queue."""
return len(self.list)
def enqueue(self, item):
"""Insert the given item at the back of this queue.
Best Case: O(1) Worse Case: O(1)"""
self.list.append(item)
def front(self):
"""Return the item at the front of this queue without removing it,
or None if this queue is empty."""
# return None if queue is empty
if self.is_empty():
return None
# return first item in queue
return self.list[0]
def dequeue(self):
"""Remove and return the item at the front of this queue,
or raise ValueError if this queue is empty.
Best Case: O(n) Worse Case: O(n)"""
# raise value error if queue is empty
if self.is_empty():
raise ValueError("queue is empty")
# get first item in queue
first_item = self.front()
# remove first item from queue
self.list.remove(first_item)
# return first item in queue
return first_item
# Implement LinkedQueue and ArrayQueue above, then change the assignment below
# to use each of your Queue implementations to verify they each pass all tests
# Queue = LinkedQueue
Queue = ArrayQueue
| 31.0625 | 78 | 0.620473 | 552 | 3,976 | 4.413043 | 0.168478 | 0.073892 | 0.059113 | 0.03202 | 0.855501 | 0.838259 | 0.819376 | 0.819376 | 0.819376 | 0.819376 | 0 | 0.00249 | 0.293008 | 3,976 | 127 | 79 | 31.307087 | 0.864105 | 0.482143 | 0 | 0.7 | 0 | 0 | 0.040988 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.28 | false | 0 | 0.02 | 0 | 0.58 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
75b2b07dd908c4d8a5ce56db0a45573f22d127d7 | 47 | py | Python | src/interpreter/functions/index.py | incrementals/b-star | 325bb51eafd5c5173582bf065b82d10ef9669275 | [
"MIT"
] | 2 | 2021-11-02T04:28:32.000Z | 2021-11-05T14:27:08.000Z | src/interpreter/functions/index.py | incrementals/b-star | 325bb51eafd5c5173582bf065b82d10ef9669275 | [
"MIT"
] | 6 | 2022-01-07T22:49:19.000Z | 2022-03-11T05:39:04.000Z | src/interpreter/functions/index.py | incrementals/b-star | 325bb51eafd5c5173582bf065b82d10ef9669275 | [
"MIT"
] | 4 | 2021-11-26T01:38:32.000Z | 2022-02-27T20:54:08.000Z | def index(arr, number):
return arr[number]
| 15.666667 | 23 | 0.680851 | 7 | 47 | 4.571429 | 0.714286 | 0.5625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.191489 | 47 | 2 | 24 | 23.5 | 0.842105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
75c62dd6a877da8b4faed8523fa27827968fde3d | 139 | py | Python | door_phone/main.py | averak/2021_f | aac9eb5bdaf99e16760ce8b706be5beb3e14c948 | [
"MIT"
] | null | null | null | door_phone/main.py | averak/2021_f | aac9eb5bdaf99e16760ce8b706be5beb3e14c948 | [
"MIT"
] | 4 | 2021-03-14T06:27:30.000Z | 2021-03-21T04:42:17.000Z | door_phone/main.py | averak/2021_f | aac9eb5bdaf99e16760ce8b706be5beb3e14c948 | [
"MIT"
] | 3 | 2021-03-12T11:33:43.000Z | 2021-03-14T01:12:13.000Z | #!/usr/bin/env python3
from core import config
from core import demo
from core import message
if __name__ == '__main__':
demo.Demo()
| 15.444444 | 26 | 0.726619 | 21 | 139 | 4.428571 | 0.619048 | 0.258065 | 0.451613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008772 | 0.179856 | 139 | 8 | 27 | 17.375 | 0.807018 | 0.151079 | 0 | 0 | 0 | 0 | 0.068376 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
75f9eb23f7ca5ed75d65b2ce83e76c4e66d40008 | 24,850 | py | Python | gen_params.py | jireh-father/InsightFace_Pytorch | 6d635cfabe88b15e6a65d1965c48b9266d71e7ea | [
"MIT"
] | null | null | null | gen_params.py | jireh-father/InsightFace_Pytorch | 6d635cfabe88b15e6a65d1965c48b9266d71e7ea | [
"MIT"
] | null | null | null | gen_params.py | jireh-father/InsightFace_Pytorch | 6d635cfabe88b15e6a65d1965c48b9266d71e7ea | [
"MIT"
] | null | null | null | color_joined = [(128, 0, 0, 255), (139, 0, 0, 255), (165, 42, 42, 255), (178, 34, 34, 255),
(220, 20, 60, 255), (255, 0, 0, 255), (255, 99, 71, 255), (255, 127, 80, 255),
(205, 92, 92, 255), (240, 128, 128, 255), (233, 150, 122, 255), (250, 128, 114, 255),
(255, 160, 122, 255), (255, 69, 0, 255), (255, 140, 0, 255), (255, 165, 0, 255),
(255, 215, 0, 255), (184, 134, 11, 255), (218, 165, 32, 255), (238, 232, 170, 255),
(189, 183, 107, 255), (240, 230, 140, 255), (128, 128, 0, 255), (255, 255, 0, 255),
(154, 205, 50, 255), (85, 107, 47, 255), (107, 142, 35, 255), (124, 252, 0, 255),
(127, 255, 0, 255), (173, 255, 47, 255), (0, 100, 0, 255), (0, 128, 0, 255),
(34, 139, 34, 255), (0, 255, 0, 255), (50, 205, 50, 255), (144, 238, 144, 255),
(152, 251, 152, 255), (143, 188, 143, 255), (0, 250, 154, 255), (0, 255, 127, 255),
(46, 139, 87, 255), (102, 205, 170, 255), (60, 179, 113, 255), (32, 178, 170, 255),
(47, 79, 79, 255), (0, 128, 128, 255), (0, 139, 139, 255), (0, 255, 255, 255),
(0, 255, 255, 255), (224, 255, 255, 255), (0, 206, 209, 255), (64, 224, 208, 255),
(72, 209, 204, 255), (175, 238, 238, 255), (127, 255, 212, 255), (176, 224, 230, 255),
(95, 158, 160, 255), (70, 130, 180, 255), (100, 149, 237, 255), (0, 191, 255, 255),
(30, 144, 255, 255), (173, 216, 230, 255), (135, 206, 235, 255), (135, 206, 250, 255),
(25, 25, 112, 255), (0, 0, 128, 255), (0, 0, 139, 255), (0, 0, 205, 255),
(0, 0, 255, 255), (65, 105, 225, 255), (138, 43, 226, 255), (75, 0, 130, 255),
(72, 61, 139, 255), (106, 90, 205, 255), (123, 104, 238, 255), (147, 112, 219, 255),
(139, 0, 139, 255), (148, 0, 211, 255), (153, 50, 204, 255), (186, 85, 211, 255),
(128, 0, 128, 255), (216, 191, 216, 255), (221, 160, 221, 255), (238, 130, 238, 255),
(255, 0, 255, 255), (218, 112, 214, 255), (199, 21, 133, 255), (219, 112, 147, 255),
(255, 20, 147, 255), (255, 105, 180, 255), (255, 182, 193, 255), (255, 192, 203, 255),
(250, 235, 215, 255), (245, 245, 220, 255), (255, 228, 196, 255), (255, 235, 205, 255),
(245, 222, 179, 255), (255, 248, 220, 255), (255, 250, 205, 255), (250, 250, 210, 255),
(255, 255, 224, 255), (139, 69, 19, 255), (160, 82, 45, 255), (210, 105, 30, 255),
(205, 133, 63, 255), (244, 164, 96, 255), (222, 184, 135, 255), (210, 180, 140, 255),
(188, 143, 143, 255), (255, 228, 181, 255), (255, 222, 173, 255), (255, 218, 185, 255),
(255, 228, 225, 255), (255, 240, 245, 255), (250, 240, 230, 255), (253, 245, 230, 255),
(255, 239, 213, 255), (255, 245, 238, 255), (245, 255, 250, 255), (112, 128, 144, 255),
(119, 136, 153, 255), (176, 196, 222, 255), (230, 230, 250, 255), (255, 250, 240, 255),
(240, 248, 255, 255), (248, 248, 255, 255), (240, 255, 240, 255), (255, 255, 240, 255),
(240, 255, 255, 255), (255, 250, 250, 255), (0, 0, 0, 255), (105, 105, 105, 255),
(128, 128, 128, 255), (169, 169, 169, 255), (192, 192, 192, 255), (211, 211, 211, 255),
(220, 220, 220, 255), (245, 245, 245, 255), (255, 255, 255, 255), ]
# Probability table for the "separate" padding-sampling mode used by
# get_train_params(): padding ratios from -0.15 to 0.15 in 0.01 steps.
# 30 of the 31 entries carry weight 0.0323; the last-inserted entry (0.01)
# carries 0.031 so the weights total 1.0 (30 * 0.0323 + 0.031), up to float
# rounding.  Insertion order of the original table (-0.15 .. 0.00, then
# 0.15 down to 0.01) is preserved.
paddings = {step / 100: 0.0323
            for step in [*range(-15, 1), *range(15, 0, -1)]}
paddings[0.01] = 0.031  # last entry trimmed so the distribution sums to 1.0
def get_train_params():
return {
"paddings": {
"left": {"range": {"v": [-0.15, 0.35], "p": 1.},
"separate": {"v": paddings, "p": 0.}},
"top": {"range": {"v": [-0.15, 0.4], "p": .6},
"separate": {"v": paddings, "p": 0.4}},
"right": {"range": {"v": [-0.15, 0.35], "p": 1.},
"separate": {"v": paddings, "p": 0.}},
"bottom": {"range": {"v": [-0.15, 0.4], "p": .6},
"separate": {"v": paddings, "p": 0.4}},
# "common": {"range": {"v": [-4, 15], "p": 0.1}, "list": {"v": [20], "p": 0.9}},
},
"bg": {
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [85, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.4},
"joined": {
# "list": {"v": color_joined, "p": 0.7},
"list": {"v": color_joined, "p": 0.7},
"separate": {"v": {(255, 255, 255, 255): 0.5, (0, 0, 0, 255): 0.5}, "p": 0.3},
"p": 0.6},
"p": 0.35
},
"gradient": {
"direction": {
"horizontal": {"p": 0.5,
"angle": {
"separate": {"v": {"left": 0.4, "right": 0.4, "vertical": 0.2}, "p": 0.5},
"list": {"v": ["left", "right", "vertical"], "p": 0.5}}},
"vertical": {"p": 0.5},
},
"anchors": {
"count": {
"range": {"v": [2, 5], "p": 0.5},
"list": {"v": [2, 3, 4, 5], "p": 0.0},
"separate": {"v": {2: 0.5, 3: 0.5}, "p": 0.5}
},
"pos": {
"random": {"p": 0.5},
"uniform": {"p": 0.5}
},
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.6},
"joined": {
"list": {"v": color_joined, "p": 0.8},
"separate": {"v": {(255, 255, 255, 255): 0.5, (0, 0, 0, 255): 0.5}, "p": 0.2},
"p": 0.4},
},
},
"p": 0.25
},
"image": {
"p": 0.4, # 0.5,
"position": {
"left": {"range": {"v": [0., 0.7], "p": 0.5},
"list": {"v": [0.2, 0.3, 0.4, 0.5], "p": 0.5}},
"top": {"range": {"v": [0., 0.7], "p": 0.5}, "list": {"v": [0.2, 0.3, 0.4, 0.5], "p": 0.5}},
},
"scale": {
"range": {"v": [0.5, 1.5], "p": 1.},
"list": {"v": [0.7, 0.8, 0.9, 1.1, 1.2, 1.3], "p": 0.},
"p": 0.4
},
"height_ratio": {
"range": {"v": [0.75, 1.25], "p": 0.5},
"list": {"v": [0.8, 0.9, 1.1, 1.2], "p": 0.5},
"p": 0.3
},
"width_ratio": {
"range": {"v": [0.75, 1.25], "p": 0.5},
"list": {"v": [0.8, 0.9, 1.1, 1.2], "p": 0.5},
"p": 0.3
}
}
},
"text": {
"font_size": {
"range": {"v": [10, 220], "p": 1.},
"list": {"v": [20, 30, 40, 50, 60, 70, 80, 90, 100], "p": 0.}
},
"border": {
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.5},
"joined": {
"list": {"v": color_joined, "p": 0.7},
"separate": {"v": {(255, 255, 255, 255): 0.5, (0, 0, 0, 255): 0.5}, "p": 0.3},
"p": 0.5},
},
"width": {
"range": {"v": [1, 4], "p": 0.5},
"separate": {"v": {1: 0.3, 2: 0.4, 3: 0.3}, "p": 0.5},
},
"blur_count": {
"range": {"v": [1, 2], "p": 0.5},
"list": {"v": [1], "p": 1.},
"p": 0.2
},
"p": 0.3
},
"italic": {"range": {"v": [0.1, 1.0], "p": 1.0},
"list": {"v": [1], "p": 0.},
"p": 0.1},
"shadow": {
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.5},
"joined": {
"list": {"v": color_joined, "p": 0.7},
"separate": {"v": {(255, 255, 255, 255): 1.0}, "p": 0.3},
"p": 0.5},
},
"width": {
"range": {"v": [1, 4], "p": 0.5},
"separate": {"v": {1: 0.3, 2: 0.4, 3: 0.3}, "p": 0.5},
},
"blur_count": {
"range": {"v": [1, 2], "p": 0.5},
"list": {"v": [1], "p": 1.},
"p": 0.3
},
"direction": {
"bottom_right": {"p": 0.3},
"right": {"p": 0.1},
"top_right": {"p": 0.1},
"top": {"p": 0.1},
"top_left": {"p": 0.1},
"left": {"p": 0.1},
"bottom_left": {"p": 0.1},
"bottom": {"p": 0.1},
},
"p": 0.3
},
"width_ratio": {
"range": {"v": [0.75, 1.25], "p": 0.3},
"list": {"v": [0.8, 0.9, 1.1, 1.2], "p": 0.7},
"p": 0.1
},
"height_ratio": {
"range": {"v": [0.75, 1.25], "p": 0.3},
"list": {"v": [0.8, 0.9, 1.1, 1.2], "p": 0.7},
"p": 0.1
},
"rotate": {
"range": {"v": [-75, 75], "p": .5},
"list": {"v": list([i - 40 for i in range(80)]), "p": 0.5},
"p": 0.5
},
"blur": {
"range": {"v": [1, 2], "p": 0.2},
"list": {"v": [1], "p": 0.8},
"p": 0.
},
"fg": {
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.7},
"joined": {
"list": {"v": color_joined, "p": 0.7},
"separate": {"v": {(255, 255, 255, 255): 0.3, (0, 0, 0, 255): 0.7}, "p": 0.3},
"p": 0.3},
"p": 0.7
},
"gradient": {
"direction": {
"horizontal": {"p": 0.5,
"angle": {
"separate": {"v": {"left": 0.4, "right": 0.4, "vertical": 0.2},
"p": 0.5},
"list": {"v": ["left", "right", "vertical"], "p": 0.5}}},
"vertical": {"p": 0.5},
},
"anchors": {
"count": {
"range": {"v": [2, 4], "p": 0.5},
"list": {"v": [2, 3, 4, 5], "p": 0.5},
},
"pos": {
"random": {"p": 0.5},
"uniform": {"p": 0.5}
},
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.5},
"joined": {
"list": {"v": color_joined, "p": 1.0},
"p": 0.5},
},
},
"p": 0.3
},
},
}
}
def get_train_params_text_color():
return {
"paddings": {
"left": {"range": {"v": [-0.15, 0.35], "p": 1.},
"separate": {"v": paddings, "p": 0.}},
"top": {"range": {"v": [-0.15, 0.4], "p": .6},
"separate": {"v": paddings, "p": 0.4}},
"right": {"range": {"v": [-0.15, 0.35], "p": 1.},
"separate": {"v": paddings, "p": 0.}},
"bottom": {"range": {"v": [-0.15, 0.4], "p": .6},
"separate": {"v": paddings, "p": 0.4}},
# "common": {"range": {"v": [-4, 15], "p": 0.1}, "list": {"v": [20], "p": 0.9}},
},
"bg": {
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [85, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.4},
"joined": {
# "list": {"v": color_joined, "p": 0.7},
"list": {"v": color_joined, "p": 0.7},
"separate": {"v": {(255, 255, 255, 255): 0.5, (0, 0, 0, 255): 0.5}, "p": 0.3},
"p": 0.6},
"p": 1.0
},
"gradient": {
"direction": {
"horizontal": {"p": 0.5,
"angle": {
"separate": {"v": {"left": 0.4, "right": 0.4, "vertical": 0.2}, "p": 0.5},
"list": {"v": ["left", "right", "vertical"], "p": 0.5}}},
"vertical": {"p": 0.5},
},
"anchors": {
"count": {
"range": {"v": [2, 5], "p": 0.5},
"list": {"v": [2, 3, 4, 5], "p": 0.0},
"separate": {"v": {2: 0.5, 3: 0.5}, "p": 0.5}
},
"pos": {
"random": {"p": 0.5},
"uniform": {"p": 0.5}
},
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.6},
"joined": {
"list": {"v": color_joined, "p": 0.8},
"separate": {"v": {(255, 255, 255, 255): 0.5, (0, 0, 0, 255): 0.5}, "p": 0.2},
"p": 0.4},
},
},
"p": 0.
},
"image": {
"p": 0., # 0.5,
"position": {
"left": {"range": {"v": [0., 0.7], "p": 0.5},
"list": {"v": [0.2, 0.3, 0.4, 0.5], "p": 0.5}},
"top": {"range": {"v": [0., 0.7], "p": 0.5}, "list": {"v": [0.2, 0.3, 0.4, 0.5], "p": 0.5}},
},
"scale": {
"range": {"v": [0.5, 1.5], "p": 1.},
"list": {"v": [0.7, 0.8, 0.9, 1.1, 1.2, 1.3], "p": 0.},
"p": 0.4
},
"height_ratio": {
"range": {"v": [0.75, 1.25], "p": 0.5},
"list": {"v": [0.8, 0.9, 1.1, 1.2], "p": 0.5},
"p": 0.3
},
"width_ratio": {
"range": {"v": [0.75, 1.25], "p": 0.5},
"list": {"v": [0.8, 0.9, 1.1, 1.2], "p": 0.5},
"p": 0.3
}
}
},
"text": {
"font_size": {
"range": {"v": [10, 220], "p": 1.},
"list": {"v": [20, 30, 40, 50, 60, 70, 80, 90, 100], "p": 0.}
},
"border": {
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.5},
"joined": {
"list": {"v": color_joined, "p": 0.7},
"separate": {"v": {(255, 255, 255, 255): 0.5, (0, 0, 0, 255): 0.5}, "p": 0.3},
"p": 0.5},
},
"width": {
"range": {"v": [1, 4], "p": 0.5},
"separate": {"v": {1: 0.3, 2: 0.4, 3: 0.3}, "p": 0.5},
},
"blur_count": {
"range": {"v": [1, 2], "p": 0.5},
"list": {"v": [1], "p": 1.},
"p": 0.2
},
"p": 0.3
},
"italic": {"range": {"v": [0.1, 1.0], "p": 1.0},
"list": {"v": [1], "p": 0.},
"p": 0.1},
"shadow": {
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.5},
"joined": {
"list": {"v": color_joined, "p": 0.7},
"separate": {"v": {(255, 255, 255, 255): 1.0}, "p": 0.3},
"p": 0.5},
},
"width": {
"range": {"v": [1, 4], "p": 0.5},
"separate": {"v": {1: 0.3, 2: 0.4, 3: 0.3}, "p": 0.5},
},
"blur_count": {
"range": {"v": [1, 2], "p": 0.5},
"list": {"v": [1], "p": 1.},
"p": 0.3
},
"direction": {
"bottom_right": {"p": 0.3},
"right": {"p": 0.1},
"top_right": {"p": 0.1},
"top": {"p": 0.1},
"top_left": {"p": 0.1},
"left": {"p": 0.1},
"bottom_left": {"p": 0.1},
"bottom": {"p": 0.1},
},
"p": 0.3
},
"width_ratio": {
"range": {"v": [0.75, 1.25], "p": 0.3},
"list": {"v": [0.8, 0.9, 1.1, 1.2], "p": 0.7},
"p": 0.1
},
"height_ratio": {
"range": {"v": [0.75, 1.25], "p": 0.3},
"list": {"v": [0.8, 0.9, 1.1, 1.2], "p": 0.7},
"p": 0.1
},
"rotate": {
"range": {"v": [-75, 75], "p": .5},
"list": {"v": list([i - 40 for i in range(80)]), "p": 0.5},
"p": 0.5
},
"blur": {
"range": {"v": [1, 2], "p": 0.2},
"list": {"v": [1], "p": 0.8},
"p": 0.
},
"fg": {
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.7},
"joined": {
"list": {"v": color_joined, "p": 0.7},
"separate": {"v": {(255, 255, 255, 255): 0.3, (0, 0, 0, 255): 0.7}, "p": 0.3},
"p": 0.3},
"p": 1.
},
"gradient": {
"direction": {
"horizontal": {"p": 0.5,
"angle": {
"separate": {"v": {"left": 0.4, "right": 0.4, "vertical": 0.2},
"p": 0.5},
"list": {"v": ["left", "right", "vertical"], "p": 0.5}}},
"vertical": {"p": 0.5},
},
"anchors": {
"count": {
"range": {"v": [2, 4], "p": 0.5},
"list": {"v": [2, 3, 4, 5], "p": 0.5},
},
"pos": {
"random": {"p": 0.5},
"uniform": {"p": 0.5}
},
"color": {
"disjoined": {
"r": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"g": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"b": {"range": {"v": [0, 255], "p": 1.0}, "list": {"v": [20], "p": 0.}},
"a": {"range": {"v": [120, 255], "p": 0.5}, "list": {"v": [255], "p": 0.5}},
"p": 0.5},
"joined": {
"list": {"v": color_joined, "p": 1.0},
"p": 0.5},
},
},
"p": 0.
},
},
}
}
| 50.817996 | 113 | 0.25827 | 2,904 | 24,850 | 2.195248 | 0.072658 | 0.077804 | 0.047059 | 0.047686 | 0.766275 | 0.755922 | 0.751373 | 0.751373 | 0.744784 | 0.744784 | 0 | 0.254849 | 0.481288 | 24,850 | 488 | 114 | 50.922131 | 0.239721 | 0.009859 | 0 | 0.735849 | 0 | 0 | 0.11131 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004193 | false | 0 | 0 | 0.004193 | 0.008386 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f989790837f906dbf648cda3c91ebe7b97482b41 | 4,712 | py | Python | airbyte-integrations/connectors/source-coingecko/source_coingecko/test.py | afredojala/airbyte | b5e713e0443180d1e4a6a81b0b4baa08e1cec67f | [
"MIT"
] | null | null | null | airbyte-integrations/connectors/source-coingecko/source_coingecko/test.py | afredojala/airbyte | b5e713e0443180d1e4a6a81b0b4baa08e1cec67f | [
"MIT"
] | null | null | null | airbyte-integrations/connectors/source-coingecko/source_coingecko/test.py | afredojala/airbyte | b5e713e0443180d1e4a6a81b0b4baa08e1cec67f | [
"MIT"
] | null | null | null | import json
res = json.loads('{"data":[{"timestamp":"2022-01-05T23:17:00.000000Z","hash":"loa3DCjClW2ZUbZNehtdWQuMB7y4ClUXUfTWgAtWl94","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1167694,"amount":183213,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2022-01-05T21:47:54.000000Z","hash":"QtZxrKj-H_QN1_ZYv-087M5MiVW5x9tRnb09mx0mKF0","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1167596,"amount":1484653,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2022-01-05T21:14:46.000000Z","hash":"BLo7uDqL1JUstoh-YZ9omIY1QFDfgf5ha3hSIiJRxWg","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1167563,"amount":411741,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2022-01-05T14:07:16.000000Z","hash":"SKghbNhXWpS9KxSxpXoTIXnIHhzPr7AWa1AoKldkQyY","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1167099,"amount":314726,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2022-01-05T11:24:21.000000Z","hash":"jYoy2xdwnUYWzH8YTKgBs7tudwto0P1ZNl4fTyI4rF4","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1166930,"amount":621238,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2022-01-05T10:53:10.000000Z","hash":"-FZNBwc5wmBclsvYgOZCB3L5lYKm78Urt4BeCTOsGCc","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1166897,"amount":299236,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2022-01-05T07:19:48.000000Z","hash":"1FMhMo5B7H-M-neE4-sKy0BDmGaVjNHN5ds1K7jGt1A","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1166672,"amount":1781653,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2022-01-05T06:42:57.000000Z","hash":"dWNos4lItDdjOjbT7gf7FVH5dezqVvEruhIorimM_vw","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1166
635,"amount":1251192,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2022-01-05T00:36:25.000000Z","hash":"8oUR3gXzjBsI0wvHSbd1AS8kygpudgtRS8EAReEAp_A","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":1166271,"amount":1517743,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"}]}')
other = {"data":[]}
test = {"data":[{"timestamp":"2021-06-03T21:19:50.000000Z","hash":"infIoxCMOafeaELv4t5aztFSeRThcYhDzPXbBp0MmGQ","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":869861,"amount":97207801,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2021-06-03T20:19:10.000000Z","hash":"1bATKQbAWPwCRmZogb_M1lk66OcRBE9UFc1YMzn-Dfo","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":869820,"amount":87425029,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2021-06-03T18:21:27.000000Z","hash":"0nxvVZIDVdlLV9zZ9cQSW_3RyEbWAHQbQYIMzp7iwtQ","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":869740,"amount":9090872,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2021-06-03T12:17:08.000000Z","hash":"0AQDH-0KuglPQt1_Nq56QfZs7uikHV7iZJEPfTSY2eA","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":869498,"amount":104686967,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2021-06-03T07:47:26.000000Z","hash":"rB2XmqWW2N2uesQOOLL1a1R1sK0yBMYQrd0kRcVHknQ","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":869260,"amount":9934122,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2021-06-03T03:45:50.000000Z","hash":"8FM78s-ptw1yldiZr3nhiEhVOS2ru7KYLMYDHlvRxhI","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":869025,"amount":98300733,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2021-06-03T02:55:06.000000Z","hash":"mb2A0gEqZHDqKf_Fz7AmKRTAXYQDk-YDrqO9zMBEkhA","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":868980,"amount":66,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"},{"timestamp":"2021-06-03T23:02:06.000000Z","hash":"-WhIyo4nCLF66TZyfUE9dW6u1P4uxP7m66hTiw8FCUM","gateway":"11u4XLetGNzvkDDe15F74kWzK8Y2eFac1c6bLBtQhkdQzM8RxYp","block":869945,"amount":1
06892289,"account":"14Rqw67Er4T4mDitRR5hkvDbefXUhVQGiaVV4yzgSF3sVtu6PfU"}]}
def calculate_sum(data):
sum = 0
hashes = []
for i in data:
if i["hash"] in hashes:
continue
hashes.append(i["hash"])
sum += i["amount"]
sum *= 1e-8
return sum
print(calculate_sum(test['data']))
| 235.6 | 2,347 | 0.822156 | 351 | 4,712 | 11.008547 | 0.413105 | 0.048395 | 0.277174 | 0.146998 | 0.285973 | 0.040373 | 0 | 0 | 0 | 0 | 0 | 0.228702 | 0.02101 | 4,712 | 19 | 2,348 | 248 | 0.608931 | 0 | 0 | 0 | 0 | 0.066667 | 0.8559 | 0.785866 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.066667 | 0 | 0.2 | 0.066667 | 0 | 0 | 1 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
dded1a9584b5ef5427b9f773f44470704972a179 | 40,607 | py | Python | code/cadastrar_pet.py | pvictor1206/PETConfig | 636a34a1847e53204fedaca2024b0aa19f11168c | [
"MIT"
] | null | null | null | code/cadastrar_pet.py | pvictor1206/PETConfig | 636a34a1847e53204fedaca2024b0aa19f11168c | [
"MIT"
] | null | null | null | code/cadastrar_pet.py | pvictor1206/PETConfig | 636a34a1847e53204fedaca2024b0aa19f11168c | [
"MIT"
] | null | null | null | import utilidades
def insetir():  # registers a new animal in the system
    """Interactively collect a new animal's data and append it to "dados_pet.txt".

    The appended record is one comma-separated line:
        nome,idade,sexo,porte,raca,lar_temporario,lar_anterior,responsavel,data_adocao,codigo
    ``responsavel`` and ``data_adocao`` start empty (filled in on adoption).
    The code equals the animal's 1-based line number in the file; blank lines
    (left behind when an animal is removed) still count, so earlier codes stay
    stable.
    """

    def _proximo_codigo():
        # Next code = existing line count + 1; a missing file means first animal.
        try:
            with open("dados_pet.txt", 'r') as arquivo:
                return str(sum(1 for _ in arquivo) + 1)
        except FileNotFoundError:
            return '1'

    def _ler_idade():
        # Keep asking until a non-negative whole number is typed; return it as text.
        while True:
            idade = input("Idade do animal(meses completos): ").strip().lower()
            try:
                if int(idade) >= 0:
                    return idade
                print("Idade inválida, Digite novamente")
            except ValueError:
                print("Entrada inválida, Digite novamente")

    def _ler_sexo():
        # Accept only 'M' or 'F' (input is upper-cased first).
        while True:
            sexo = input("Sexo do animal(M-Macho, F-Femia): ").strip().upper()
            if sexo in ('F', 'M'):
                return sexo
            print("Sexo inválido, Digite novamente")

    def _ler_porte():
        # Accept only the three supported sizes.
        while True:
            porte = input("Porte do animal(pequeno,medio,grande): ").strip().lower()
            if porte in ('pequeno', 'medio', 'grande'):
                return porte
            print("Porte inválido, Digite novamente")  # fixed typo: was "Porto inválido"

    responsavel_pet = ''    # set only when the animal is adopted
    data_adocao_pet = ''    # set only when the animal is adopted
    codigo_pet = _proximo_codigo()
    # Names/breeds are normalized to lowercase; locations keep their casing.
    nome_animal = input("Nome do Animal: ").strip().lower()
    idade_animal = _ler_idade()
    sexo_animal = _ler_sexo()
    porte_animal = _ler_porte()
    raca_animal = input("Raça do animal (caso não tenha, coloque SRD): ").strip().lower()
    lar_temporario = input("Lar temporário: ").strip()
    lar_anterior = input("Local onde o animal está: ").strip()
    campos = [nome_animal, idade_animal, sexo_animal, porte_animal, raca_animal,
              lar_temporario, lar_anterior, responsavel_pet, data_adocao_pet, codigo_pet]
    # Append so existing records (and their line-number codes) are preserved.
    with open("dados_pet.txt", 'a') as dados_pet:
        dados_pet.write(','.join(campos) + '\n')
def remover():  # removes an animal from the system
    """List every registered animal, ask for a code, and blank out that animal's line.

    Records are parsed character by character from "dados_pet.txt"
    (comma-separated fields, one animal per line). The removed animal's line is
    replaced by a blank line so the remaining codes keep matching their
    1-based line numbers.
    """
    # NICOLAS (resets every field accumulator back to '')
    print(f"{utilidades.alterar_cor('roxo')}Remoção de Animal{utilidades.alterar_cor('limpar')}")
    utilidades.linha()
    cont_dados_pet = 0  # commas seen so far on the current line: selects which field a character belongs to
    verificacao_tamanho = 0  # index of the current line; the last line needs special end-of-code handling
    lista_pet = []  # raw lines read from the file
    lista_dados_pet = []  # one dict of parsed fields per non-blank line
    index = 0  # position in lista_dados_pet currently being filled
    # accumulators for each field of the record being parsed
    nome_arquivo = ''
    idade_arquivo = ''
    sexo_arquivo = ''
    porte_arquivo = ''
    raca_arquivo = ''
    lar_temp_arquivo = ''
    lar_ante_arquivo = ''
    responsavel_arquivo = ''
    data_adocao_arquivo = ''
    codigo_arquivo = ''  # the record's code, accumulated digit by digit
    try:  # NOTE(review): bare try/except around the whole body — a missing file, bad menu input AND any parsing bug all fall through to "Entrada invalida."
        dados_pet = open("dados_pet.txt", 'r')
        for dados in dados_pet:  # load every line into memory
            lista_pet.append(dados)
            if ',' in dados:  # blank lines (already-removed animals) get no dict
                lista_dados_pet.append({})
        dados_pet.close()
        for linha in lista_pet:
            '''
            laço em que analisa todos os dados dentro do arquivo e armazena dentro de uma lista com um dicionário.
            Ex:
            lista_dados_pet[
                {
                    'nome': ---
                    'idade': ---
                    ...
                    'codigo': ---
                },
                ...
                {
                    'nome': ---
                    'idade': ---
                    ...
                    'codigo': ---
                }
            ]
            '''
            for letra in linha:
                if letra == ',':
                    cont_dados_pet += 1
                # Route each non-comma character into the accumulator for the
                # field selected by how many commas have been seen so far.
                if cont_dados_pet == 0 and letra != ',':
                    nome_arquivo += letra
                if cont_dados_pet == 1 and letra != ',':
                    idade_arquivo += letra
                if cont_dados_pet == 2 and letra != ',':
                    sexo_arquivo += letra
                if cont_dados_pet == 3 and letra != ',':
                    porte_arquivo += letra
                if cont_dados_pet == 4 and letra != ',':
                    raca_arquivo += letra
                if cont_dados_pet == 5 and letra != ',':
                    lar_temp_arquivo += letra
                if cont_dados_pet == 6 and letra != ',':
                    lar_ante_arquivo += letra
                if cont_dados_pet == 7 and letra != ',':
                    responsavel_arquivo += letra
                if cont_dados_pet == 8 and letra != ',':
                    data_adocao_arquivo += letra
                if cont_dados_pet == 9 and letra != ',':
                    # Last field (code). A digit keeps accumulating; a non-digit
                    # (the '\n' terminator) makes int(letra) raise, and the
                    # except branch finalizes the record — the control flow
                    # relies on that exception.
                    try:
                        if verificacao_tamanho == (len(lista_pet) - 1):
                            # Last line of the file may have no trailing '\n',
                            # so the record is committed as soon as a code
                            # character arrives.
                            if letra == '':
                                continue
                            else:
                                codigo_arquivo += letra
                                lista_dados_pet[index]['Nome'] = nome_arquivo
                                lista_dados_pet[index]['Idade (Meses)'] = int(idade_arquivo)
                                lista_dados_pet[index]['Sexo'] = sexo_arquivo
                                lista_dados_pet[index]['Porte'] = porte_arquivo
                                lista_dados_pet[index]['Raça'] = raca_arquivo
                                lista_dados_pet[index]['Lar Temporário'] = lar_temp_arquivo
                                lista_dados_pet[index]['Onde o Animal Está'] = lar_ante_arquivo
                                lista_dados_pet[index]['Responsável'] = responsavel_arquivo
                                lista_dados_pet[index]['Data da Adocao'] = data_adocao_arquivo
                                lista_dados_pet[index]['Código'] = int(codigo_arquivo)
                                break
                        # NOTE(review): `type(int(letra) == int)` is always
                        # truthy when int(letra) succeeds — it evaluates
                        # type(bool). Presumably `type(int(letra)) == int` was
                        # meant; it still "works" because int() raising is what
                        # actually filters non-digits. Confirm before changing.
                        elif (type(int(letra) == int)):
                            codigo_arquivo += letra
                    except:
                        # Non-digit terminator reached: commit the record and
                        # reset all accumulators for the next line.
                        lista_dados_pet[index]['Nome'] = nome_arquivo
                        lista_dados_pet[index]['Idade (Meses)'] = int(idade_arquivo)
                        lista_dados_pet[index]['Sexo'] = sexo_arquivo
                        lista_dados_pet[index]['Porte'] = porte_arquivo
                        lista_dados_pet[index]['Raça'] = raca_arquivo
                        lista_dados_pet[index]['Lar Temporário'] = lar_temp_arquivo
                        lista_dados_pet[index]['Onde o Animal Está'] = lar_ante_arquivo
                        lista_dados_pet[index]['Responsável'] = responsavel_arquivo
                        lista_dados_pet[index]['Data da Adocao'] = data_adocao_arquivo
                        lista_dados_pet[index]['Código'] = int(codigo_arquivo)
                        nome_arquivo = ''
                        idade_arquivo = ''
                        sexo_arquivo = ''
                        porte_arquivo = ''
                        raca_arquivo = ''
                        lar_temp_arquivo = ''
                        lar_ante_arquivo = ''
                        responsavel_arquivo = ''
                        codigo_arquivo = ''
                        data_adocao_arquivo = ''
                        verificacao_tamanho += 1
                        index += 1
                        cont_dados_pet = 0
                        break
        if len(lista_dados_pet) == 0:
            print("Nenhum animal cadastrado")
        else:
            # Show every animal's fields, then ask which code to delete.
            for dados in lista_dados_pet:
                for k, v in dados.items():
                    print(f"{k} ------------------- {v}")
                print(f"{utilidades.alterar_cor('amarelo')}=-{utilidades.alterar_cor('limpar')}" * 40)
            escolha_pet = int(input(f"{utilidades.alterar_cor('ciano')}Digite o código do animal para excluir: {utilidades.alterar_cor('limpar')}"))
            for dados in lista_dados_pet:
                if dados['Código'] == escolha_pet:
                    # Echo the chosen animal before asking for confirmation.
                    utilidades.linha()
                    print(f"{utilidades.alterar_cor('verde')}REMOVER ANIMAL{utilidades.alterar_cor('limpar')}")
                    print(f"1. Nome ------------------- {dados['Nome']}")
                    print(f"2. Idade (Meses) ------------------- {dados['Idade (Meses)']}")
                    print(f"3. Sexo ------------------- {dados['Sexo']}")
                    print(f"4. Porte ------------------- {dados['Porte']}")
                    print(f"5. Raça ------------------- {dados['Raça']}")
                    print(f"6. Lar Temporário ------------------- {dados['Lar Temporário']}")
                    print(f"7. Onde o Animal Está ------------------- {dados['Onde o Animal Está']}")
                    print(f"8. Responsável ------------------- {dados['Responsável']}")
                    print(f"9. Data da Adocao' ------------------- {dados['Data da Adocao']}")
                    print(
                        f"{utilidades.alterar_cor('vermelho')}Código ------------------- {dados['Código']}{utilidades.alterar_cor('limpar')}")
                    arquivo = ''  # rebuilt file contents
                    contador = 1  # 1-based line counter, matches the animal codes
                    utilidades.linha()
                    while True:  # confirmation loop: only 'sim' or 'nao' accepted
                        escolha_alteracao = str(input(f"{utilidades.alterar_cor('ciano')}Tem certeza que quer excluir?(sim/nao) {utilidades.alterar_cor('limpar')}")).strip().lower()
                        try:
                            if escolha_alteracao == 'sim' or escolha_alteracao == 'nao':
                                break
                            else:
                                print("Resposta inválida, Digite novamente")
                        except:
                            print("Resposta inválida, Digite novamente")
                    if escolha_alteracao == 'sim':
                        # Rewrite the whole file, replacing the chosen line with
                        # a blank line so other codes keep their line numbers.
                        dados_pet = open("dados_pet.txt", 'r')
                        for linha in dados_pet:
                            if contador == escolha_pet:
                                arquivo += '\n'
                            else:
                                arquivo += linha
                            contador += 1
                        dados_pet.close()
                        dados_pet = open("dados_pet.txt", 'w')
                        dados_pet.write(arquivo)
                        print(
                            f"{utilidades.alterar_cor('roxo')}REMOÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
                        dados_pet.close()
    except:
        print("Entrada invalida.")
def alterar(): # altera as informações do animal no sistema
# PAULO
print(f"{utilidades.alterar_cor('roxo')}Consulta de dados para alteração{utilidades.alterar_cor('limpar')}")
utilidades.linha()
cont_dados_pet = 0 # contador que mostra a quantidade de animais dentro do arquivo
verificacao_tamanho = 0 # verifica o tamanho da linha do arquivo
lista_pet = [] # lista que onde fica armazenado os animais
lista_dados_pet = [] # lista onde será armazenado os informações do arquivo
index = 0
pet_disponivel = False
# variáveis onde será armazenada as informações dos arquivos
nome_arquivo = ''
idade_arquivo = ''
sexo_arquivo = ''
porte_arquivo = ''
raca_arquivo = ''
lar_temp_arquivo = ''
lar_ante_arquivo = ''
responsavel_arquivo = ''
data_adocao_arquivo = ''
codigo_arquivo = '' # string onde fica o código
try: # verifica se existe dados dentro do arquivo
dados_pet = open("dados_pet.txt", 'r')
for dados in dados_pet: # adiciona todos os animais dentro de uma lista
lista_pet.append(dados)
if ',' in dados: # Ve se existe conteúdo na linha
lista_dados_pet.append({})
dados_pet.close()
# Laço em que analisa todos os dados dentro do arquivo, separando nos dicionarios respectivamente.
for linha in lista_pet:
'''
laço em que analisa todos os dados dentro do arquivo e armazena dentro de uma lista com um dicionário.
Ex:
lista_dados_pet[
{
'nome': ---
'idade': ---
...
'codigo': ---
},
...
{
'nome': ---
'idade': ---
...
'codigo': ---
}
]
'''
for letra in linha:
if letra == ',':
cont_dados_pet += 1
if cont_dados_pet == 0 and letra != ',':
nome_arquivo += letra
if cont_dados_pet == 1 and letra != ',':
idade_arquivo += letra
if cont_dados_pet == 2 and letra != ',':
sexo_arquivo += letra
if cont_dados_pet == 3 and letra != ',':
porte_arquivo += letra
if cont_dados_pet == 4 and letra != ',':
raca_arquivo += letra
if cont_dados_pet == 5 and letra != ',':
lar_temp_arquivo += letra
if cont_dados_pet == 6 and letra != ',':
lar_ante_arquivo += letra
if cont_dados_pet == 7 and letra != ',':
responsavel_arquivo += letra
if cont_dados_pet == 8 and letra != ',':
data_adocao_arquivo += letra
if cont_dados_pet == 9 and letra != ',':
try:
if verificacao_tamanho == (len(lista_pet) - 1):
if letra == '':
continue
else:
codigo_arquivo += letra
lista_dados_pet[index]['Nome'] = nome_arquivo
lista_dados_pet[index]['Idade (Meses)'] = int(idade_arquivo)
lista_dados_pet[index]['Sexo'] = sexo_arquivo
lista_dados_pet[index]['Porte'] = porte_arquivo
lista_dados_pet[index]['Raça'] = raca_arquivo
lista_dados_pet[index]['Lar Temporário'] = lar_temp_arquivo
lista_dados_pet[index]['Onde o Animal Está'] = lar_ante_arquivo
lista_dados_pet[index]['Responsável'] = responsavel_arquivo
lista_dados_pet[index]['Data da Adocao'] = data_adocao_arquivo
lista_dados_pet[index]['Código'] = int(codigo_arquivo)
break
elif (type(int(letra) == int)):
codigo_arquivo += letra
except:
lista_dados_pet[index]['Nome'] = nome_arquivo
lista_dados_pet[index]['Idade (Meses)'] = int(idade_arquivo)
lista_dados_pet[index]['Sexo'] = sexo_arquivo
lista_dados_pet[index]['Porte'] = porte_arquivo
lista_dados_pet[index]['Raça'] = raca_arquivo
lista_dados_pet[index]['Lar Temporário'] = lar_temp_arquivo
lista_dados_pet[index]['Onde o Animal Está'] = lar_ante_arquivo
lista_dados_pet[index]['Responsável'] = responsavel_arquivo
lista_dados_pet[index]['Data da Adocao'] = data_adocao_arquivo
lista_dados_pet[index]['Código'] = int(codigo_arquivo)
nome_arquivo = ''
idade_arquivo = ''
sexo_arquivo = ''
porte_arquivo = ''
raca_arquivo = ''
lar_temp_arquivo = ''
lar_ante_arquivo = ''
responsavel_arquivo = ''
codigo_arquivo = ''
data_adocao_arquivo = ''
verificacao_tamanho += 1
index += 1
cont_dados_pet = 0
break
# se nao existir nenhum animal do arquivo
if len(lista_dados_pet) == 0:
print("Nenhum animal cadastrado")
else: # se existir irá imprimir na tela para alteração
for dados in lista_dados_pet:
for k,v in dados.items():
print(f"{k} ------------------- {v}")
print(f"{utilidades.alterar_cor('amarelo')}=-{utilidades.alterar_cor('limpar')}" * 40)
escolha_pet = int(input(f"{utilidades.alterar_cor('ciano')}Digite o código do animal para editar: {utilidades.alterar_cor('limpar')}"))
for dados in lista_dados_pet: # dependendo da escolha. O usuário poderá escolher o que quer alterar
if dados['Código'] == escolha_pet:
utilidades.linha()
print(f"{utilidades.alterar_cor('verde')}ALTERAR INFORMAÇÕES{utilidades.alterar_cor('limpar')}")
print(f"1. Nome ------------------- {dados['Nome']}")
print(f"2. Idade (Meses) ------------------- {dados['Idade (Meses)']}")
print(f"3. Sexo ------------------- {dados['Sexo']}")
print(f"4. Porte ------------------- {dados['Porte']}")
print(f"5. Raça ------------------- {dados['Raça']}")
print(f"6. Lar Temporário ------------------- {dados['Lar Temporário']}")
print(f"7. Onde o Animal Está ------------------- {dados['Onde o Animal Está']}")
print(f"8. Responsável ------------------- {dados['Responsável']}")
print(f"9. Data da Adocao' ------------------- {dados['Data da Adocao']}")
print(f"{utilidades.alterar_cor('vermelho')}Código ------------------- {dados['Código']}{utilidades.alterar_cor('limpar')}")
pet_disponivel = True
arquivo = ''
contador = 1
utilidades.linha()
escolha_alteracao = int(input(f"{utilidades.alterar_cor('ciano')}Digite o número do item que você quer alterar: {utilidades.alterar_cor('limpar')}"))
"""
Escolhido o que quer alterar, condições serão feitas para motificar a parte escolhida.
"""
if escolha_alteracao == 1:
dados_pet = open("dados_pet.txt", 'r')
alterar_nome = str(input("Digite o nome: "))
for linha in dados_pet:
if contador == escolha_pet:
arquivo += alterar_nome
arquivo += ','
arquivo += str(dados['Idade (Meses)'])
arquivo += ','
arquivo += dados['Sexo']
arquivo += ','
arquivo += dados['Porte']
arquivo += ','
arquivo += dados['Raça']
arquivo += ','
arquivo += dados['Lar Temporário']
arquivo += ','
arquivo += dados['Onde o Animal Está']
arquivo += ','
arquivo += dados['Responsável']
arquivo += ','
arquivo += dados['Data da Adocao']
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('roxo')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
elif escolha_alteracao == 2:
dados_pet = open("dados_pet.txt", 'r')
while True: # Laço em que verifica se a idade do animal é um numero e se o numero é positivo
alterar_idade = int(input("Digite a idade(meses): "))
try:
if alterar_idade >= 0:
break
else:
print("Idade inválida, Digite novamente")
except:
print("Entrada inválida, Digite novamente")
for linha in dados_pet:
if contador == escolha_pet:
arquivo += dados['Nome']
arquivo += ','
arquivo += str(alterar_idade)
arquivo += ','
arquivo += dados['Sexo']
arquivo += ','
arquivo += dados['Porte']
arquivo += ','
arquivo += dados['Raça']
arquivo += ','
arquivo += dados['Lar Temporário']
arquivo += ','
arquivo += dados['Onde o Animal Está']
arquivo += ','
arquivo += dados['Responsável']
arquivo += ','
arquivo += dados['Data da Adocao']
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('roxo')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
elif escolha_alteracao == 3:
dados_pet = open("dados_pet.txt", 'r')
while True: # laço que verifica qual o sexo do animal.
alterar_sexo = str(input("Digite o sexo (M-Macho, F-Femia): ")).strip().upper()
try:
if alterar_sexo == 'F' or alterar_sexo == 'M':
break
else:
print("Sexo inválido, Digite novamente")
except:
print("Entrada inválida, Digite novamente")
for linha in dados_pet:
if contador == escolha_pet:
arquivo += dados['Nome']
arquivo += ','
arquivo += str(dados['Idade (Meses)'])
arquivo += ','
arquivo += alterar_sexo
arquivo += ','
arquivo += dados['Porte']
arquivo += ','
arquivo += dados['Raça']
arquivo += ','
arquivo += dados['Lar Temporário']
arquivo += ','
arquivo += dados['Onde o Animal Está']
arquivo += ','
arquivo += dados['Responsável']
arquivo += ','
arquivo += dados['Data da Adocao']
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('roxo')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
elif escolha_alteracao == 4:
dados_pet = open("dados_pet.txt", 'r')
while True: # laço que verifica qual o por do animal
alterar_porte = str(input("Digite o porte do animal(pequeno,medio,grande): ")).strip().lower()
try:
if alterar_porte == 'pequeno' or alterar_porte == 'medio' or alterar_porte == 'grande':
break
else:
print("Porto inválido, Digite novamente")
except:
print("Entrada inválida, Digite novamente")
for linha in dados_pet:
if contador == escolha_pet:
arquivo += dados['Nome']
arquivo += ','
arquivo += str(dados['Idade (Meses)'])
arquivo += ','
arquivo += dados['Sexo']
arquivo += ','
arquivo += alterar_porte
arquivo += ','
arquivo += dados['Raça']
arquivo += ','
arquivo += dados['Lar Temporário']
arquivo += ','
arquivo += dados['Onde o Animal Está']
arquivo += ','
arquivo += dados['Responsável']
arquivo += ','
arquivo += dados['Data da Adocao']
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('verde')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
elif escolha_alteracao == 5:
dados_pet = open("dados_pet.txt", 'r')
alterar_raca = str(input("Digite a raça: "))
for linha in dados_pet:
if contador == escolha_pet:
arquivo += dados['Nome']
arquivo += ','
arquivo += str(dados['Idade (Meses)'])
arquivo += ','
arquivo += dados['Sexo']
arquivo += ','
arquivo += dados['Porte']
arquivo += ','
arquivo += alterar_raca
arquivo += ','
arquivo += dados['Lar Temporário']
arquivo += ','
arquivo += dados['Onde o Animal Está']
arquivo += ','
arquivo += dados['Responsável']
arquivo += ','
arquivo += dados['Data da Adocao']
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('verde')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
elif escolha_alteracao == 6:
dados_pet = open("dados_pet.txt", 'r')
alterar_lar_temp = str(input("Digite o lar temporário: "))
for linha in dados_pet:
if contador == escolha_pet:
arquivo += dados['Nome']
arquivo += ','
arquivo += str(dados['Idade (Meses)'])
arquivo += ','
arquivo += dados['Sexo']
arquivo += ','
arquivo += dados['Porte']
arquivo += ','
arquivo += dados['Raça']
arquivo += ','
arquivo += alterar_lar_temp
arquivo += ','
arquivo += dados['Onde o Animal Está']
arquivo += ','
arquivo += dados['Responsável']
arquivo += ','
arquivo += dados['Data da Adocao']
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('verde')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
elif escolha_alteracao == 7:
dados_pet = open("dados_pet.txt", 'r')
onde_esta = str(input("Digite onde está o animal: "))
for linha in dados_pet:
if contador == escolha_pet:
arquivo += dados['Nome']
arquivo += ','
arquivo += str(dados['Idade (Meses)'])
arquivo += ','
arquivo += dados['Sexo']
arquivo += ','
arquivo += dados['Porte']
arquivo += ','
arquivo += dados['Raça']
arquivo += ','
arquivo += dados['Lar Temporário']
arquivo += ','
arquivo += onde_esta
arquivo += ','
arquivo += dados['Responsável']
arquivo += ','
arquivo += dados['Data da Adocao']
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('verde')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
elif escolha_alteracao == 8:
dados_pet = open("dados_pet.txt", 'r')
editar_responsavel = str(input("Digite o responsável: "))
for linha in dados_pet:
if contador == escolha_pet:
arquivo += dados['Nome']
arquivo += ','
arquivo += str(dados['Idade (Meses)'])
arquivo += ','
arquivo += dados['Sexo']
arquivo += ','
arquivo += dados['Porte']
arquivo += ','
arquivo += dados['Raça']
arquivo += ','
arquivo += dados['Lar Temporário']
arquivo += ','
arquivo += dados['Onde o Animal Está']
arquivo += ','
arquivo += editar_responsavel
arquivo += ','
arquivo += dados['Data da Adocao']
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('verde')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
elif escolha_alteracao == 9:
dados_pet = open("dados_pet.txt", 'r')
data_adocao = str(input("Digite o responsável: "))
for linha in dados_pet:
if contador == escolha_pet:
arquivo += dados['Nome']
arquivo += ','
arquivo += str(dados['Idade (Meses)'])
arquivo += ','
arquivo += dados['Sexo']
arquivo += ','
arquivo += dados['Porte']
arquivo += ','
arquivo += dados['Raça']
arquivo += ','
arquivo += dados['Lar Temporário']
arquivo += ','
arquivo += dados['Onde o Animal Está']
arquivo += ','
arquivo += dados['Responsável']
arquivo += ','
arquivo += data_adocao
arquivo += ','
arquivo += str(dados['Código'])
arquivo += '\n'
else:
arquivo += linha
contador += 1
dados_pet.close()
dados_pet = open("dados_pet.txt", 'w')
dados_pet.write(arquivo)
print(f"{utilidades.alterar_cor('verde')}MODIFICAÇÃO REALIZADA COM SUCESSO{utilidades.alterar_cor('limpar')}")
dados_pet.close()
else:
print("Entrada invalida")
# Se não existir nenhum animal no arquivo
if pet_disponivel == False:
print("Animal não encontrado")
except:
print("Entrada invalida.")
def cadastrar(): # Funcao principal do arquivo, serve para cadastrar, remover e alterar informações do animal
while True: #laço infinito até o usuário quiser sair.
print(f"{utilidades.alterar_cor('roxo')}CADASTRO/REMOVER ANIMAL{utilidades.alterar_cor('limpar')}")
print(f"{utilidades.alterar_cor('amarelo')}1. Inserir animal{utilidades.alterar_cor('limpar')}")
print(f"{utilidades.alterar_cor('amarelo')}2. Alterar animal{utilidades.alterar_cor('limpar')}")
print(f"{utilidades.alterar_cor('amarelo')}3. Remover animal{utilidades.alterar_cor('limpar')}")
print(f"{utilidades.alterar_cor('amarelo')}4. Sair{utilidades.alterar_cor('limpar')}")
utilidades.linha() # insere uma linha na tela
opcao = int(input(
f"{utilidades.alterar_cor('verde')}Digite uma das opções: {utilidades.alterar_cor('limpar')}")) # Lê a opção que será escolhida pelo usuário
if opcao == 1:
utilidades.linha() # insere uma linha na tela
insetir() # chama a funcao para inserir um animal
elif opcao == 2:
utilidades.linha() # insere uma linha na tela
alterar() # chama a funcao para alterar o animal
elif opcao == 3:
utilidades.linha() # insere uma linha na tela
remover() # chama a funcao para remover o animal
elif opcao == 4: # apenas vai para o menu principal
break
else:
utilidades.linha() # insere uma linha na tela
print(
f"{utilidades.alterar_cor('vermelho')}Opção Inválida, Digite novamente.{utilidades.alterar_cor('limpar')}")
utilidades.linha() # insere uma linha na tela | 42.475941 | 177 | 0.438053 | 3,544 | 40,607 | 4.857223 | 0.070824 | 0.082723 | 0.067387 | 0.041826 | 0.886546 | 0.871965 | 0.863018 | 0.844719 | 0.835599 | 0.832462 | 0 | 0.004628 | 0.451893 | 40,607 | 956 | 178 | 42.475941 | 0.768792 | 0.070456 | 0 | 0.892907 | 0 | 0.005563 | 0.189057 | 0.067444 | 0 | 0 | 0 | 0.004184 | 0 | 1 | 0.005563 | false | 0 | 0.001391 | 0 | 0.006954 | 0.097357 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
34a114385d0304b67e357bfb4f8eb7ffed8f48d2 | 48 | py | Python | identifiers_extractor/language_recognition/__init__.py | areyde/identifiers-extractor | 371d272737bb45ece4bcd6eec07321f8797b44c6 | [
"Apache-2.0"
] | 16 | 2020-07-04T04:43:12.000Z | 2022-03-05T11:51:24.000Z | identifiers_extractor/language_recognition/__init__.py | areyde/identifiers-extractor | 371d272737bb45ece4bcd6eec07321f8797b44c6 | [
"Apache-2.0"
] | 3 | 2020-07-15T09:17:24.000Z | 2021-01-29T17:17:40.000Z | identifiers_extractor/language_recognition/__init__.py | areyde/identifiers-extractor | 371d272737bb45ece4bcd6eec07321f8797b44c6 | [
"Apache-2.0"
] | 9 | 2020-07-07T17:32:10.000Z | 2021-11-25T21:38:04.000Z | from .utils import get_enry, get_enry_dir, main
| 24 | 47 | 0.8125 | 9 | 48 | 4 | 0.777778 | 0.388889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 48 | 1 | 48 | 48 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9b3ac788ce970f105b6f642bf988289e53df06e2 | 2,942 | py | Python | ArcGIS_Geoprocessing_Workflows/Batch_Calculate_Statistics.py | CourtneyStuart/FL_Habitat_Suitability | 84850c32699142df5117562ca570042abe626e6a | [
"CC-BY-4.0"
] | null | null | null | ArcGIS_Geoprocessing_Workflows/Batch_Calculate_Statistics.py | CourtneyStuart/FL_Habitat_Suitability | 84850c32699142df5117562ca570042abe626e6a | [
"CC-BY-4.0"
] | null | null | null | ArcGIS_Geoprocessing_Workflows/Batch_Calculate_Statistics.py | CourtneyStuart/FL_Habitat_Suitability | 84850c32699142df5117562ca570042abe626e6a | [
"CC-BY-4.0"
] | 1 | 2022-03-30T02:06:30.000Z | 2022-03-30T02:06:30.000Z | # -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# Batch_Calculate_Statistics.py
# Created on: 2021-08-05 11:40:22.00000
# (generated by ArcGIS/ModelBuilder)
# Description:
# Calculate batch statistics for ASCII files
# ---------------------------------------------------------------------------
# Import arcpy module
import arcpy
# Local variables:
BPI_Broad_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\BPI_Broad.asc"
BPI_Fine_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\BPI_Fine.asc"
Curvature_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Curvature.asc"
Depth_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Depth.asc"
Habitat_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Habitat.asc"
Mangrove_Dist_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mangrove_Dist.asc"
Mean_Sum_DO_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Sum_DO.asc"
Mean_Sum_Sal_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Sum_Sal.asc"
Mean_Sum_Temp_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Sum_Temp.asc"
Mean_Win_DO_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Win_DO.asc"
Mean_Win_Sal_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Win_Sal.asc"
Mean_Win_Temp_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Win_Temp.asc"
Plan_Curve_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Plan_Curve.asc"
Rugosity_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Rugosity.asc"
Slope_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Slope.asc"
StDev_Depth_asc = "Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\StDev_Depth.asc"
Batch_Calculate_Statistics_Succeeded = "true"
# Process: Batch Calculate Statistics
arcpy.BatchCalculateStatistics_management("Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\BPI_Broad.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\BPI_Fine.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Curvature.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Depth.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Habitat.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mangrove_Dist.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Sum_DO.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Sum_Sal.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Sum_Temp.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Win_DO.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Win_Sal.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Mean_Win_Temp.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Plan_Curve.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Rugosity.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\Slope.asc;Z:\\Courtney\\Stuart_MSc_Ch1\\Spatial_Predictors\\StDev_Depth.asc", "1", "1", "", "OVERWRITE")
| 81.722222 | 1,108 | 0.764786 | 420 | 2,942 | 4.954762 | 0.15 | 0.138395 | 0.230658 | 0.27679 | 0.852955 | 0.852955 | 0.852955 | 0.852955 | 0.852955 | 0.834214 | 0 | 0.019048 | 0.03637 | 2,942 | 35 | 1,109 | 84.057143 | 0.714991 | 0.138681 | 0 | 0 | 1 | 0.052632 | 0.821896 | 0.815946 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.052632 | 0 | 0.052632 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
9bb808a664a1bda49330ffe50d96dc58b0b77237 | 20,351 | py | Python | biserici_inlemnite/biserici/migrations/0004_auto_20210729_1859.py | ck-tm/biserici-inlemnite | c9d12127b92f25d3ab2fcc7b4c386419fe308a4e | [
"MIT"
] | null | null | null | biserici_inlemnite/biserici/migrations/0004_auto_20210729_1859.py | ck-tm/biserici-inlemnite | c9d12127b92f25d3ab2fcc7b4c386419fe308a4e | [
"MIT"
] | null | null | null | biserici_inlemnite/biserici/migrations/0004_auto_20210729_1859.py | ck-tm/biserici-inlemnite | c9d12127b92f25d3ab2fcc7b4c386419fe308a4e | [
"MIT"
] | null | null | null | # Generated by Django 3.1.13 on 2021-07-29 15:59
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
class Migration(migrations.Migration):
dependencies = [
('nomenclatoare', '0003_auto_20210729_1859'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('biserici', '0003_auto_20210729_1827'),
]
operations = [
migrations.AddField(
model_name='descriere',
name='detalii_elemente',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='descriere',
name='detalii_elemente_importante',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='descriere',
name='elemente',
field=models.ManyToManyField(help_text='Elemente ansamblu construit', to='nomenclatoare.ElementBiserica'),
),
migrations.AddField(
model_name='descriere',
name='elemente_importante',
field=models.ManyToManyField(help_text='Elemente ansamblu construit', to='nomenclatoare.ElementImportant'),
),
migrations.AddField(
model_name='descriere',
name='peisagistica_sitului',
field=models.ManyToManyField(to='nomenclatoare.PeisagisticaSit'),
),
migrations.AddField(
model_name='descriere',
name='relatia_cu_cimitirul',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='nomenclatoare.relatiecimitir'),
),
migrations.AddField(
model_name='descriere',
name='toponim_sursa',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='historicaldescriere',
name='detalii_elemente',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='historicaldescriere',
name='detalii_elemente_importante',
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name='historicaldescriere',
name='relatia_cu_cimitirul',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='nomenclatoare.relatiecimitir'),
),
migrations.AddField(
model_name='historicaldescriere',
name='toponim_sursa',
field=models.TextField(blank=True, null=True),
),
migrations.CreateModel(
name='PovesteBiserica',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('detalii', models.TextField()),
('sursa', models.TextField()),
('istoric', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.istoric')),
],
options={
'verbose_name_plural': 'Povești Biserică',
},
),
migrations.CreateModel(
name='InterventieBiserica',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datat', models.BooleanField(default=False)),
('an', models.IntegerField(blank=True, null=True)),
('observatii', models.TextField(blank=True, null=True)),
('sursa', models.TextField(blank=True, null=True)),
('este_ultima_interventie', models.BooleanField(default=False)),
('element', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='nomenclatoare.elementbiserica')),
('istoric', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.istoric')),
],
),
migrations.CreateModel(
name='HistoricalPovesteBiserica',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('detalii', models.TextField()),
('sursa', models.TextField()),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('istoric', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='biserici.istoric')),
],
options={
'verbose_name': 'historical poveste biserica',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='Fotografii',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('biserica', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='biserici.biserica')),
],
),
migrations.CreateModel(
name='FotografieTurn',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieTalpa',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieStreasina',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografiePortalPronaos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografiePortalNaos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografiePortal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografiePisanieInscriptieCtitorMester',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieObiectCult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieMobilierCandelabre',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieInvelitoare',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieInteriorDesfasurat',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieIconostasNaos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieIconostasAltar',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieIcoana',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieFereastra',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieFatada',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieDetaliuPod',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('detaliu', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='nomenclatoare.detaliupodturn')),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieDetaliuBolta',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieDegradariPod',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieDegradariInterior',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieDegradariExterioare',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieCruceBiserica',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieCheotoar',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
migrations.CreateModel(
name='FotografieAnsamblu',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poza', models.ImageField(max_length=250, upload_to='fotografii')),
('detalii', models.TextField(blank=True, null=True)),
('copyright', models.CharField(blank=True, max_length=150, null=True)),
('fotografii', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='biserici.fotografii')),
],
),
]
| 55.452316 | 181 | 0.596727 | 1,942 | 20,351 | 6.121009 | 0.081359 | 0.041726 | 0.040044 | 0.062926 | 0.84849 | 0.846134 | 0.827711 | 0.813746 | 0.810718 | 0.792294 | 0 | 0.01301 | 0.259742 | 20,351 | 366 | 182 | 55.603825 | 0.776037 | 0.00226 | 0 | 0.805556 | 1 | 0 | 0.156578 | 0.033443 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.022222 | 0 | 0.030556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fd1cbcd416695bf87eb6e2d4af9c206e7edb600c | 8,302 | py | Python | unit_tests/test_process_message.py | LandRegistry/maintain-feeder | b574dc56fe8d07d8842d7a9caa5bb479a79daae3 | [
"MIT"
] | null | null | null | unit_tests/test_process_message.py | LandRegistry/maintain-feeder | b574dc56fe8d07d8842d7a9caa5bb479a79daae3 | [
"MIT"
] | null | null | null | unit_tests/test_process_message.py | LandRegistry/maintain-feeder | b574dc56fe8d07d8842d7a9caa5bb479a79daae3 | [
"MIT"
] | 1 | 2021-04-11T05:25:00.000Z | 2021-04-11T05:25:00.000Z | from maintain_feeder.process_message import MessageProcessor
from maintain_feeder import config
import unittest
from unittest.mock import MagicMock, patch, ANY
from unit_tests.data import \
test_process_message_data_no_item_hash, test_process_message_data_no_item_signature, \
test_process_message_no_land_charge, test_process_message_data_valid
class TestMessageProcessor(unittest.TestCase):
    """Unit tests for MessageProcessor.process_message.

    Every test patches the module-level collaborators of
    maintain_feeder.process_message (local_land_charge, session, rabbitmq)
    and drives process_message with a mocked AMQP message, then asserts on
    the resulting ack/reject/republish behaviour.
    """

    @staticmethod
    def _run(payload, retry_count=0):
        """Call process_message with *payload* and a mock message whose
        retry-count header returns *retry_count*; return the mock message."""
        mock_message = MagicMock()
        mock_message.headers.get.return_value = retry_count
        MessageProcessor(MagicMock()).process_message(payload, mock_message, MagicMock())
        return mock_message

    def _assert_republished(self, mock_rabbitmq, queue):
        """Assert the message was republished to *queue*."""
        mock_rabbitmq.publish_message.assert_called_with(
            ANY, ANY, ANY, ANY, queue, queue_name=queue, headers=ANY)

    @patch('maintain_feeder.process_message.local_land_charge')
    @patch('maintain_feeder.process_message.session')
    @patch('maintain_feeder.process_message.rabbitmq')
    def test_process_message_no_hash(self, mock_rabbitmq, mock_session, mock_local_land_charge):
        # Missing item hash: reject and republish on the normal queue for retry.
        message = self._run(test_process_message_data_no_item_hash.process_message_no_item_hash)
        mock_local_land_charge.process_land_charge.assert_not_called()
        mock_session.commit.assert_not_called()
        message.reject.assert_called()
        self._assert_republished(mock_rabbitmq, config.QUEUE_NAME)

    @patch('maintain_feeder.process_message.local_land_charge')
    @patch('maintain_feeder.process_message.session')
    @patch('maintain_feeder.process_message.rabbitmq')
    def test_process_message_no_hash_max_retry(self, mock_rabbitmq, mock_session, mock_local_land_charge):
        # Retry count exhausted: reject and divert to the error queue instead.
        message = self._run(
            test_process_message_data_no_item_hash.process_message_no_item_hash,
            retry_count=config.MAX_MSG_RETRY + 1)
        mock_local_land_charge.process_land_charge.assert_not_called()
        mock_session.commit.assert_not_called()
        message.reject.assert_called()
        self._assert_republished(mock_rabbitmq, config.ERROR_QUEUE_NAME)

    @patch('maintain_feeder.process_message.local_land_charge')
    @patch('maintain_feeder.process_message.session')
    @patch('maintain_feeder.process_message.rabbitmq')
    def test_process_message_no_sig(self, mock_rabbitmq, mock_session, mock_local_land_charge):
        # Missing item signature: reject and republish for retry.
        message = self._run(
            test_process_message_data_no_item_signature.process_message_no_item_signature)
        mock_local_land_charge.process_land_charge.assert_not_called()
        mock_session.commit.assert_not_called()
        message.reject.assert_called()
        self._assert_republished(mock_rabbitmq, config.QUEUE_NAME)

    @patch('maintain_feeder.process_message.local_land_charge')
    @patch('maintain_feeder.process_message.session')
    @patch('maintain_feeder.process_message.rabbitmq')
    def test_process_message_no_land_charge(self, mock_rabbitmq, mock_session, mock_local_land_charge):
        # Payload without a land charge entry: reject and republish for retry.
        message = self._run(
            test_process_message_no_land_charge.process_message_valid_no_land_charge)
        mock_local_land_charge.process_land_charge.assert_not_called()
        mock_session.commit.assert_not_called()
        message.reject.assert_called()
        self._assert_republished(mock_rabbitmq, config.QUEUE_NAME)

    @patch('maintain_feeder.process_message.local_land_charge')
    @patch('maintain_feeder.process_message.session')
    @patch('maintain_feeder.process_message.rabbitmq')
    def test_process_message_valid_dupe(self, mock_rabbitmq, mock_session, mock_local_land_charge):
        # Entry already present (query .first() is non-None): ack without
        # reprocessing and without republishing.
        mock_session.query.return_value.filter_by.return_value.first.return_value = "NOT NONE"
        message = self._run(test_process_message_data_valid.process_message_valid)
        mock_local_land_charge.process_land_charge.assert_not_called()
        mock_session.commit.assert_not_called()
        message.reject.assert_not_called()
        message.ack.assert_called()
        mock_rabbitmq.publish_message.assert_not_called()

    @patch('maintain_feeder.process_message.local_land_charge')
    @patch('maintain_feeder.process_message.session')
    @patch('maintain_feeder.process_message.rabbitmq')
    @patch('maintain_feeder.process_message.Register')
    def test_process_message_valid_gap(self, mock_register, mock_rabbitmq, mock_session, mock_local_land_charge):
        # Stored scalar (presumably the max stored entry number) is 1 while
        # the incoming entry is further along: the charge is processed and
        # entries 2..18 are re-requested via Register.republish_entries.
        mock_session.query.return_value.filter_by.return_value.first.return_value = None
        mock_session.query.return_value.scalar.return_value = 1
        message = self._run(test_process_message_data_valid.process_message_valid)
        mock_local_land_charge.process_land_charge.assert_called()
        mock_session.commit.assert_not_called()
        message.reject.assert_not_called()
        message.ack.assert_called()
        mock_rabbitmq.publish_message.assert_not_called()
        mock_register.return_value.republish_entries.assert_called_with(list(range(2, 19)))

    @patch('maintain_feeder.process_message.local_land_charge')
    @patch('maintain_feeder.process_message.session')
    @patch('maintain_feeder.process_message.rabbitmq')
    def test_process_message_valid(self, mock_rabbitmq, mock_session, mock_local_land_charge):
        # Contiguous valid entry: process the land charge and ack.
        mock_session.query.return_value.filter_by.return_value.first.return_value = None
        mock_session.query.return_value.scalar.return_value = 18
        message = self._run(test_process_message_data_valid.process_message_valid)
        mock_local_land_charge.process_land_charge.assert_called()
        mock_session.commit.assert_not_called()
        message.reject.assert_not_called()
        message.ack.assert_called()
        mock_rabbitmq.publish_message.assert_not_called()

    @patch('maintain_feeder.process_message.local_land_charge')
    @patch('maintain_feeder.process_message.session')
    @patch('maintain_feeder.process_message.rabbitmq')
    def test_process_message_null(self, mock_rabbitmq, mock_session, mock_local_land_charge):
        # Null entry: no charge processing, but the session is committed
        # and the message is acked.
        mock_session.query.return_value.filter_by.return_value.first.return_value = None
        mock_session.query.return_value.scalar.return_value = 18
        message = self._run(test_process_message_data_valid.process_message_null)
        mock_local_land_charge.process_land_charge.assert_not_called()
        mock_session.commit.assert_called()
        message.reject.assert_not_called()
        message.ack.assert_called()
        mock_rabbitmq.publish_message.assert_not_called()
| 57.652778 | 119 | 0.731149 | 992 | 8,302 | 5.643145 | 0.071573 | 0.155055 | 0.097535 | 0.130046 | 0.926402 | 0.906752 | 0.897463 | 0.88353 | 0.88353 | 0.883173 | 0 | 0.002385 | 0.191881 | 8,302 | 143 | 120 | 58.055944 | 0.832017 | 0 | 0 | 0.796992 | 0 | 0 | 0.129126 | 0.128162 | 0 | 0 | 0 | 0 | 0.278195 | 1 | 0.06015 | false | 0 | 0.037594 | 0 | 0.105263 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bd1f3a805ff1f0e55761aff2af7fc17ad0cdd5ad | 99 | py | Python | columbus/config.py | Varsha311298/columbus | 1c4df07afd0337ddca7f7845211c85d54ae0b391 | [
"Apache-2.0"
] | null | null | null | columbus/config.py | Varsha311298/columbus | 1c4df07afd0337ddca7f7845211c85d54ae0b391 | [
"Apache-2.0"
] | null | null | null | columbus/config.py | Varsha311298/columbus | 1c4df07afd0337ddca7f7845211c85d54ae0b391 | [
"Apache-2.0"
] | null | null | null | AUTH_SECRET = 'DE78B79CE52A0D55CDE52E825226A0A246DA71B4174EB6026964C0F33A1F7ADF'
BEARER = 'BEARER' | 33 | 80 | 0.878788 | 5 | 99 | 17.2 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.419355 | 0.060606 | 99 | 3 | 81 | 33 | 0.505376 | 0 | 0 | 0 | 0 | 0 | 0.7 | 0.64 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1f292bf41fb3b37d37c2fe1d5c84f12e14225224 | 10,084 | py | Python | interferogram/sentinel/parpython.py | leipan/ariamh | aa307ddad7d0f3f75303e9a5bb53d25ba171605e | [
"Apache-2.0"
] | null | null | null | interferogram/sentinel/parpython.py | leipan/ariamh | aa307ddad7d0f3f75303e9a5bb53d25ba171605e | [
"Apache-2.0"
] | null | null | null | interferogram/sentinel/parpython.py | leipan/ariamh | aa307ddad7d0f3f75303e9a5bb53d25ba171605e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import os, requests
def create_ifg_job(project, stitched, auto_bbox, ifg_id, master_zip_url, master_orbit_url,
                   slave_zip_url, slave_orbit_url, swathnum, bbox, wuid=None, job_num=None):
    """Map function for create interferogram job json creation.

    Builds the job-submission dict (name, type, queue, container mappings,
    time limits and payload) for one sentinel_ifg job.  *wuid* and
    *job_num* are required sciflo tracking values; a RuntimeError is
    raised when either is missing.
    """
    if wuid is None or job_num is None:
        raise RuntimeError("Need to specify workunit id and job num.")

    # Stitched products get their own job type and more scratch disk.
    if stitched:
        job_type, disk_usage = "sentinel_ifg-stitched", "300GB"
    else:
        job_type, disk_usage = "sentinel_ifg-singlescene", "200GB"

    # Files to localize: both orbit files first, then every master zip,
    # then every slave zip.
    localize_urls = [{'url': master_orbit_url}, {'url': slave_orbit_url}]
    localize_urls += [{'url': u} for u in master_zip_url]
    localize_urls += [{'url': u} for u in slave_zip_url]

    payload = {
        # sciflo tracking info
        "_sciflo_wuid": wuid,
        "_sciflo_job_num": job_num,
        # job params
        "project": project,
        "id": ifg_id,
        "master_zip_url": master_zip_url,
        "master_zip_file": [os.path.basename(u) for u in master_zip_url],
        "master_orbit_url": master_orbit_url,
        "master_orbit_file": os.path.basename(master_orbit_url),
        "slave_zip_url": slave_zip_url,
        "slave_zip_file": [os.path.basename(u) for u in slave_zip_url],
        "slave_orbit_url": slave_orbit_url,
        "slave_orbit_file": os.path.basename(slave_orbit_url),
        "swathnum": swathnum,
        "bbox": bbox,
        "auto_bbox": auto_bbox,
        # v2 cmd
        "_command": "/home/ops/ariamh/interferogram/sentinel/create_ifg.sh",
        # disk usage
        "_disk_usage": disk_usage,
        # localize urls
        "localize_urls": localize_urls,
    }

    return {
        "job_name": "%s-%s" % (job_type, ifg_id),
        "job_type": "job:%s" % job_type,
        # one large-worker queue per project
        "job_queue": "%s-job_worker-large" % project,
        "container_mappings": {
            "$HOME/.netrc": "/home/ops/.netrc",
            "$HOME/.aws": "/home/ops/.aws",
            "$HOME/verdi/etc/settings.conf": "/home/ops/ariamh/conf/settings.conf",
        },
        "soft_time_limit": 86400,
        "time_limit": 86700,
        "payload": payload,
    }
def create_offset_job(project, stitched, auto_bbox, ifg_id, master_zip_url, master_orbit_url,
                      slave_zip_url, slave_orbit_url, swathnum, bbox, ampcor_skip_width, ampcor_skip_height,
                      ampcor_src_win_width, ampcor_src_win_height, ampcor_src_width, ampcor_src_height,
                      dem_urls, wuid=None, job_num=None):
    """Map function for create offset job json creation.

    (Docstring previously copy-pasted from create_ifg_job; this function
    builds a sentinel_offset job that runs create_offset.sh.)  Same shape
    as the interferogram job dict, plus the ampcor matching parameters
    (skip/search-window/source-window sizes, passed through untouched) and
    the DEM urls in the payload.  *wuid* and *job_num* are required sciflo
    tracking values; a RuntimeError is raised when either is missing.
    """
    if wuid is None or job_num is None:
        raise RuntimeError("Need to specify workunit id and job num.")

    # Stitched products get their own job type and more scratch disk.
    if stitched:
        job_type, disk_usage = "sentinel_offset-stitched", "300GB"
    else:
        job_type, disk_usage = "sentinel_offset-singlescene", "200GB"

    # Files to localize: both orbit files first, then every master zip,
    # then every slave zip.
    localize_urls = [{'url': master_orbit_url}, {'url': slave_orbit_url}]
    localize_urls += [{'url': u} for u in master_zip_url]
    localize_urls += [{'url': u} for u in slave_zip_url]

    payload = {
        # sciflo tracking info
        "_sciflo_wuid": wuid,
        "_sciflo_job_num": job_num,
        # job params
        "project": project,
        "id": ifg_id,
        "master_zip_url": master_zip_url,
        "master_zip_file": [os.path.basename(u) for u in master_zip_url],
        "master_orbit_url": master_orbit_url,
        "master_orbit_file": os.path.basename(master_orbit_url),
        "slave_zip_url": slave_zip_url,
        "slave_zip_file": [os.path.basename(u) for u in slave_zip_url],
        "slave_orbit_url": slave_orbit_url,
        "slave_orbit_file": os.path.basename(slave_orbit_url),
        "swathnum": swathnum,
        "bbox": bbox,
        "auto_bbox": auto_bbox,
        # ampcor matching parameters (forwarded verbatim to the worker)
        "ampcor_skip_width": ampcor_skip_width,
        "ampcor_skip_height": ampcor_skip_height,
        "ampcor_src_win_width": ampcor_src_win_width,
        "ampcor_src_win_height": ampcor_src_win_height,
        "ampcor_src_width": ampcor_src_width,
        "ampcor_src_height": ampcor_src_height,
        "dem_urls": dem_urls,
        # v2 cmd
        "_command": "/home/ops/ariamh/interferogram/sentinel/create_offset.sh",
        # disk usage
        "_disk_usage": disk_usage,
        # localize urls
        "localize_urls": localize_urls,
    }

    return {
        "job_name": "%s-%s" % (job_type, ifg_id),
        "job_type": "job:%s" % job_type,
        # one large-worker queue per project
        "job_queue": "%s-job_worker-large" % project,
        "container_mappings": {
            "$HOME/.netrc": "/home/ops/.netrc",
            "$HOME/.aws": "/home/ops/.aws",
            "$HOME/verdi/etc/settings.conf": "/home/ops/ariamh/conf/settings.conf",
        },
        "soft_time_limit": 86400,
        "time_limit": 86700,
        "payload": payload,
    }
def create_rsp_job(project, stitched, auto_bbox, rsp_id, master_zip_url, master_orbit_url,
                   slave_zip_url, slave_orbit_url, swathnum, bbox, wuid=None, job_num=None):
    """Map function for create slc_pair product job json creation.

    Builds the job-submission dict for one sentinel_rsp job running
    create_rsp.sh.  *wuid* and *job_num* are required sciflo tracking
    values; a RuntimeError is raised when either is missing.
    """
    if wuid is None or job_num is None:
        raise RuntimeError("Need to specify workunit id and job num.")

    # Stitched products get their own job type and more scratch disk.
    job_type = "sentinel_rsp-stitched" if stitched else "sentinel_rsp-singlescene"
    disk_usage = "300GB" if stitched else "200GB"

    # Localize both orbit files, then every master and slave zip.
    localize_urls = [{'url': master_orbit_url}, {'url': slave_orbit_url}]
    localize_urls.extend({'url': u} for u in master_zip_url)
    localize_urls.extend({'url': u} for u in slave_zip_url)

    return {
        "job_name": "%s-%s" % (job_type, rsp_id),
        "job_type": "job:%s" % job_type,
        # one large-worker queue per project
        "job_queue": "%s-job_worker-large" % project,
        "container_mappings": {
            "$HOME/.netrc": "/home/ops/.netrc",
            "$HOME/.aws": "/home/ops/.aws",
            "$HOME/verdi/etc/settings.conf": "/home/ops/ariamh/conf/settings.conf",
        },
        "soft_time_limit": 86400,
        "time_limit": 86700,
        "payload": {
            # sciflo tracking info
            "_sciflo_wuid": wuid,
            "_sciflo_job_num": job_num,
            # job params
            "project": project,
            "id": rsp_id,
            "master_zip_url": master_zip_url,
            "master_zip_file": [os.path.basename(u) for u in master_zip_url],
            "master_orbit_url": master_orbit_url,
            "master_orbit_file": os.path.basename(master_orbit_url),
            "slave_zip_url": slave_zip_url,
            "slave_zip_file": [os.path.basename(u) for u in slave_zip_url],
            "slave_orbit_url": slave_orbit_url,
            "slave_orbit_file": os.path.basename(slave_orbit_url),
            "swathnum": swathnum,
            "bbox": bbox,
            "auto_bbox": auto_bbox,
            # v2 cmd
            "_command": "/home/ops/ariamh/interferogram/sentinel/create_rsp.sh",
            # disk usage
            "_disk_usage": disk_usage,
            # localize urls
            "localize_urls": localize_urls,
        },
    }
def create_xtstitched_ifg_job(project, stitched, auto_bbox, ifg_id, master_zip_url, master_orbit_url,
                              slave_zip_url, slave_orbit_url, bbox, wuid=None, job_num=None):
    """Map function for create cross-track stitched interferogram job json creation.

    Same job dict shape as create_ifg_job, except there is no swath
    parameter (payload "swathnum" is always None) and the disk requirement
    is a flat 500GB.  *wuid* and *job_num* are required sciflo tracking
    values; a RuntimeError is raised when either is missing.
    """
    if wuid is None or job_num is None:
        raise RuntimeError("Need to specify workunit id and job num.")

    # Only the job type depends on stitching here; disk usage is fixed below.
    job_type = "sentinel_ifg-stitched" if stitched else "sentinel_ifg-singlescene"

    # Localize both orbit files, then every master and slave zip.
    localize_urls = [{'url': master_orbit_url}, {'url': slave_orbit_url}]
    for zip_url in master_zip_url:
        localize_urls.append({'url': zip_url})
    for zip_url in slave_zip_url:
        localize_urls.append({'url': zip_url})

    return {
        "job_name": "%s-%s" % (job_type, ifg_id),
        "job_type": "job:%s" % job_type,
        # one large-worker queue per project
        "job_queue": "%s-job_worker-large" % project,
        "container_mappings": {
            "$HOME/.netrc": "/home/ops/.netrc",
            "$HOME/.aws": "/home/ops/.aws",
            "$HOME/verdi/etc/settings.conf": "/home/ops/ariamh/conf/settings.conf",
        },
        "soft_time_limit": 86400,
        "time_limit": 86700,
        "payload": {
            # sciflo tracking info
            "_sciflo_wuid": wuid,
            "_sciflo_job_num": job_num,
            # job params
            "project": project,
            "id": ifg_id,
            "master_zip_url": master_zip_url,
            "master_zip_file": [os.path.basename(u) for u in master_zip_url],
            "master_orbit_url": master_orbit_url,
            "master_orbit_file": os.path.basename(master_orbit_url),
            "slave_zip_url": slave_zip_url,
            "slave_zip_file": [os.path.basename(u) for u in slave_zip_url],
            "slave_orbit_url": slave_orbit_url,
            "slave_orbit_file": os.path.basename(slave_orbit_url),
            # cross-track jobs cover all swaths, so no single swath number
            "swathnum": None,
            "bbox": bbox,
            "auto_bbox": auto_bbox,
            # v2 cmd
            "_command": "/home/ops/ariamh/interferogram/sentinel/create_ifg.sh",
            # disk usage
            "_disk_usage": "500GB",
            # localize urls
            "localize_urls": localize_urls,
        },
    }
| 35.507042 | 105 | 0.583697 | 1,259 | 10,084 | 4.347895 | 0.087371 | 0.043844 | 0.043844 | 0.052612 | 0.974973 | 0.964925 | 0.943186 | 0.934783 | 0.913774 | 0.895871 | 0 | 0.009213 | 0.300377 | 10,084 | 283 | 106 | 35.632509 | 0.76669 | 0.084887 | 0 | 0.794118 | 0 | 0 | 0.270409 | 0.073896 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019608 | false | 0 | 0.004902 | 0 | 0.044118 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1f8b4669832791339c38d197c693a4345b3165bf | 29 | py | Python | PyCharm/primer12.py | PervykhDarya/laba10 | 631e676d9683f3b1d4e976778bf9ddf0f8f5cfc3 | [
"MIT"
] | null | null | null | PyCharm/primer12.py | PervykhDarya/laba10 | 631e676d9683f3b1d4e976778bf9ddf0f8f5cfc3 | [
"MIT"
] | null | null | null | PyCharm/primer12.py | PervykhDarya/laba10 | 631e676d9683f3b1d4e976778bf9ddf0f8f5cfc3 | [
"MIT"
] | null | null | null | a = {0, 1, 12, 3, 2}
print(a) | 14.5 | 20 | 0.448276 | 8 | 29 | 1.625 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.272727 | 0.241379 | 29 | 2 | 21 | 14.5 | 0.318182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
2f667ecec6bc921f2b96ec87084174336cfe1e2e | 37,068 | py | Python | src/uproot/models/TBranch.py | kkothari2001/uproot4 | e184ff5ca75d95953e94e4c2e7928d725b46de7e | [
"BSD-3-Clause"
] | null | null | null | src/uproot/models/TBranch.py | kkothari2001/uproot4 | e184ff5ca75d95953e94e4c2e7928d725b46de7e | [
"BSD-3-Clause"
] | null | null | null | src/uproot/models/TBranch.py | kkothari2001/uproot4 | e184ff5ca75d95953e94e4c2e7928d725b46de7e | [
"BSD-3-Clause"
] | null | null | null | # BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/main/LICENSE
"""
This module defines versioned models for ``TBranch`` and its subclasses.
See :doc:`uproot.behaviors.TBranch` for definitions of ``TTree``-reading
functions.
"""
import struct
import numpy
import uproot
import uproot.models.TH
# Big-endian struct for the 11 fixed-width TBranch (v10) members read in one
# cursor.fields call: fCompress, fBasketSize, fEntryOffsetLen, fWriteBasket,
# fEntryNumber, fOffset, fMaxBaskets, fSplitLevel, fEntries, fTotBytes,
# fZipBytes.
_tbranch10_format1 = struct.Struct(">iiiiqiIiqqq")
# Element dtype of the fBasketBytes array (big-endian int32).
_tbranch10_dtype1 = numpy.dtype(">i4")
# Element dtype of the fBasketEntry array (big-endian int64).
_tbranch10_dtype2 = numpy.dtype(">i8")
# Big-endian int64 dtype; presumably for fBasketSeek — its use is below the
# visible portion of read_members, TODO confirm.
_tbranch10_dtype3 = numpy.dtype(">i8")
_rawstreamer_ROOT_3a3a_TIOFeatures_v1 = (
None,
b"@\x00\x00\xe0\xff\xff\xff\xffTStreamerInfo\x00@\x00\x00\xca\x00\t@\x00\x00\x1f\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x11ROOT::TIOFeatures\x00\x1a\xa1/\x10\x00\x00\x00\x01@\x00\x00\x99\xff\xff\xff\xffTObjArray\x00@\x00\x00\x87\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00@\x00\x00n\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00S\x00\x02@\x00\x00M\x00\x04@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fIOBits\x00\x00\x00\x00\x0b\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\runsigned char\x00",
"ROOT::TIOFeatures",
1,
)
_rawstreamer_TBranch_v13 = (
None,
b'@\x00\rf\xff\xff\xff\xffTStreamerInfo\x00@\x00\rP\x00\t@\x00\x00\x15\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x07TBranch\x00\x10\x97\x8a\xac\x00\x00\x00\r@\x00\r)\xff\xff\xff\xffTObjArray\x00@\x00\r\x17\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x16\x00\x00\x00\x00@\x00\x00\x8d\xff\xff\xff\xffTStreamerBase\x00@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00y\xff\xff\xff\xffTStreamerBase\x00@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00\x85\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00j\x00\x02@\x00\x00d\x00\x04@\x00\x006\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfCompress\x1fCompression level and algorithm\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x86\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00k\x00\x02@\x00\x00e\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBasketSize\x1eInitial Size of Basket Buffer\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\xa6\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00\x8b\x00\x02@\x00\x00\x85\x00\x04@\x00\x00W\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0ffEntryOffsetLen:Initial Length of fEntryOffset table in the basket 
buffers\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x83\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00h\x00\x02@\x00\x00b\x00\x04@\x00\x004\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfWriteBasket\x1aLast basket number written\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\xa3\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00\x88\x00\x02@\x00\x00\x82\x00\x04@\x00\x00O\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfEntryNumber5Current entry number (last one filled in this branch)\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x9c\xff\xff\xff\xffTStreamerObjectAny\x00@\x00\x00\x81\x00\x02@\x00\x00{\x00\x04@\x00\x00?\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfIOFeatures&IO features for newly-created baskets.\x00\x00\x00>\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11ROOT::TIOFeatures@\x00\x00y\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00^\x00\x02@\x00\x00X\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fOffset\x15Offset of this branch\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x88\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00m\x00\x02@\x00\x00g\x00\x04@\x00\x009\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfMaxBaskets Maximum number of Baskets so 
far\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00z\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00_\x00\x02@\x00\x00Y\x00\x04@\x00\x00+\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfSplitLevel\x12Branch split level\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00{\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00`\x00\x02@\x00\x00Z\x00\x04@\x00\x00\'\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fEntries\x11Number of entries\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x95\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00z\x00\x02@\x00\x00t\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfFirstEntry(Number of the first entry in this branch\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\xa1\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00\x86\x00\x02@\x00\x00\x80\x00\x04@\x00\x00M\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfTotBytes6Total number of bytes in all leaves before compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\xa0\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00\x85\x00\x02@\x00\x00\x7f\x00\x04@\x00\x00L\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfZipBytes5Total number of bytes in all leaves after 
compression\x00\x00\x00\x10\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08Long64_t@\x00\x00\x8b\xff\xff\xff\xffTStreamerObject\x00@\x00\x00s\x00\x02@\x00\x00m\x00\x04@\x00\x009\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfBranches"-> List of Branches of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\x87\xff\xff\xff\xffTStreamerObject\x00@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x005\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fLeaves -> List of leaves of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\x89\xff\xff\xff\xffTStreamerObject\x00@\x00\x00q\x00\x02@\x00\x00k\x00\x04@\x00\x007\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fBaskets!-> List of baskets of this branch\x00\x00\x00=\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tTObjArray@\x00\x00\xac\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00\x8e\x00\x02@\x00\x00p\x00\x04@\x00\x00A\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfBasketBytes\'[fMaxBaskets] Length of baskets on file\x00\x00\x00+\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04int*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\xbb\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00\x9d\x00\x02@\x00\x00\x7f\x00\x04@\x00\x00K\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0cfBasketEntry1[fMaxBaskets] Table of first entry in each 
basket\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\xb3\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00\x95\x00\x02@\x00\x00w\x00\x04@\x00\x00C\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x0bfBasketSeek*[fMaxBaskets] Addresses of baskets on file\x00\x00\x008\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\tLong64_t*\x00\x00\x00\r\x0bfMaxBaskets\x07TBranch@\x00\x00\xb0\xff\xff\xff\xffTStreamerString\x00@\x00\x00\x98\x00\x02@\x00\x00\x92\x00\x04@\x00\x00`\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\tfFileNameIName of file where buffers are stored ("" if in same file as Tree header)\x00\x00\x00A\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07TString\x00',
"TBranch",
13,
)
class Model_TBranch_v10(uproot.behaviors.TBranch.TBranch, uproot.model.VersionedModel):
    """
    A :doc:`uproot.model.VersionedModel` for ``TBranch`` version 10.
    """

    # High-level branch behaviors (the array-reading interface) mixed into the model.
    behaviors = (uproot.behaviors.TBranch.TBranch,)

    def read_members(self, chunk, cursor, context, file):
        # Deserializes the TBranch v10 members from ``chunk`` at ``cursor``.
        # The reads below follow the exact ROOT streamer order; do not reorder.
        if self.is_memberwise:
            # Memberwise (split) serialization is not supported for this class.
            raise NotImplementedError(
                """memberwise serialization of {}
in file {}""".format(
                    type(self).__name__, self.file.file_path
                )
            )
        # Base objects: TNamed (name/title) followed by TAttFill (fill attributes).
        self._bases.append(
            file.class_named("TNamed", 1).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        self._bases.append(
            file.class_named("TAttFill", 1).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        # All fixed-size numeric members, unpacked with a single struct read.
        (
            self._members["fCompress"],
            self._members["fBasketSize"],
            self._members["fEntryOffsetLen"],
            self._members["fWriteBasket"],
            self._members["fEntryNumber"],
            self._members["fOffset"],
            self._members["fMaxBaskets"],
            self._members["fSplitLevel"],
            self._members["fEntries"],
            self._members["fTotBytes"],
            self._members["fZipBytes"],
        ) = cursor.fields(chunk, _tbranch10_format1, context)
        # Sub-branches and leaves of this branch (each a TObjArray).
        self._members["fBranches"] = file.class_named("TObjArray").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fLeaves"] = file.class_named("TObjArray").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        # Remember where fBaskets begins so baskets can be re-read later.
        self._cursor_baskets = cursor.copy()
        if file.options["minimal_ttree_metadata"]:
            # Try to skip over the fBaskets TObjArray; fall back to a full
            # read if the byte count needed for skipping is unavailable.
            if not cursor.skip_over(chunk, context):
                file.class_named("TObjArray").read(
                    chunk, cursor, context, file, self._file, self.concrete
                )
        else:
            self._members["fBaskets"] = file.class_named("TObjArray").read(
                chunk, cursor, context, file, self._file, self.concrete
            )
        # fBasketBytes: basic-pointer array, preceded by a 1-byte "speedbump".
        tmp = _tbranch10_dtype1
        if context.get("speedbump", True):
            cursor.skip(1)
        self._members["fBasketBytes"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        # fBasketEntry: same speedbump-then-array pattern.
        tmp = _tbranch10_dtype2
        if context.get("speedbump", True):
            cursor.skip(1)
        self._members["fBasketEntry"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        # fBasketSeek: a speedbump byte equal to 2 signals 64-bit seek values.
        tmp = _tbranch10_dtype3
        if context.get("speedbump", True):
            if cursor.bytes(chunk, 1, context)[0] == 2:
                tmp = numpy.dtype(">i8")
        self._members["fBasketSeek"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        if file.options["minimal_ttree_metadata"]:
            # Jump past the remaining members (fFileName) without parsing them.
            cursor.skip_after(self)
        else:
            self._members["fFileName"] = file.class_named("TString").read(
                chunk, cursor, context, file, self._file, self.concrete
            )

    @property
    def member_names(self):
        # Ordered member list; fBaskets/fFileName are absent when the file
        # was opened with the minimal_ttree_metadata option.
        out = [
            "fCompress",
            "fBasketSize",
            "fEntryOffsetLen",
            "fWriteBasket",
            "fEntryNumber",
            "fOffset",
            "fMaxBaskets",
            "fSplitLevel",
            "fEntries",
            "fTotBytes",
            "fZipBytes",
            "fBranches",
            "fLeaves",
            "fBaskets",
            "fBasketBytes",
            "fBasketEntry",
            "fBasketSeek",
            "fFileName",
        ]
        if self._file.options["minimal_ttree_metadata"]:
            out.remove("fBaskets")
            out.remove("fFileName")
        return out

    base_names_versions = [("TNamed", 1), ("TAttFill", 1)]
    class_flags = {}
    class_code = None
# TBranch v11 fixed-member layout (adds fFirstEntry relative to v10).
_tbranch11_format1 = struct.Struct(">iiiiqiIiqqqq")
# Big-endian element dtypes for the basic-pointer arrays.
_tbranch11_dtype1 = numpy.dtype(">i4")  # fBasketBytes
_tbranch11_dtype2 = numpy.dtype(">i8")  # fBasketEntry
_tbranch11_dtype3 = numpy.dtype(">i8")  # fBasketSeek


class Model_TBranch_v11(uproot.behaviors.TBranch.TBranch, uproot.model.VersionedModel):
    """
    A :doc:`uproot.model.VersionedModel` for ``TBranch`` version 11.
    """

    # High-level branch behaviors (the array-reading interface) mixed into the model.
    behaviors = (uproot.behaviors.TBranch.TBranch,)

    def read_members(self, chunk, cursor, context, file):
        # Deserializes the TBranch v11 members in exact ROOT streamer order;
        # do not reorder any of the reads below.
        if self.is_memberwise:
            # Memberwise (split) serialization is not supported for this class.
            raise NotImplementedError(
                """memberwise serialization of {}
in file {}""".format(
                    type(self).__name__, self.file.file_path
                )
            )
        # Base objects: TNamed (name/title) followed by TAttFill (fill attributes).
        self._bases.append(
            file.class_named("TNamed", 1).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        self._bases.append(
            file.class_named("TAttFill", 1).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        # All fixed-size numeric members (including fFirstEntry, new in v11).
        (
            self._members["fCompress"],
            self._members["fBasketSize"],
            self._members["fEntryOffsetLen"],
            self._members["fWriteBasket"],
            self._members["fEntryNumber"],
            self._members["fOffset"],
            self._members["fMaxBaskets"],
            self._members["fSplitLevel"],
            self._members["fEntries"],
            self._members["fFirstEntry"],
            self._members["fTotBytes"],
            self._members["fZipBytes"],
        ) = cursor.fields(chunk, _tbranch11_format1, context)
        # Sub-branches and leaves of this branch (each a TObjArray).
        self._members["fBranches"] = file.class_named("TObjArray").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fLeaves"] = file.class_named("TObjArray").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        # Remember where fBaskets begins so baskets can be re-read later.
        self._cursor_baskets = cursor.copy()
        if file.options["minimal_ttree_metadata"]:
            # Try to skip over the fBaskets TObjArray; fall back to a full
            # read if the byte count needed for skipping is unavailable.
            if not cursor.skip_over(chunk, context):
                file.class_named("TObjArray").read(
                    chunk, cursor, context, file, self._file, self.concrete
                )
        else:
            self._members["fBaskets"] = file.class_named("TObjArray").read(
                chunk, cursor, context, file, self._file, self.concrete
            )
        # fBasketBytes: basic-pointer array, preceded by a 1-byte "speedbump".
        tmp = _tbranch11_dtype1
        if context.get("speedbump", True):
            cursor.skip(1)
        self._members["fBasketBytes"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        # fBasketEntry: same speedbump-then-array pattern.
        tmp = _tbranch11_dtype2
        if context.get("speedbump", True):
            cursor.skip(1)
        self._members["fBasketEntry"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        # fBasketSeek: a speedbump byte equal to 2 signals 64-bit seek values.
        tmp = _tbranch11_dtype3
        if context.get("speedbump", True):
            if cursor.bytes(chunk, 1, context)[0] == 2:
                tmp = numpy.dtype(">i8")
        self._members["fBasketSeek"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        if file.options["minimal_ttree_metadata"]:
            # Jump past the remaining members (fFileName) without parsing them.
            cursor.skip_after(self)
        else:
            self._members["fFileName"] = file.class_named("TString").read(
                chunk, cursor, context, file, self._file, self.concrete
            )

    @property
    def member_names(self):
        # Ordered member list; fBaskets/fFileName are absent when the file
        # was opened with the minimal_ttree_metadata option.
        out = [
            "fCompress",
            "fBasketSize",
            "fEntryOffsetLen",
            "fWriteBasket",
            "fEntryNumber",
            "fOffset",
            "fMaxBaskets",
            "fSplitLevel",
            "fEntries",
            "fFirstEntry",
            "fTotBytes",
            "fZipBytes",
            "fBranches",
            "fLeaves",
            "fBaskets",
            "fBasketBytes",
            "fBasketEntry",
            "fBasketSeek",
            "fFileName",
        ]
        if self._file.options["minimal_ttree_metadata"]:
            out.remove("fBaskets")
            out.remove("fFileName")
        return out

    base_names_versions = [("TNamed", 1), ("TAttFill", 1)]
    class_flags = {}
    class_code = None
# TBranch v12 fixed-member layout (identical field set to v11).
_tbranch12_format1 = struct.Struct(">iiiiqiIiqqqq")
# Big-endian element dtypes for the basic-pointer arrays.
_tbranch12_dtype1 = numpy.dtype(">i4")  # fBasketBytes
_tbranch12_dtype2 = numpy.dtype(">i8")  # fBasketEntry
_tbranch12_dtype3 = numpy.dtype(">i8")  # fBasketSeek


class Model_TBranch_v12(uproot.behaviors.TBranch.TBranch, uproot.model.VersionedModel):
    """
    A :doc:`uproot.model.VersionedModel` for ``TBranch`` version 12.
    """

    # High-level branch behaviors (the array-reading interface) mixed into the model.
    behaviors = (uproot.behaviors.TBranch.TBranch,)

    def read_members(self, chunk, cursor, context, file):
        # Deserializes the TBranch v12 members in exact ROOT streamer order;
        # do not reorder any of the reads below.
        if self.is_memberwise:
            # Memberwise (split) serialization is not supported for this class.
            raise NotImplementedError(
                """memberwise serialization of {}
in file {}""".format(
                    type(self).__name__, self.file.file_path
                )
            )
        # Base objects: TNamed (name/title) followed by TAttFill (fill attributes).
        self._bases.append(
            file.class_named("TNamed", 1).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        self._bases.append(
            file.class_named("TAttFill", 1).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        # All fixed-size numeric members, unpacked with a single struct read.
        (
            self._members["fCompress"],
            self._members["fBasketSize"],
            self._members["fEntryOffsetLen"],
            self._members["fWriteBasket"],
            self._members["fEntryNumber"],
            self._members["fOffset"],
            self._members["fMaxBaskets"],
            self._members["fSplitLevel"],
            self._members["fEntries"],
            self._members["fFirstEntry"],
            self._members["fTotBytes"],
            self._members["fZipBytes"],
        ) = cursor.fields(chunk, _tbranch12_format1, context)
        # Sub-branches and leaves of this branch (each a TObjArray).
        self._members["fBranches"] = file.class_named("TObjArray").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fLeaves"] = file.class_named("TObjArray").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        # Remember where fBaskets begins so baskets can be re-read later.
        self._cursor_baskets = cursor.copy()
        if file.options["minimal_ttree_metadata"]:
            # Try to skip over the fBaskets TObjArray; fall back to a full
            # read if the byte count needed for skipping is unavailable.
            if not cursor.skip_over(chunk, context):
                file.class_named("TObjArray").read(
                    chunk, cursor, context, file, self._file, self.concrete
                )
        else:
            self._members["fBaskets"] = file.class_named("TObjArray").read(
                chunk, cursor, context, file, self._file, self.concrete
            )
        # fBasketBytes: basic-pointer array, preceded by a 1-byte "speedbump".
        tmp = _tbranch12_dtype1
        if context.get("speedbump", True):
            cursor.skip(1)
        self._members["fBasketBytes"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        # fBasketEntry: same speedbump-then-array pattern.
        tmp = _tbranch12_dtype2
        if context.get("speedbump", True):
            cursor.skip(1)
        self._members["fBasketEntry"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        # fBasketSeek: a speedbump byte equal to 2 signals 64-bit seek values.
        tmp = _tbranch12_dtype3
        if context.get("speedbump", True):
            if cursor.bytes(chunk, 1, context)[0] == 2:
                tmp = numpy.dtype(">i8")
        self._members["fBasketSeek"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        if file.options["minimal_ttree_metadata"]:
            # Jump past the remaining members (fFileName) without parsing them.
            cursor.skip_after(self)
        else:
            self._members["fFileName"] = file.class_named("TString").read(
                chunk, cursor, context, file, self._file, self.concrete
            )

    @property
    def member_names(self):
        # Ordered member list; fBaskets/fFileName are absent when the file
        # was opened with the minimal_ttree_metadata option.
        out = [
            "fCompress",
            "fBasketSize",
            "fEntryOffsetLen",
            "fWriteBasket",
            "fEntryNumber",
            "fOffset",
            "fMaxBaskets",
            "fSplitLevel",
            "fEntries",
            "fFirstEntry",
            "fTotBytes",
            "fZipBytes",
            "fBranches",
            "fLeaves",
            "fBaskets",
            "fBasketBytes",
            "fBasketEntry",
            "fBasketSeek",
            "fFileName",
        ]
        if self._file.options["minimal_ttree_metadata"]:
            out.remove("fBaskets")
            out.remove("fFileName")
        return out

    base_names_versions = [("TNamed", 1), ("TAttFill", 1)]
    class_flags = {}
    class_code = None
# TBranch v13 splits the fixed members into two structs, because the
# fIOFeatures object is streamed between them.
_tbranch13_format1 = struct.Struct(">iiiiq")   # fCompress .. fEntryNumber
_tbranch13_format2 = struct.Struct(">iIiqqqq")  # fOffset .. fZipBytes
# Big-endian element dtypes for the basic-pointer arrays.
_tbranch13_dtype1 = numpy.dtype(">i4")  # fBasketBytes
_tbranch13_dtype2 = numpy.dtype(">i8")  # fBasketEntry
_tbranch13_dtype3 = numpy.dtype(">i8")  # fBasketSeek


class Model_TBranch_v13(uproot.behaviors.TBranch.TBranch, uproot.model.VersionedModel):
    """
    A :doc:`uproot.model.VersionedModel` for ``TBranch`` version 13.
    """

    # High-level branch behaviors (the array-reading interface) mixed into the model.
    behaviors = (uproot.behaviors.TBranch.TBranch,)

    def read_members(self, chunk, cursor, context, file):
        # Deserializes the TBranch v13 members in exact ROOT streamer order;
        # do not reorder any of the reads below.
        if self.is_memberwise:
            # Memberwise (split) serialization is not supported for this class.
            raise NotImplementedError(
                """memberwise serialization of {}
in file {}""".format(
                    type(self).__name__, self.file.file_path
                )
            )
        # Base objects: TNamed (name/title), then TAttFill version 2 (not 1 as
        # in earlier TBranch versions).
        self._bases.append(
            file.class_named("TNamed", 1).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        self._bases.append(
            file.class_named("TAttFill", 2).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        # First batch of fixed-size members.
        (
            self._members["fCompress"],
            self._members["fBasketSize"],
            self._members["fEntryOffsetLen"],
            self._members["fWriteBasket"],
            self._members["fEntryNumber"],
        ) = cursor.fields(chunk, _tbranch13_format1, context)
        # fIOFeatures (new in v13) sits between the two numeric batches.
        self._members["fIOFeatures"] = file.class_named("ROOT::TIOFeatures").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        # Second batch of fixed-size members.
        (
            self._members["fOffset"],
            self._members["fMaxBaskets"],
            self._members["fSplitLevel"],
            self._members["fEntries"],
            self._members["fFirstEntry"],
            self._members["fTotBytes"],
            self._members["fZipBytes"],
        ) = cursor.fields(chunk, _tbranch13_format2, context)
        # Sub-branches and leaves of this branch (each a TObjArray).
        self._members["fBranches"] = file.class_named("TObjArray").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fLeaves"] = file.class_named("TObjArray").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        # Remember where fBaskets begins so baskets can be re-read later.
        self._cursor_baskets = cursor.copy()
        if file.options["minimal_ttree_metadata"]:
            # Try to skip over the fBaskets TObjArray; fall back to a full
            # read if the byte count needed for skipping is unavailable.
            if not cursor.skip_over(chunk, context):
                file.class_named("TObjArray").read(
                    chunk, cursor, context, file, self._file, self.concrete
                )
        else:
            self._members["fBaskets"] = file.class_named("TObjArray").read(
                chunk, cursor, context, file, self._file, self.concrete
            )
        # fBasketBytes: basic-pointer array, preceded by a 1-byte "speedbump".
        tmp = _tbranch13_dtype1
        if context.get("speedbump", True):
            cursor.skip(1)
        self._members["fBasketBytes"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        # fBasketEntry: same speedbump-then-array pattern.
        tmp = _tbranch13_dtype2
        if context.get("speedbump", True):
            cursor.skip(1)
        self._members["fBasketEntry"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        # fBasketSeek: a speedbump byte equal to 2 signals 64-bit seek values.
        tmp = _tbranch13_dtype3
        if context.get("speedbump", True):
            if cursor.bytes(chunk, 1, context)[0] == 2:
                tmp = numpy.dtype(">i8")
        self._members["fBasketSeek"] = cursor.array(
            chunk, self.member("fMaxBaskets"), tmp, context
        )
        if file.options["minimal_ttree_metadata"]:
            # Jump past the remaining members (fFileName) without parsing them.
            cursor.skip_after(self)
        else:
            self._members["fFileName"] = file.class_named("TString").read(
                chunk, cursor, context, file, self._file, self.concrete
            )

    @property
    def member_names(self):
        # Ordered member list; fBaskets/fFileName are absent when the file
        # was opened with the minimal_ttree_metadata option.
        out = [
            "fCompress",
            "fBasketSize",
            "fEntryOffsetLen",
            "fWriteBasket",
            "fEntryNumber",
            "fIOFeatures",
            "fOffset",
            "fMaxBaskets",
            "fSplitLevel",
            "fEntries",
            "fFirstEntry",
            "fTotBytes",
            "fZipBytes",
            "fBranches",
            "fLeaves",
            "fBaskets",
            "fBasketBytes",
            "fBasketEntry",
            "fBasketSeek",
            "fFileName",
        ]
        if self._file.options["minimal_ttree_metadata"]:
            out.remove("fBaskets")
            out.remove("fFileName")
        return out

    base_names_versions = [("TNamed", 1), ("TAttFill", 2)]
    class_flags = {}
    class_code = None
    # Raw streamer records needed to write this class (and its dependencies)
    # back into a ROOT file.
    class_rawstreamers = (
        uproot.models.TH._rawstreamer_TCollection_v3,
        uproot.models.TH._rawstreamer_TSeqCollection_v0,
        uproot.models.TObjArray._rawstreamer_TObjArray_v3,
        _rawstreamer_ROOT_3a3a_TIOFeatures_v1,
        uproot.models.TH._rawstreamer_TAttFill_v2,
        uproot.models.TH._rawstreamer_TString_v2,
        uproot.models.TH._rawstreamer_TObject_v1,
        uproot.models.TH._rawstreamer_TNamed_v1,
        _rawstreamer_TBranch_v13,
    )
class Model_TBranch(uproot.model.DispatchByVersion):
    """
    A :doc:`uproot.model.DispatchByVersion` for ``TBranch``.
    """

    # Maps on-disk streamer version number -> concrete VersionedModel subclass.
    known_versions = {
        10: Model_TBranch_v10,
        11: Model_TBranch_v11,
        12: Model_TBranch_v12,
        13: Model_TBranch_v13,
    }
# TBranchElement v8 fixed members: fCheckSum (unsigned) then five 32-bit ints.
_tbranchelement8_format1 = struct.Struct(">Iiiiii")


class Model_TBranchElement_v8(
    uproot.behaviors.TBranch.TBranch, uproot.model.VersionedModel
):
    """
    A :doc:`uproot.model.VersionedModel` for ``TBranchElement`` version 8.
    """

    # High-level branch behaviors (the array-reading interface) mixed into the model.
    behaviors = (uproot.behaviors.TBranch.TBranch,)

    def read_members(self, chunk, cursor, context, file):
        # TBranchElement v8 embeds a full TBranch v10 as its base, then adds
        # class/parent/clones names, fixed-size fields, and two object refs.
        if self.is_memberwise:
            # Memberwise (split) serialization is not supported for this class.
            raise NotImplementedError(
                """memberwise serialization of {}
in file {}""".format(
                    type(self).__name__, self.file.file_path
                )
            )
        self._bases.append(
            file.class_named("TBranch", 10).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        # Reuse the base TBranch's saved basket cursor for lazy basket reads.
        self._cursor_baskets = self._bases[0]._cursor_baskets
        self._members["fClassName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fParentName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fClonesName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        (
            self._members["fCheckSum"],
            self._members["fClassVersion"],
            self._members["fID"],
            self._members["fType"],
            self._members["fStreamerType"],
            self._members["fMaximum"],
        ) = cursor.fields(chunk, _tbranchelement8_format1, context)
        # fBranchCount/fBranchCount2 are polymorphic object references
        # (may resolve to None).
        self._members["fBranchCount"] = uproot.deserialization.read_object_any(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fBranchCount2"] = uproot.deserialization.read_object_any(
            chunk, cursor, context, file, self._file, self.concrete
        )

    base_names_versions = [("TBranch", 10)]
    member_names = [
        "fClassName",
        "fParentName",
        "fClonesName",
        "fCheckSum",
        "fClassVersion",
        "fID",
        "fType",
        "fStreamerType",
        "fMaximum",
        "fBranchCount",
        "fBranchCount2",
    ]
    class_flags = {"has_read_object_any": True}
    class_code = None
# TBranchElement v9 fixed members: same layout as v8.
_tbranchelement9_format1 = struct.Struct(">Iiiiii")


class Model_TBranchElement_v9(
    uproot.behaviors.TBranch.TBranch, uproot.model.VersionedModel
):
    """
    A :doc:`uproot.model.VersionedModel` for ``TBranchElement`` version 9.
    """

    # High-level branch behaviors (the array-reading interface) mixed into the model.
    behaviors = (uproot.behaviors.TBranch.TBranch,)

    def read_members(self, chunk, cursor, context, file):
        # TBranchElement v9 embeds a TBranch v12 base (v8 used v10), then the
        # same name strings, fixed-size fields, and object references.
        if self.is_memberwise:
            # Memberwise (split) serialization is not supported for this class.
            raise NotImplementedError(
                """memberwise serialization of {}
in file {}""".format(
                    type(self).__name__, self.file.file_path
                )
            )
        self._bases.append(
            file.class_named("TBranch", 12).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        # Reuse the base TBranch's saved basket cursor for lazy basket reads.
        self._cursor_baskets = self._bases[0]._cursor_baskets
        self._members["fClassName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fParentName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fClonesName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        (
            self._members["fCheckSum"],
            self._members["fClassVersion"],
            self._members["fID"],
            self._members["fType"],
            self._members["fStreamerType"],
            self._members["fMaximum"],
        ) = cursor.fields(chunk, _tbranchelement9_format1, context)
        # fBranchCount/fBranchCount2 are polymorphic object references
        # (may resolve to None).
        self._members["fBranchCount"] = uproot.deserialization.read_object_any(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fBranchCount2"] = uproot.deserialization.read_object_any(
            chunk, cursor, context, file, self._file, self.concrete
        )

    base_names_versions = [("TBranch", 12)]
    member_names = [
        "fClassName",
        "fParentName",
        "fClonesName",
        "fCheckSum",
        "fClassVersion",
        "fID",
        "fType",
        "fStreamerType",
        "fMaximum",
        "fBranchCount",
        "fBranchCount2",
    ]
    class_flags = {"has_read_object_any": True}
    class_code = None
# TBranchElement v10 fixed members: fClassVersion shrank to a 16-bit short
# ("h"), unlike the 32-bit int in v8/v9.
_tbranchelement10_format1 = struct.Struct(">Ihiiii")


class Model_TBranchElement_v10(
    uproot.behaviors.TBranch.TBranch, uproot.model.VersionedModel
):
    """
    A :doc:`uproot.model.VersionedModel` for ``TBranchElement`` version 10.
    """

    # High-level branch behaviors (the array-reading interface) mixed into the model.
    behaviors = (uproot.behaviors.TBranch.TBranch,)

    def read_members(self, chunk, cursor, context, file):
        # TBranchElement v10 embeds a TBranch v12 base, then the name strings,
        # fixed-size fields (note the 16-bit fClassVersion), and object refs.
        if self.is_memberwise:
            # Memberwise (split) serialization is not supported for this class.
            raise NotImplementedError(
                """memberwise serialization of {}
in file {}""".format(
                    type(self).__name__, self.file.file_path
                )
            )
        self._bases.append(
            file.class_named("TBranch", 12).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        # Reuse the base TBranch's saved basket cursor for lazy basket reads.
        self._cursor_baskets = self._bases[0]._cursor_baskets
        self._members["fClassName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fParentName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fClonesName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )
        (
            self._members["fCheckSum"],
            self._members["fClassVersion"],
            self._members["fID"],
            self._members["fType"],
            self._members["fStreamerType"],
            self._members["fMaximum"],
        ) = cursor.fields(chunk, _tbranchelement10_format1, context)
        # fBranchCount/fBranchCount2 are polymorphic object references
        # (may resolve to None).
        self._members["fBranchCount"] = uproot.deserialization.read_object_any(
            chunk, cursor, context, file, self._file, self.concrete
        )
        self._members["fBranchCount2"] = uproot.deserialization.read_object_any(
            chunk, cursor, context, file, self._file, self.concrete
        )

    base_names_versions = [("TBranch", 12)]
    member_names = [
        "fClassName",
        "fParentName",
        "fClonesName",
        "fCheckSum",
        "fClassVersion",
        "fID",
        "fType",
        "fStreamerType",
        "fMaximum",
        "fBranchCount",
        "fBranchCount2",
    ]
    class_flags = {"has_read_object_any": True}
    class_code = None
class Model_TBranchElement(uproot.model.DispatchByVersion):
    """
    A :doc:`uproot.model.DispatchByVersion` for ``TBranchElement``.
    """

    # Maps on-disk streamer version number -> concrete VersionedModel subclass.
    known_versions = {
        8: Model_TBranchElement_v8,
        9: Model_TBranchElement_v9,
        10: Model_TBranchElement_v10,
    }
class Model_TBranchObject_v1(
    uproot.behaviors.TBranch.TBranch, uproot.model.VersionedModel
):
    """
    A :doc:`uproot.model.VersionedModel` for ``TBranchObject`` version 1.
    """

    # High-level branch behaviors (the array-reading interface) mixed into the model.
    behaviors = (uproot.behaviors.TBranch.TBranch,)

    def read_members(self, chunk, cursor, context, file):
        # TBranchObject v1 is simply a TBranch v13 base plus one fClassName string.
        if self.is_memberwise:
            # Memberwise (split) serialization is not supported for this class.
            raise NotImplementedError(
                """memberwise serialization of {}
in file {}""".format(
                    type(self).__name__, self.file.file_path
                )
            )
        self._bases.append(
            file.class_named("TBranch", 13).read(
                chunk,
                cursor,
                context,
                file,
                self._file,
                self._parent,
                concrete=self.concrete,
            )
        )
        self._members["fClassName"] = file.class_named("TString").read(
            chunk, cursor, context, file, self._file, self.concrete
        )

    base_names_versions = [("TBranch", 13)]
    member_names = ["fClassName"]
    class_flags = {}
class Model_TBranchObject(uproot.model.DispatchByVersion):
    """
    A :doc:`uproot.model.DispatchByVersion` for ``TBranchObject``.
    """

    # Maps on-disk streamer version number -> concrete VersionedModel subclass.
    known_versions = {
        1: Model_TBranchObject_v1,
    }
# Register the dispatching models so uproot resolves these ROOT class names
# to the versioned models defined above.
uproot.classes["TBranch"] = Model_TBranch
uproot.classes["TBranchElement"] = Model_TBranchElement
uproot.classes["TBranchObject"] = Model_TBranchObject
| 42.460481 | 8,186 | 0.602164 | 4,405 | 37,068 | 4.938252 | 0.082633 | 0.267825 | 0.325197 | 0.35802 | 0.846826 | 0.834138 | 0.805498 | 0.782926 | 0.778881 | 0.768078 | 0 | 0.124864 | 0.254829 | 37,068 | 872 | 8,187 | 42.509174 | 0.66266 | 0.026114 | 0 | 0.728972 | 0 | 0.00534 | 0.297685 | 0.195775 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016021 | false | 0 | 0.00534 | 0 | 0.093458 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
2f951710d02a1d12e0520bbecfbc998d77558296 | 344 | py | Python | tests/python/fixtures.py | gauteh/dars | b3e50e11ddfad97833eaab07dd93392441b54ed0 | [
"MIT"
] | 7 | 2020-01-14T16:48:05.000Z | 2021-09-30T03:59:30.000Z | tests/python/fixtures.py | gauteh/dars | b3e50e11ddfad97833eaab07dd93392441b54ed0 | [
"MIT"
] | 13 | 2020-01-18T12:19:48.000Z | 2020-09-08T08:25:47.000Z | tests/python/fixtures.py | gauteh/dars | b3e50e11ddfad97833eaab07dd93392441b54ed0 | [
"MIT"
] | 1 | 2020-01-18T12:03:52.000Z | 2020-01-18T12:03:52.000Z | import os
import pytest
@pytest.fixture
def dars():
    """Base URL of the local dars server exercised by the tests."""
    base_url = "http://localhost:8001/data/"
    return base_url
@pytest.fixture
def tds():
    """Base URL of the local THREDDS (TDS) OPeNDAP endpoint for test data."""
    base_url = "http://localhost:8002/thredds/dodsC/test/data/"
    return base_url
@pytest.fixture
def hyrax():
    """Base URL of the local Hyrax OPeNDAP endpoint."""
    base_url = "http://localhost:8080/opendap/"
    return base_url
@pytest.fixture
def data():
    """Filesystem path to the repository's shared data directory."""
    here = os.path.dirname(__file__)
    return os.path.join(here, "../../data/")
| 17.2 | 63 | 0.700581 | 47 | 344 | 5.042553 | 0.489362 | 0.219409 | 0.270042 | 0.168776 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.039216 | 0.110465 | 344 | 19 | 64 | 18.105263 | 0.735294 | 0 | 0 | 0.285714 | 0 | 0 | 0.332362 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | true | 0 | 0.142857 | 0.285714 | 0.714286 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
c0e51485003328b03a6ec3fea44895a14ede3bb3 | 16,622 | py | Python | tests/test_glucose_math.py | novalegra/PyLoopKit | c275ef9490b0c528841525bf1b501e9c8805b20a | [
"BSD-2-Clause"
] | 6 | 2020-04-08T15:17:58.000Z | 2021-06-04T06:47:15.000Z | tests/test_glucose_math.py | novalegra/PyLoopKit | c275ef9490b0c528841525bf1b501e9c8805b20a | [
"BSD-2-Clause"
] | 8 | 2019-08-29T01:38:41.000Z | 2021-03-11T22:58:07.000Z | tests/test_glucose_math.py | novalegra/PyLoopKit | c275ef9490b0c528841525bf1b501e9c8805b20a | [
"BSD-2-Clause"
] | 5 | 2019-09-03T21:51:14.000Z | 2021-01-20T04:15:37.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 11 2019
@author: annaquinlan
Github URL: https://github.com/tidepool-org/LoopKit/blob/
57a9f2ba65ae3765ef7baafe66b883e654e08391/LoopKitTests/GlucoseMathTests.swift
"""
# pylint: disable=C0111, C0411, R0201, W0105, W0612, C0200
# diable pylint warnings for too many arguments/variables and missing docstring
import unittest
from datetime import datetime
#from . import path_grabber # pylint: disable=unused-import
from .loop_kit_tests import load_fixture
from pyloopkit.glucose_math import linear_momentum_effect, counteraction_effects
class TestGlucoseKitFunctions(unittest.TestCase):
""" unittest class to run GlucoseKit tests."""
def load_input_fixture(self, resource_name):
""" Load input json file
Arguments:
resource_name -- name of file without the extension
Variable names:
fixture -- list of dictionaries; each dictionary contains properties
of a GlucoseFixtureValue
Output:
4 lists in (date, glucose_value,
display_only (for calibration purposes), providence_identifier) format
"""
fixture = load_fixture(resource_name, ".json")
dates = [datetime.fromisoformat(dict_.get("date"))
for dict_ in fixture]
glucose_values = [dict_.get("amount") for dict_ in fixture]
def get_boolean(dict_):
return dict_.get("display_only") in ("yes", "true", "True")
display_onlys = [get_boolean(dict_) for dict_ in fixture]
providences = [dict_.get("provenance_identifier")
or "com.loopkit.LoopKitTests"
for dict_ in fixture]
assert len(dates) == len(glucose_values) == len(display_onlys) ==\
len(providences), "expected output shape to match"
return (dates, glucose_values, display_onlys, providences)
def load_output_fixture(self, resource_name):
""" Load output json file
Arguments:
resource_name -- name of file without the extension
Output:
2 lists in (date, glucose_value) format
"""
fixture = load_fixture(resource_name, ".json")
dates = [datetime.fromisoformat(dict_.get("date"))
for dict_ in fixture]
glucose_values = [dict_.get("amount") for dict_ in fixture]
assert len(dates) == len(glucose_values),\
"expected output shape to match"
return (dates, glucose_values)
def load_effect_velocity_fixture(self, resource_name):
""" Load effect-velocity json file
Arguments:
resource_name -- name of file without the extension
Output:
3 lists in (start_date, end_date, glucose_effects) format
"""
fixture = load_fixture(resource_name, ".json")
start_dates = [datetime.fromisoformat(dict_.get("startDate"))
for dict_ in fixture]
end_dates = [datetime.fromisoformat(dict_.get("endDate"))
for dict_ in fixture]
glucose_effects = [dict_.get("value") for dict_ in fixture]
assert len(start_dates) == len(end_dates) == len(glucose_effects),\
"expected output shape to match"
return (start_dates, end_dates, glucose_effects)
""" Tests for linear_momentum_effect """
def test_momentum_effect_for_bouncing_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture(
"momentum_effect_bouncing_glucose_input"
)
(expected_date_list,
expected_glucose_list
) = self.load_output_fixture(
"momentum_effect_bouncing_glucose_output"
)
(glucose_effect_dates,
glucose_effect_values
) = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)
self.assertEqual(
len(expected_date_list), len(glucose_effect_dates)
)
for i in range(0, len(expected_date_list)):
self.assertEqual(
expected_date_list[i], glucose_effect_dates[i]
)
self.assertAlmostEqual(
glucose_effect_values[i], expected_glucose_list[i], 2
)
def test_momentum_effect_for_rising_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture("momentum_effect_rising_glucose_input")
(expected_date_list,
expected_glucose_list
) = self.load_output_fixture("momentum_effect_rising_glucose_output")
(glucose_effect_dates,
glucose_effect_values
) = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)
self.assertEqual(
len(expected_date_list), len(glucose_effect_dates)
)
for i in range(0, len(expected_date_list)):
self.assertEqual(
expected_date_list[i], glucose_effect_dates[i]
)
self.assertAlmostEqual(
glucose_effect_values[i], expected_glucose_list[i], 2
)
def test_momentum_effect_for_rising_glucose_doubles(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture(
"momentum_effect_rising_glucose_double_entries_input"
)
(expected_date_list,
expected_glucose_list
) = self.load_output_fixture(
"momentum_effect_rising_glucose_output"
)
(glucose_effect_dates,
glucose_effect_values
) = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)
self.assertEqual(
len(expected_date_list), len(glucose_effect_dates)
)
for i in range(0, len(expected_date_list)):
self.assertEqual(
expected_date_list[i], glucose_effect_dates[i]
)
self.assertAlmostEqual(
glucose_effect_values[i], expected_glucose_list[i], 2
)
def test_momentum_effect_for_falling_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture("momentum_effect_falling_glucose_input")
(expected_date_list,
expected_glucose_list
) = self.load_output_fixture(
"momentum_effect_falling_glucose_output"
)
(glucose_effect_dates,
glucose_effect_values
) = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)
self.assertEqual(
len(expected_date_list), len(glucose_effect_dates)
)
for i in range(0, len(expected_date_list)):
self.assertEqual(
expected_date_list[i], glucose_effect_dates[i]
)
self.assertAlmostEqual(
glucose_effect_values[i], expected_glucose_list[i], 2
)
def test_momentum_effect_for_falling_glucose_duplicates(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture(
"momentum_effect_falling_glucose_duplicate_input"
)
(expected_date_list,
expected_glucose_list
) = self.load_output_fixture(
"momentum_effect_falling_glucose_output"
)
(glucose_effect_dates,
glucose_effect_values
) = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)
self.assertEqual(
len(expected_date_list), len(glucose_effect_dates)
)
for i in range(0, len(expected_date_list)):
self.assertEqual(
expected_date_list[i], glucose_effect_dates[i]
)
self.assertAlmostEqual(
glucose_effect_values[i], expected_glucose_list[i], 2
)
def test_momentum_effect_for_stable_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture("momentum_effect_stable_glucose_input")
(expected_date_list,
expected_glucose_list
) = self.load_output_fixture("momentum_effect_stable_glucose_output")
(glucose_effect_dates,
glucose_effect_values
) = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)
self.assertEqual(
len(expected_date_list), len(glucose_effect_dates)
)
for i in range(0, len(expected_date_list)):
self.assertEqual(
expected_date_list[i], glucose_effect_dates[i]
)
self.assertAlmostEqual(
glucose_effect_values[i], expected_glucose_list[i], 2
)
def test_momentum_effect_for_duplicate_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture("momentum_effect_duplicate_glucose_input")
glucose_effect_dates = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)[0]
self.assertEqual(
0, len(glucose_effect_dates)
)
def test_momentum_effect_for_empty_glucose(self):
glucose_effect_dates = linear_momentum_effect(
[], [], [], []
)[0]
self.assertEqual(
0, len(glucose_effect_dates)
)
def test_momentum_effect_for_spaced_expected_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture(
"momentum_effect_incomplete_glucose_input"
)
glucose_effect_dates = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)[0]
self.assertEqual(
0, len(glucose_effect_dates)
)
def test_momentum_effect_for_too_few_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture("momentum_effect_bouncing_glucose_input")
glucose_effect_dates = linear_momentum_effect(
i_date_list[0:1],
i_glucose_list[0:1],
display_list[0:1],
providence_list[0:1]
)[0]
self.assertEqual(
0, len(glucose_effect_dates)
)
def test_momentum_effect_for_display_only_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture(
"momentum_effect_display_only_glucose_input"
)
glucose_effect_dates = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)[0]
self.assertEqual(
0, len(glucose_effect_dates)
)
def test_momentum_effect_for_mixed_provenance_glucose(self):
(i_date_list,
i_glucose_list,
display_list,
providence_list
) = self.load_input_fixture(
"momentum_effect_mixed_provenance_glucose_input"
)
glucose_effect_dates = linear_momentum_effect(
i_date_list,
i_glucose_list,
display_list,
providence_list
)[0]
self.assertEqual(
0, len(glucose_effect_dates)
)
""" Tests for counteraction_effects """
def test_counteraction_effects_for_falling_glucose(self):
(i_dates,
i_glucoses,
displays,
provenances
) = self.load_input_fixture(
"counteraction_effect_falling_glucose_input"
)
(effect_dates,
effect_glucoses
) = self.load_output_fixture("momentum_effect_stable_glucose_output")
(expected_start_dates,
expected_end_dates,
expected_velocities
) = self.load_effect_velocity_fixture(
"counteraction_effect_falling_glucose_output"
)
(start_dates,
end_dates,
velocities
) = counteraction_effects(
i_dates,
i_glucoses,
displays,
provenances,
effect_dates,
effect_glucoses
)
self.assertEqual(
len(expected_start_dates), len(start_dates)
)
for i in range(0, len(expected_start_dates)):
self.assertEqual(
expected_start_dates[i], start_dates[i]
)
self.assertAlmostEqual(
expected_velocities[i], velocities[i], 2
)
def test_counteraction_effects_for_falling_glucose_duplicates(self):
    """Exact duplicate glucose entries do not change the counteraction result."""
    (input_dates, input_glucoses, input_displays, input_provenances
     ) = self.load_input_fixture(
        "counteraction_effect_falling_glucose_double_entries_input"
    )
    effect_dates, effect_glucoses = self.load_output_fixture(
        "counteraction_effect_falling_glucose_insulin"
    )
    (expected_start_dates, expected_end_dates, expected_velocities
     ) = self.load_effect_velocity_fixture(
        "counteraction_effect_falling_glucose_output"
    )
    start_dates, end_dates, velocities = counteraction_effects(
        input_dates, input_glucoses, input_displays, input_provenances,
        effect_dates, effect_glucoses
    )
    self.assertEqual(len(expected_start_dates), len(start_dates))
    # Velocities are compared to two decimal places only.
    for i in range(len(expected_start_dates)):
        self.assertEqual(expected_start_dates[i], start_dates[i])
        self.assertAlmostEqual(expected_velocities[i], velocities[i], 2)
def test_counteraction_effects_for_falling_glucose_almost_duplicates(self):
    """Near-duplicate glucose entries match their own dedicated fixture."""
    (input_dates, input_glucoses, input_displays, input_provenances
     ) = self.load_input_fixture(
        "counteraction_effect_falling_glucose_almost_duplicates_input"
    )
    effect_dates, effect_glucoses = self.load_output_fixture(
        "counteraction_effect_falling_glucose_insulin"
    )
    (expected_start_dates, expected_end_dates, expected_velocities
     ) = self.load_effect_velocity_fixture(
        "counteraction_effect_falling_glucose_almost_duplicates_output"
    )
    start_dates, end_dates, velocities = counteraction_effects(
        input_dates, input_glucoses, input_displays, input_provenances,
        effect_dates, effect_glucoses
    )
    self.assertEqual(len(expected_start_dates), len(start_dates))
    # Velocities are compared to two decimal places only.
    for i in range(len(expected_start_dates)):
        self.assertEqual(expected_start_dates[i], start_dates[i])
        self.assertAlmostEqual(expected_velocities[i], velocities[i], 2)
def test_counteraction_effects_for_no_glucose(self):
    """With no glucose samples at all there are no counteraction effects."""
    effect_dates, effect_glucoses = self.load_output_fixture(
        "counteraction_effect_falling_glucose_insulin"
    )
    start_dates, end_dates, velocities = counteraction_effects(
        [], [], [], [],
        effect_dates, effect_glucoses
    )
    self.assertEqual(0, len(start_dates))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 29.682143 | 80 | 0.591265 | 1,660 | 16,622 | 5.470482 | 0.096988 | 0.040524 | 0.059465 | 0.047572 | 0.836252 | 0.808281 | 0.792314 | 0.774254 | 0.774254 | 0.753331 | 0 | 0.008931 | 0.339851 | 16,622 | 559 | 81 | 29.735242 | 0.818646 | 0.064192 | 0 | 0.705479 | 0 | 0 | 0.089833 | 0.078252 | 0 | 0 | 0 | 0 | 0.084475 | 1 | 0.045662 | false | 0 | 0.009132 | 0.002283 | 0.06621 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9b088e54933951303b2bb3e9538adf674a6ee977 | 2,176 | py | Python | AuthSrvr/utils/constants.py | anindya/license-pool | 2fb62c86c452947dacdfeb02b676e4a045e006d6 | [
"Apache-2.0"
] | null | null | null | AuthSrvr/utils/constants.py | anindya/license-pool | 2fb62c86c452947dacdfeb02b676e4a045e006d6 | [
"Apache-2.0"
] | null | null | null | AuthSrvr/utils/constants.py | anindya/license-pool | 2fb62c86c452947dacdfeb02b676e4a045e006d6 | [
"Apache-2.0"
] | null | null | null | MAX_LICENSES = 10
# Built-in demo accounts used to seed the user store.
# NOTE(review): credentials are stored in plain text — acceptable only for a
# demo/test fixture; real deployments should store hashed passwords. The
# PRIVATE_KEY / PRIVATE_KEY_PASSPHRASE constants defined below are likewise
# hardcoded secrets and should move to environment configuration.
DEFAULT_USERS = [
    {
        'uname' : "Emma",
        'password' : "abcd"
    },
    {
        'uname' : "John",
        'password' : "efgh"
    },
    {
        'uname' : "Sam",
        'password' : "ijkl"
    }
]
# Number of seconds after which, if no ping has been received for a license,
# that license is revoked.
THRESHOLD = 30
PRIVATE_KEY='-----BEGIN PRIVATE KEY-----\nProc-Type: 4,ENCRYPTED\nDEK-Info: DES-EDE3-CBC,00B3AD9BF093B3D9\n\ntJ6qSqZYrycNjU40VcFty1LZkepDVDFHSdWSDwe9j5RmUKnrGebSos83rwrFRCX0\nkTR45QAK5kt912EapQVWy3Oh7zQpILO1stP8NWL/QCfxWLaAlnfXyGY6bi1+i+YG\n6vDU/dZ8xSOHd6Y/4mB36nHcMH2+gRU8VQnsxLd8wcQIf1uZyf7XmcyIxIqWfcNJ\ndrIgKK/sa/oULMqUfgCtYNvRi0r/70wZd7iwg5X2/Cyc02Zc90TAXz3CDwh8LXkY\nV+yTe+fU2VHxvad8lma54rp/Le1f8bqplc0s5a9X7Lb6unpOBWVYbSjE59RSlAW+\nqW+IGvTGinubWNwpDJYXcNVteRlBeFROzaVPqDZ34NmZSjg6QVSkFBe0y/aYsSJV\nHpbWiLBYIV6CqxTlPNFIxqNXbOPahJoGavBtr+Yh6AmGNIcxYI7HhUquGVkfYeqj\nXxY1EWmbG2KJIl0W4LlHvHtfCjWBASpMQ5ENvyA046ja2ZdobYpLsQ/mwTgKg2Dh\nchLhWKPjy8S2/uaol5OmRMWCrwA6AXtA8KYK0BUXVCKe4IFMix4SB498etz0bz4Z\nI9BZf8YJTfEeVDPcQOg0qsE3h3mtsaLWJvVhpWLRVR+fD00ukxG5CP6lTmBsko90\n2GQxkA/+U+pcPUEyLH4NG2QJ+wG/EExg8cHCJMdlIqM4dwlwEM3VbHre1mod/go1\nhMpXGpahYUr7Qm0+iJ4WwaJe7Lv1vgO/9KoyId3VRDdaP65YOKpwRZiyWCtZBhmn\nFhtefzyeBZH22Qm+zsFs2i4YaIGGTJ+B9rOMQJhqzuLN3eMQV9O77s/dBCLrzqFF\n37qEFQ1GVQq/njfzMt8mQVTE3J3e3AuYkvnTGQqJPMtOMr0VzndwPTB8e4Cj2oGf\nLhoEYJq4tYdD6Xk5TUNNoTwWqLoSe+eIAKqTUljPIxGLti13MfJYogAKdEVRNCpn\nG+lZ3tRfp8ZwXvy2MbR28ZYTNq5BQNIwcEBHxI0mRzOkCt3Ly/qWwHUQ3+6eThh6\nLmeKhKmxHMv/FnpuHccA49hdmce6lx/JEJkoRLty1uqzGCzr4ZDpTiUsV67srPLF\nFI0Xq9MrrbhEw96gvx55+J63Uo7sYMI5zv/9d748Pvgv+nR/MpxSWAtqcFGAj6qu\n9RyCy8xc+kN0ZPpgfQVr77yyyKYj7QWHx1z1UzeENWnUu11m9fn4q/NUsbHB/Myr\nNVaSYeVb6PaEiLn643wMnO5WgAOXiyWo43Vh4Oa3yZOubdHNYdouZx+bbbAEdMkF\nkHJ/4GoJmam77yUnWCw5i4alHDATOLfVkeyeSl0TDgxSIds7kSH2IHmH+2Tjw8Pa\nMuUTQXgeD/nfk/6IA7uSp2w7t49BDW7eox3Lz5CEto/pM+sQM00SJ2xk/heAR/Te\nKYSCsjuQirVsMAjDA9Wa9IRJtTLYKCy0hTa4JdzCItzv9hR8tIxUn/+2X0K3/Ngc\nyM8W/kVB17Ab6DWFTkVIv94NefOHN8SHb7JyPk7IyZkNlMbpjkmYWPr9QiTGXodv\njk4OD8QO4ja6roGQyMMQwUR6ooj9v4mb+4iNdfbwtYuCQZ7M8vHUL4vhkXj8zH1c\n0AzXedyWAHKWFCr9CRxESVoRqxKikTJO\n-----END PRIVATE KEY-----'
PRIVATE_KEY_PASSPHRASE="secret" | 114.526316 | 1,819 | 0.863051 | 138 | 2,176 | 13.572464 | 0.913043 | 0.021356 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129331 | 0.058364 | 2,176 | 19 | 1,820 | 114.526316 | 0.784773 | 0.032169 | 0 | 0 | 0 | 0.055556 | 0.889364 | 0.829535 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0.222222 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
9b172c767c106662ef750fc483bb1c506aa8a5e1 | 7,300 | py | Python | test/filters/test_filters.py | alphagov/transactions-explorer | 1e780cd8b4f2923f527812e97c581ef2982b6077 | [
"MIT"
] | 1 | 2017-03-31T15:05:42.000Z | 2017-03-31T15:05:42.000Z | test/filters/test_filters.py | gds-attic/transactions-explorer | 1e780cd8b4f2923f527812e97c581ef2982b6077 | [
"MIT"
] | null | null | null | test/filters/test_filters.py | gds-attic/transactions-explorer | 1e780cd8b4f2923f527812e97c581ef2982b6077 | [
"MIT"
] | 2 | 2019-08-29T11:33:36.000Z | 2021-04-10T20:07:44.000Z | from hamcrest import assert_that, is_
from lib.filters import number_as_magnitude, number_as_financial_magnitude, join_url_parts, string_as_static_url, digest, number_as_grouped_number, number_as_percentage_change
def test_number_as_magnitude():
    """number_as_magnitude formats to 3 significant figures with k/m/bn suffixes."""
    cases = [
        (1.23, "1.23"), (1.234, "1.23"), (1.236, "1.24"),
        (12.3, "12.3"), (12.34, "12.3"), (12.36, "12.4"),
        (123, "123"), (123.4, "123"), (123.6, "124"),
        (1230, "1.23k"), (1234, "1.23k"), (1236, "1.24k"),
        (12300, "12.3k"), (12340, "12.3k"), (12360, "12.4k"),
        (123000, "123k"), (123400, "123k"), (123600, "124k"),
        (1230000, "1.23m"), (1234000, "1.23m"), (1236000, "1.24m"),
        (12300000, "12.3m"), (12340000, "12.3m"), (12360000, "12.4m"),
        (123000000, "123m"), (123400000, "123m"), (123600000, "124m"),
        (1230000000, "1.23bn"), (1234000000, "1.23bn"), (1236000000, "1.24bn"),
        (12300000000, "12.3bn"), (12340000000, "12.3bn"), (12360000000, "12.4bn"),
        (123000000000, "123bn"), (123400000000, "123bn"), (123600000000, "124bn"),
    ]
    for value, expected in cases:
        assert_that(number_as_magnitude(value), is_(expected))
def test_number_as_financial_magnitude():
    """Like number_as_magnitude, but small values keep two decimal places."""
    cases = [
        (1.23, "1.23"), (1.234, "1.23"), (1.236, "1.24"),
        (12.33, "12.33"), (12.334, "12.33"), (12.336, "12.34"),
        (123, "123"), (123.4, "123"), (123.6, "124"),
        (1230, "1.23k"), (1234, "1.23k"), (1236, "1.24k"),
        (12300, "12.3k"), (12340, "12.3k"), (12360, "12.4k"),
        (123000, "123k"), (123400, "123k"), (123600, "124k"),
        (1230000, "1.23m"), (1234000, "1.23m"), (1236000, "1.24m"),
        (12300000, "12.3m"), (12340000, "12.3m"), (12360000, "12.4m"),
        (123000000, "123m"), (123400000, "123m"), (123600000, "124m"),
        (1230000000, "1.23bn"), (1234000000, "1.23bn"), (1236000000, "1.24bn"),
        (12300000000, "12.3bn"), (12340000000, "12.3bn"), (12360000000, "12.4bn"),
        (123000000000, "123bn"), (123400000000, "123bn"), (123600000000, "124bn"),
    ]
    for value, expected in cases:
        assert_that(number_as_financial_magnitude(value), is_(expected))
def test_number_as_grouped_number():
    """Numbers get comma thousands-grouping; non-numbers become empty strings."""
    cases = [
        (123456789, "123,456,789"),
        (123, "123"),
        (4567.22, "4,567"),   # fractions round to the nearest integer
        (4567.98, "4,568"),
        ("not a number", ""),
    ]
    for value, expected in cases:
        assert_that(number_as_grouped_number(value), is_(expected))
def test_number_as_percentage_change():
    """Ratios render as signed percentage changes relative to 1.0."""
    cases = [
        (None, "0%"),
        (1.0, "0%"),
        (1.00001, "0%"),      # tiny deviations collapse to 0%
        (0.999991, "0%"),
        (0.0, "-100%"),
        (2.0, "+100%"),
        (1.1234567, "+12.35%"),
        (0.1234567, "-87.65%"),
    ]
    for ratio, expected in cases:
        assert_that(number_as_percentage_change(ratio), is_(expected))
class Test_join_url_parts(object):
    """join_url_parts glues a prefix and a path with exactly one slash."""

    def test_string_as_link(self):
        assert_that(join_url_parts('/', 'some/path'), is_('/some/path'))

    def test_string_as_link_with_user_defined_path_prefix(self):
        assert_that(join_url_parts('/custom/prefix/', 'some/path'),
                    is_('/custom/prefix/some/path'))

    def test_string_as_link_adds_trailing_slash_after_prefix(self):
        # A missing trailing slash on the prefix is supplied automatically.
        assert_that(join_url_parts('/custom/prefix', 'some/path'),
                    is_('/custom/prefix/some/path'))

    def test_string_as_link_does_not_add_double_slashes(self):
        # Prefix slash + leading path slash collapse to a single slash.
        assert_that(join_url_parts('/custom/prefix/', '/some/path'),
                    is_('/custom/prefix/some/path'))
class Test_string_as_static_url:
    """string_as_static_url prefixes the assets host and applies digests."""

    def setUp(self):
        # Start every test with an empty digest lookup table.
        digest.set_digests({})

    def test_return_url_with_digest(self):
        # A registered digest replaces the plain filename in the URL.
        digest.set_digests({'asset.css': 'asset-1425361275412.css'})
        assert_that(
            string_as_static_url('asset.css'),
            is_('https://assets.digital.cabinet-office.gov.uk/static/asset-1425361275412.css'))

    def test_fallback_to_plain_url_when_digest_is_unknown(self):
        assert_that(
            string_as_static_url('asset.css'),
            is_('https://assets.digital.cabinet-office.gov.uk/static/asset.css'))
| 43.712575 | 175 | 0.732055 | 1,039 | 7,300 | 4.666025 | 0.13667 | 0.153465 | 0.280528 | 0.315594 | 0.865305 | 0.857673 | 0.799711 | 0.755363 | 0.578589 | 0.153053 | 0 | 0.131801 | 0.123836 | 7,300 | 166 | 176 | 43.975904 | 0.626173 | 0 | 0 | 0.08871 | 0 | 0.008065 | 0.103699 | 0.013014 | 0 | 0 | 0 | 0 | 0.741935 | 1 | 0.08871 | false | 0 | 0.016129 | 0 | 0.120968 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f19e2fcc43558a133ddbd3589ffe7850bdaa80f8 | 35 | py | Python | ml_automation/readers/__init__.py | ChillBoss/ml_automation | 50d42b3cd5a3bb2f7a91e4c53bf3bbfe7a3b1741 | [
"MIT"
] | null | null | null | ml_automation/readers/__init__.py | ChillBoss/ml_automation | 50d42b3cd5a3bb2f7a91e4c53bf3bbfe7a3b1741 | [
"MIT"
] | null | null | null | ml_automation/readers/__init__.py | ChillBoss/ml_automation | 50d42b3cd5a3bb2f7a91e4c53bf3bbfe7a3b1741 | [
"MIT"
] | null | null | null | from .csv_reader import csv_reader
| 17.5 | 34 | 0.857143 | 6 | 35 | 4.666667 | 0.666667 | 0.642857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 35 | 1 | 35 | 35 | 0.903226 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f1da3a61e3ba8b2db240bcaea5a4db4ae90ccec4 | 60,028 | py | Python | src/app_sav.py | ethuee/earthengine | 9786109a98ed3f7fcad630e1dd69f63591183a37 | [
"MIT"
] | 126 | 2017-03-22T14:20:30.000Z | 2022-03-10T22:05:38.000Z | src/app_sav.py | ethuee/earthengine | 9786109a98ed3f7fcad630e1dd69f63591183a37 | [
"MIT"
] | 5 | 2017-05-27T14:53:17.000Z | 2020-11-11T12:17:37.000Z | src/app_sav.py | ethuee/earthengine | 9786109a98ed3f7fcad630e1dd69f63591183a37 | [
"MIT"
] | 57 | 2017-03-23T18:22:06.000Z | 2022-03-17T06:56:38.000Z | import ee, getpass, time, math, sys
from flask import Flask, render_template, request
from eeMad import imad
from eeWishart import omnibus
# Authenticate/initialise the Earth Engine client before any ee.* calls.
ee.Initialize()
# Flask application; static assets are served from /static.
app = Flask(__name__, static_url_path='/static')
def simon(path):
    """Return a fixed Sentinel-1 test sequence found under *path*.

    Each scene is passed through the "S1.dB" algorithm before being collected.
    """
    scene_ids = [
        'S1A_IW_GRDH_1SDV_20160305T171543_20160305T171608_010237_00F1FA_49DC',
        'S1A_IW_GRDH_1SDV_20160329T171543_20160329T171608_010587_00FBF9_B4DE',
        'S1A_IW_GRDH_1SDV_20160410T171538_20160410T171603_010762_010122_CEF6',
        'S1A_IW_GRDH_1SDV_20160422T171539_20160422T171604_010937_010677_03F6',
        'S1A_IW_GRDH_1SDV_20160504T171539_20160504T171604_011112_010BED_80AF',
        'S1A_IW_GRDH_1SDV_20160516T171540_20160516T171605_011287_011198_FC21',
        'S1A_IW_GRDH_1SDV_20160528T171603_20160528T171628_011462_011752_F570',
        'S1A_IW_GRDH_1SDV_20160609T171604_20160609T171629_011637_011CD1_C2F5',
        'S1A_IW_GRDH_1SDV_20160715T171605_20160715T171630_012162_012DA2_95A1',
        'S1A_IW_GRDH_1SDV_20160727T171606_20160727T171631_012337_013359_29A6',
        'S1A_IW_GRDH_1SDV_20160808T171607_20160808T171632_012512_01392E_44C4',
        'S1A_IW_GRDH_1SDV_20160901T171608_20160901T171633_012862_0144E3_30E5',
        'S1A_IW_GRDH_1SDV_20160925T171609_20160925T171634_013212_015050_8FDB',
        'S1B_IW_GRDH_1SDV_20161001T171508_20161001T171533_002316_003E9D_D195',
        'S1A_IW_GRDH_1SDV_20161007T171609_20161007T171634_013387_0155CD_F513',
        'S1A_IW_GRDH_1SDV_20161019T171609_20161019T171634_013562_015B60_27FF',
        'S1A_IW_GRDH_1SDV_20161031T171609_20161031T171634_013737_0160BD_4FAE',
    ]
    images = ee.List([ee.call("S1.dB", ee.Image(path + sid)) for sid in scene_ids])
    return ee.ImageCollection(images)
def simonf(path):
    """Return the same fixed Sentinel-1 test sequence as simon(), but with
    only the 'VV' and 'VH' bands selected and no dB conversion applied."""
    scene_ids = [
        'S1A_IW_GRDH_1SDV_20160305T171543_20160305T171608_010237_00F1FA_49DC',
        'S1A_IW_GRDH_1SDV_20160329T171543_20160329T171608_010587_00FBF9_B4DE',
        'S1A_IW_GRDH_1SDV_20160410T171538_20160410T171603_010762_010122_CEF6',
        'S1A_IW_GRDH_1SDV_20160422T171539_20160422T171604_010937_010677_03F6',
        'S1A_IW_GRDH_1SDV_20160504T171539_20160504T171604_011112_010BED_80AF',
        'S1A_IW_GRDH_1SDV_20160516T171540_20160516T171605_011287_011198_FC21',
        'S1A_IW_GRDH_1SDV_20160528T171603_20160528T171628_011462_011752_F570',
        'S1A_IW_GRDH_1SDV_20160609T171604_20160609T171629_011637_011CD1_C2F5',
        'S1A_IW_GRDH_1SDV_20160715T171605_20160715T171630_012162_012DA2_95A1',
        'S1A_IW_GRDH_1SDV_20160727T171606_20160727T171631_012337_013359_29A6',
        'S1A_IW_GRDH_1SDV_20160808T171607_20160808T171632_012512_01392E_44C4',
        'S1A_IW_GRDH_1SDV_20160901T171608_20160901T171633_012862_0144E3_30E5',
        'S1A_IW_GRDH_1SDV_20160925T171609_20160925T171634_013212_015050_8FDB',
        'S1B_IW_GRDH_1SDV_20161001T171508_20161001T171533_002316_003E9D_D195',
        'S1A_IW_GRDH_1SDV_20161007T171609_20161007T171634_013387_0155CD_F513',
        'S1A_IW_GRDH_1SDV_20161019T171609_20161019T171634_013562_015B60_27FF',
        'S1A_IW_GRDH_1SDV_20161031T171609_20161031T171634_013737_0160BD_4FAE',
    ]

    def to_vvvh(image):
        # Keep only the co- and cross-polarized backscatter bands.
        return ee.Image(image).select(['VV','VH'])

    images = ee.List([ee.Image(path + sid) for sid in scene_ids])
    return ee.ImageCollection(images.map(to_vvvh))
@app.route('/')
def index():
    """Serve the static landing page."""
    return app.send_static_file('index.html')
def get_vv(image):
    """Select the 'VV' band of a Sentinel-1 image and convert its dB values
    back to linear scale (exp(x * ln10 / 10) == 10 ** (x / 10))."""
    db_to_linear = ee.Image.constant(math.log(10.0) / 10.0)
    return image.select('VV').multiply(db_to_linear).exp()
def get_vh(image):
    """Select the 'VH' band of a Sentinel-1 image and convert its dB values
    back to linear scale (exp(x * ln10 / 10) == 10 ** (x / 10))."""
    db_to_linear = ee.Image.constant(math.log(10.0) / 10.0)
    return image.select('VH').multiply(db_to_linear).exp()
def get_vvvh(image):
    """Select the 'VV' and 'VH' bands of a Sentinel-1 image and convert their
    dB values back to linear scale (exp(x * ln10 / 10) == 10 ** (x / 10))."""
    db_to_linear = ee.Image.constant(math.log(10.0) / 10.0)
    return image.select('VV', 'VH').multiply(db_to_linear).exp()
def get_vvvh_raw(image):
    ''' get 'VV' and 'VH' bands from sentinel-1 imageCollection without converting out of dB '''
    return image.select('VV','VH')
def get_image(current,image):
    ''' accumulate a single image from a collection of images '''
    # Iterator callback: concatenate the bands of each image onto the
    # running result, producing one multiband image.
    return ee.Image.cat(ee.Image(image),current)
def clipList(current,prev):
    """Iterator callback: clip *current* to the rectangle carried in the state
    dictionary *prev* and append it to the accumulated image list."""
    state = ee.Dictionary(prev)
    rect = state.get('rect')
    clipped = ee.Image(current).clip(rect)
    imlist = ee.List(state.get('imlist')).add(clipped)
    return ee.Dictionary({'imlist':imlist,'rect':rect})
@app.route('/sentinel1.html', methods = ['GET', 'POST'])
def Sentinel1():
    """Sentinel-1 archive query page.

    GET renders the search form; POST queries COPERNICUS/S1_GRD by date,
    polarization, mode and orbit, optionally exports to Google Drive, and
    renders the result map. NOTE(review): written for Python 2
    (``has_key``, ``print >>`` syntax).
    """
    if request.method == 'GET':
        username = getpass.getuser()
        return render_template('sentinel1.html', navbar = 'Hi there %s!'%username,
                               centerlon = 8.5,
                               centerlat = 50.05)
    else:
        try:
            # Read the search parameters posted from the form.
            startdate = request.form['startdate']
            enddate = request.form['enddate']
            latitude = float(request.form['latitude'])
            longitude = float(request.form['longitude'])
            orbit = request.form['orbit']
            polarization1 = request.form['polarization']
            relativeorbitnumber = request.form['relativeorbitnumber']
            # The EE metadata filter expects dual polarization as a list.
            if polarization1 == 'VV,VH':
                polarization = ['VV','VH']
            else:
                polarization = polarization1
            mode = request.form['mode']
            minLat = float(request.form['minLat'])
            minLon = float(request.form['minLon'])
            maxLat = float(request.form['maxLat'])
            maxLon = float(request.form['maxLon'])
            how = request.form['how']
            if request.form.has_key('export'):
                export = request.form['export']
            else:
                export = 'none'
            exportname = request.form['exportname']
            start = ee.Date(startdate)
            finish = ee.Date(enddate)
            if how == 'longlat':
                # Point query: use the first scene covering the position.
                point = ee.Geometry.Point([longitude,latitude])
                collection = ee.ImageCollection('COPERNICUS/S1_GRD') \
                    .filterBounds(point) \
                    .filterDate(start, finish) \
                    .filter(ee.Filter.eq('transmitterReceiverPolarisation', polarization)) \
                    .filter(ee.Filter.eq('instrumentMode', mode)) \
                    .filter(ee.Filter.eq('resolution_meters', 10)) \
                    .filter(ee.Filter.eq('orbitProperties_pass', orbit))
                count = collection.toList(100).length().getInfo()
                if count==0:
                    raise ValueError('No images found')
                image = ee.Image(collection.first())
                # Acquisition time as a human-readable string.
                timestamp = ee.Date(image.get('system:time_start')).getInfo()
                timestamp = time.gmtime(int(timestamp['value'])/1000)
                timestamp = time.strftime('%c', timestamp)
                systemid = image.get('system:id').getInfo()
                if export == 'export':
                    # export to Google Drive --------------------------
                    gdexport = ee.batch.Export.image(image,exportname,
                        {'scale':10,'driveFolder':'EarthEngineImages','maxPixels': 1e9})
                    gdexportid = str(gdexport.id)
                    print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                    gdexport.start()
                else:
                    gdexportid = 'none'
                # --------------------------------------------------
                # Take the map projection from whichever band was requested.
                if (polarization1 == 'VV') or (polarization1 == 'VV,VH'):
                    projection = image.select('VV').projection().getInfo()['crs']
                else:
                    projection = image.select('VH').projection().getInfo()['crs']
                downloadpath = image.getDownloadUrl({'scale':1000})
                im = get_vv(image)
                mapid = im.getMapId({'min':0, 'max':1, 'opacity': 0.5})
                return render_template('sentinel1out.html',
                                       mapidclip = mapid['mapid'],
                                       tokenclip = mapid['token'],
                                       mapid = mapid['mapid'],
                                       token = mapid['token'],
                                       centerlon = longitude,
                                       centerlat = latitude,
                                       downloadtext = '',
                                       downloadpath = downloadpath,
                                       downloadpathclip = downloadpath,
                                       polarization = polarization1,
                                       projection = projection,
                                       gdexportid = gdexportid,
                                       systemid = systemid,
                                       count = count,
                                       timestamp = timestamp)
            elif how=='box':
                # Box query: keep only scenes that fully overlap the box.
                rect = ee.Geometry.Rectangle(minLon,minLat,maxLon,maxLat)
                centerlon = (minLon + maxLon)/2.0
                centerlat = (minLat + maxLat)/2.0
                ulPoint = ee.Geometry.Point([minLon,maxLat])
                lrPoint = ee.Geometry.Point([maxLon,minLat])
                collection = ee.ImageCollection('COPERNICUS/S1_GRD') \
                    .filterBounds(ulPoint) \
                    .filterBounds(lrPoint) \
                    .filterDate(start, finish) \
                    .filter(ee.Filter.eq('transmitterReceiverPolarisation', polarization)) \
                    .filter(ee.Filter.eq('resolution_meters', 10)) \
                    .filter(ee.Filter.eq('instrumentMode', mode)) \
                    .filter(ee.Filter.eq('orbitProperties_pass', orbit))
                if relativeorbitnumber != 'ANY':
                    collection = collection.filter(ee.Filter.eq('relativeOrbitNumber_start', int(relativeorbitnumber)))
                collection = collection.sort('system:time_start')
                # Collect scene ids for display in the result page.
                system_ids = ee.List(collection.aggregate_array('system:id'))
                systemidlist = []
                for systemid in system_ids.getInfo():
                    systemidlist.append(systemid)
                systemids = str(systemidlist)
                acquisition_times = ee.List(collection.aggregate_array('system:time_start'))
                count = acquisition_times.length().getInfo()
                if count==0:
                    raise ValueError('No images found')
                # Human-readable acquisition timestamps.
                timestamplist = []
                for timestamp in acquisition_times.getInfo():
                    tmp = time.gmtime(int(timestamp)/1000)
                    timestamplist.append(time.strftime('%c', tmp))
                timestamp = timestamplist[0]
                timestamps = str(timestamplist)
                relative_orbit_numbers = ee.List(collection.aggregate_array('relativeOrbitNumber_start'))
                relativeorbitnumberlist = []
                for ron in relative_orbit_numbers.getInfo():
                    relativeorbitnumberlist.append(ron)
                relativeorbitnumbers = str(relativeorbitnumberlist)
                image = ee.Image(collection.first())
                systemid = image.get('system:id').getInfo()
                if (polarization1 == 'VV') or (polarization1 == 'VV,VH'):
                    projection = image.select('VV').projection().getInfo()['crs']
                else:
                    projection = image.select('VH').projection().getInfo()['crs']
                # make into collection of VV, VH or VVVH images and restore linear scale
                if polarization == 'VV':
                    pcollection = collection.map(get_vv)
                elif polarization == 'VH':
                    pcollection = collection.map(get_vh)
                else:
                    pcollection = collection.map(get_vvvh)
                # clipped image for display on map
                image1 = ee.Image(pcollection.first())
                image1clip = image1.clip(rect)
                downloadpath = image1.getDownloadUrl({'scale':30})
                # clip the image collection and create a single multiband image
                compositeimage = ee.Image(pcollection.iterate(get_image,image1clip))
                if export == 'export':
                    # export to Google Drive --------------------------
                    gdexport = ee.batch.Export.image(compositeimage,exportname,
                        {'scale':10,'driveFolder':'EarthEngineImages','maxPixels': 1e9})
                    gdexportid = str(gdexport.id)
                    print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                    gdexport.start()
                else:
                    gdexportid = 'none'
                # --------------------------------------------------
                downloadpathclip = compositeimage.getDownloadUrl({'scale':10})
                if (polarization1 == 'VV') or (polarization1 == 'VV,VH'):
                    mapid = image1.select('VV').getMapId({'min': 0, 'max':1, 'opacity': 0.6})
                    mapidclip = image1clip.select('VV').getMapId({'min': 0, 'max':1, 'opacity': 0.7})
                else:
                    mapid = image1.select('VH').getMapId({'min': 0, 'max':1, 'opacity': 0.6})
                    mapidclip = image1clip.select('VH').getMapId({'min': 0, 'max':1, 'opacity': 0.7})
                return render_template('sentinel1out.html',
                                       mapidclip = mapidclip['mapid'],
                                       tokenclip = mapidclip['token'],
                                       mapid = mapid['mapid'],
                                       token = mapid['token'],
                                       centerlon = centerlon,
                                       centerlat = centerlat,
                                       downloadtext = 'Download image collection intersection',
                                       downloadpath = downloadpath,
                                       downloadpathclip = downloadpathclip,
                                       projection = projection,
                                       systemid = systemid,
                                       count = count,
                                       timestamp = timestamp,
                                       gdexportid = gdexportid,
                                       timestamps = timestamps,
                                       systemids = systemids,
                                       polarization = polarization1,
                                       relativeorbitnumbers = relativeorbitnumbers)
        except Exception as e:
            # Surface any failure (bad form input, EE errors) on the page.
            return '<br />An error occurred in Sentinel1: %s'%e
@app.route('/sentinel2.html', methods = ['GET', 'POST'])
def Sentinel2():
    """Sentinel-2 archive query page.

    GET renders the search form; POST queries COPERNICUS/S2 (least cloudy
    scene first), optionally exports to Google Drive, and renders the
    result map. NOTE(review): written for Python 2 (``has_key``,
    ``print >>`` syntax).
    """
    if request.method == 'GET':
        username = getpass.getuser()
        return render_template('sentinel2.html', navbar = 'Hi there %s!'%username)
    else:
        try:
            # Read the search parameters posted from the form.
            startdate = request.form['startdate']
            enddate = request.form['enddate']
            desired_projection = request.form['projection']
            latitude = float(request.form['latitude'])
            longitude = float(request.form['longitude'])
            minLat = float(request.form['minLat'])
            minLon = float(request.form['minLon'])
            maxLat = float(request.form['maxLat'])
            maxLon = float(request.form['maxLon'])
            if request.form.has_key('export'):
                export = request.form['export']
            else:
                export = ' '
            exportname = request.form['exportname']
            how = request.form['how']
            start = ee.Date(startdate)
            finish = ee.Date(enddate)
            if how == 'longlat':
                # Point query: least cloudy scene covering the position.
                point = ee.Geometry.Point([longitude,latitude])
                elements = ee.ImageCollection('COPERNICUS/S2') \
                    .filterBounds(point) \
                    .filterDate(start, finish) \
                    .sort('CLOUD_COVERAGE_ASSESSMENT', True)
                count = elements.toList(100).length().getInfo()
                if count==0:
                    raise ValueError('No images found')
                element = elements.first()
                image = ee.Image(element)
                # Acquisition time as a human-readable string.
                timestamp = ee.Date(image.get('system:time_start')).getInfo()
                timestamp = time.gmtime(int(timestamp['value'])/1000)
                timestamp = time.strftime('%c', timestamp)
                systemid = image.get('system:id').getInfo()
                cloudcover = image.get('CLOUD_COVERAGE_ASSESSMENT').getInfo()
                projection = image.select('B2').projection().getInfo()['crs']
                if desired_projection != 'default':
                    projection = desired_projection
                if export == 'export':
                    # export to Google Drive --------------------------
                    gdexport = ee.batch.Export.image(image,exportname,
                        {'scale':10,'driveFolder':'EarthEngineImages','maxPixels': 1e9})
                    gdexportid = str(gdexport.id)
                    print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                    gdexport.start()
                else:
                    gdexportid = 'none'
                # --------------------------------------------------
                downloadpath = image.getDownloadUrl({'scale':30,'crs':projection})
                mapid = image.select('B2','B3','B4') \
                    .getMapId({'min': 0, 'max': 2000, 'opacity': 0.8})
                return render_template('sentinel2out.html',
                                       mapidclip = mapid['mapid'],
                                       tokenclip = mapid['token'],
                                       mapid = mapid['mapid'],
                                       token = mapid['token'],
                                       centerlon = longitude,
                                       centerlat = latitude,
                                       downloadtext = '',
                                       downloadpath = downloadpath,
                                       downloadpathclip = downloadpath,
                                       projection = projection,
                                       systemid = systemid,
                                       cloudcover = cloudcover,
                                       count = count,
                                       timestamp = timestamp)
            elif how=='box':
                # Box query: keep only scenes that fully overlap the box.
                rect = ee.Geometry.Rectangle(minLon,minLat,maxLon,maxLat)
                centerlon = (minLon + maxLon)/2.0
                centerlat = (minLat + maxLat)/2.0
                ulPoint = ee.Geometry.Point([minLon,maxLat])
                lrPoint = ee.Geometry.Point([maxLon,minLat])
                collection = ee.ImageCollection('COPERNICUS/S2') \
                    .filterBounds(ulPoint) \
                    .filterBounds(lrPoint) \
                    .filterDate(start, finish) \
                    .sort('CLOUD_COVERAGE_ASSESSMENT', True)
                count = collection.toList(100).length().getInfo()
                if count==0:
                    raise ValueError('No images found')
                image = ee.Image(collection.first())
                imageclip = image.clip(rect)
                # Acquisition time as a human-readable string.
                timestamp = ee.Date(image.get('system:time_start')).getInfo()
                timestamp = time.gmtime(int(timestamp['value'])/1000)
                timestamp = time.strftime('%c', timestamp)
                systemid = image.get('system:id').getInfo()
                cloudcover = image.get('CLOUD_COVERAGE_ASSESSMENT').getInfo()
                projection = image.select('B2').projection().getInfo()['crs']
                if desired_projection != 'default':
                    projection = desired_projection
                downloadpath = image.getDownloadUrl({'scale':30,'crs':projection})
                if export == 'export':
                    # export to Google Drive --------------------------
                    gdexport = ee.batch.Export.image(imageclip.select('B2','B3','B4','B8'),exportname,
                        {'scale':10,'driveFolder':'EarthEngineImages','maxPixels': 1e9})
                    gdexportid = str(gdexport.id)
                    print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                    gdexport.start()
                else:
                    gdexportid = 'none'
                # --------------------------------------------------
                downloadpathclip = imageclip.select('B2','B3','B4','B8').getDownloadUrl({'scale':10, 'crs':projection})
                rgb = image.select('B2','B3','B4')
                # NOTE(review): 'B5' here is inconsistent with the unclipped
                # rgb composite above ('B4') — possibly a typo; confirm intent.
                rgbclip = imageclip.select('B2','B3','B5')
                mapid = rgb.getMapId({'min':0, 'max':2000, 'opacity': 0.6})
                mapidclip = rgbclip.getMapId({'min':0, 'max':2000, 'opacity': 1.0})
                return render_template('sentinel2out.html',
                                       mapidclip = mapidclip['mapid'],
                                       tokenclip = mapidclip['token'],
                                       mapid = mapid['mapid'],
                                       token = mapid['token'],
                                       centerlon = centerlon,
                                       centerlat = centerlat,
                                       downloadtext = 'Download image intersection',
                                       downloadpath = downloadpath,
                                       downloadpathclip = downloadpathclip,
                                       projection = projection,
                                       systemid = systemid,
                                       cloudcover = cloudcover,
                                       count = count,
                                       timestamp = timestamp)
        except Exception as e:
            # Surface any failure (bad form input, EE errors) on the page.
            return '<br />An error occurred in Sentinel2: %s'%e
@app.route('/landsat5.html', methods = ['GET', 'POST'])
def Landsat5():
    """Landsat-5 (LT5_L1T) scene browser.

    GET renders the query form.  POST locates the least-cloudy scene for
    the selected search mode ('pathrow', 'longlat' or 'box'), optionally
    starts a Google Drive export, and renders 'landsat5out.html' with
    map tiles and download URLs.  Any failure is reported back to the
    client as a short HTML message.
    """
    if request.method == 'GET':
        username = getpass.getuser()
        return render_template('landsat5.html', navbar = 'Hi there %s!'%username)
    else:
        try:
            # All form fields are always submitted, even though each
            # search mode only uses a subset of them.
            startdate = request.form['startdate']
            enddate = request.form['enddate']
            path = int(request.form['path'])
            row = int(request.form['row'])
            latitude = float(request.form['latitude'])
            longitude = float(request.form['longitude'])
            minLat = float(request.form['minLat'])
            minLon = float(request.form['minLon'])
            maxLat = float(request.form['maxLat'])
            maxLon = float(request.form['maxLon'])
            how = request.form['how']
            # The export checkbox is only present in the form when ticked.
            if request.form.has_key('export'):
                export = request.form['export']
            else:
                export = ' '
            exportname = request.form['exportname']
            start = ee.Date(startdate)
            finish = ee.Date(enddate)
            if how == 'pathrow':
                # Least-cloudy scene for an exact WRS-2 path/row.
                elements = ee.ImageCollection('LT5_L1T') \
                    .filterMetadata('WRS_PATH', 'equals', path) \
                    .filterMetadata('WRS_ROW', 'equals', row) \
                    .filterDate(start, finish) \
                    .sort('CLOUD_COVER', True)
                count = elements.toList(100).length().getInfo()
                if count == 0:
                    raise ValueError('No images found')
                element = elements.first()
                image = ee.Image(element)
                # Center the map on the scene footprint.
                longitude = (image.get('CORNER_LL_LON_PRODUCT').getInfo()+image.get('CORNER_UR_LON_PRODUCT').getInfo())/2
                latitude = (image.get('CORNER_UR_LAT_PRODUCT').getInfo()+image.get('CORNER_LL_LAT_PRODUCT').getInfo())/2
                # system:time_start is epoch milliseconds.
                timestamp = ee.Date(image.get('system:time_start')).getInfo()
                timestamp = time.gmtime(int(timestamp['value'])/1000)
                timestamp = time.strftime('%c', timestamp)
                systemid = image.get('system:id').getInfo()
                projection = image.select('B2').projection().getInfo()['crs']
                cloudcover = image.get('CLOUD_COVER').getInfo()
                if export == 'export':
                    # Export the full scene to Google Drive.
                    gdexport = ee.batch.Export.image(image, exportname,
                                 {'scale':30,'driveFolder':'EarthEngineImages','maxPixels': 1e9})
                    gdexportid = str(gdexport.id)
                    print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                    gdexport.start()
                else:
                    gdexportid = 'none'
                downloadpath = image.getDownloadUrl({'scale':30, 'crs':'EPSG:4326'})
                rgb = image.select('B4','B5','B7')
                mapid = rgb.getMapId({'min':0, 'max':250, 'opacity': 0.6})
                # No clipped variant in this mode: clip and full-scene
                # parameters are given the same values.
                return render_template('landsat5out.html',
                                       mapidclip = mapid['mapid'],
                                       tokenclip = mapid['token'],
                                       mapid = mapid['mapid'],
                                       token = mapid['token'],
                                       centerlon = longitude,
                                       centerlat = latitude,
                                       downloadtext = '',
                                       downloadpath = downloadpath,
                                       downloadpathclip = downloadpath,
                                       projection = projection,
                                       systemid = systemid,
                                       cloudcover = cloudcover,
                                       count = count,
                                       timestamp = timestamp)
            elif how == 'longlat':
                # Least-cloudy scene containing a single point.
                point = ee.Geometry.Point([longitude,latitude])
                elements = ee.ImageCollection('LT5_L1T') \
                    .filterBounds(point) \
                    .filterDate(start, finish) \
                    .sort('CLOUD_COVER', True)
                count = elements.toList(100).length().getInfo()
                if count == 0:
                    raise ValueError('No images found')
                element = elements.first()
                image = ee.Image(element)
                timestamp = ee.Date(image.get('system:time_start')).getInfo()
                timestamp = time.gmtime(int(timestamp['value'])/1000)
                timestamp = time.strftime('%c', timestamp)
                systemid = image.get('system:id').getInfo()
                cloudcover = image.get('CLOUD_COVER').getInfo()
                projection = image.select('B2').projection().getInfo()['crs']
                if export == 'export':
                    # Export the full scene to Google Drive.
                    gdexport = ee.batch.Export.image(image, exportname,
                                 {'scale':30,'driveFolder':'EarthEngineImages','maxPixels': 1e9})
                    gdexportid = str(gdexport.id)
                    print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                    gdexport.start()
                else:
                    gdexportid = 'none'
                downloadpath = image.getDownloadUrl({'scale':30,'crs':projection})
                mapid = image.select('B4','B5','B7') \
                    .getMapId({'min': 0, 'max': 250, 'opacity': 0.6})
                return render_template('landsat5out.html',
                                       mapidclip = mapid['mapid'],
                                       tokenclip = mapid['token'],
                                       mapid = mapid['mapid'],
                                       token = mapid['token'],
                                       centerlon = longitude,
                                       centerlat = latitude,
                                       downloadtext = '',
                                       downloadpath = downloadpath,
                                       downloadpathclip = downloadpath,
                                       projection = projection,
                                       systemid = systemid,
                                       # Fixed: was misspelled 'cloudcove',
                                       # so the template never received it.
                                       cloudcover = cloudcover,
                                       count = count,
                                       timestamp = timestamp)
            elif how == 'box':
                # Least-cloudy scene that fully overlaps the bounding box.
                rect = ee.Geometry.Rectangle(minLon,minLat,maxLon,maxLat)
                centerlon = (minLon + maxLon)/2.0
                centerlat = (minLat + maxLat)/2.0
                ulPoint = ee.Geometry.Point([minLon,maxLat])
                lrPoint = ee.Geometry.Point([maxLon,minLat])
                collection = ee.ImageCollection('LT5_L1T') \
                    .filterBounds(ulPoint) \
                    .filterBounds(lrPoint) \
                    .filterDate(start, finish) \
                    .sort('CLOUD_COVER', True)
                count = collection.toList(100).length().getInfo()
                if count == 0:
                    raise ValueError('No images found')
                image = ee.Image(collection.first())
                imageclip = image.clip(rect)
                timestamp = ee.Date(image.get('system:time_start')).getInfo()
                timestamp = time.gmtime(int(timestamp['value'])/1000)
                timestamp = time.strftime('%c', timestamp)
                systemid = image.get('system:id').getInfo()
                cloudcover = image.get('CLOUD_COVER').getInfo()
                projection = image.select('B1').projection().getInfo()['crs']
                downloadpath = image.getDownloadUrl({'scale':30,'crs':projection})
                if export == 'export':
                    # Export the clipped scene to Google Drive.
                    gdexport = ee.batch.Export.image(imageclip, exportname,
                                 {'scale':30,'driveFolder':'EarthEngineImages','maxPixels': 1e9})
                    gdexportid = str(gdexport.id)
                    print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                    gdexport.start()
                else:
                    gdexportid = 'none'
                downloadpathclip = imageclip.select('B1','B2','B3','B4','B5','B7').getDownloadUrl({'scale':30, 'crs':projection})
                rgb = image.select('B4','B5','B7')
                rgbclip = imageclip.select('B4','B5','B7')
                mapid = rgb.getMapId({'min':0, 'max':250, 'opacity': 0.6})
                mapidclip = rgbclip.getMapId({'min':0, 'max':250, 'opacity': 1.0})
                return render_template('landsat5out.html',
                                       mapidclip = mapidclip['mapid'],
                                       tokenclip = mapidclip['token'],
                                       mapid = mapid['mapid'],
                                       token = mapid['token'],
                                       centerlon = centerlon,
                                       centerlat = centerlat,
                                       downloadtext = 'Download image intersection',
                                       downloadpath = downloadpath,
                                       downloadpathclip = downloadpathclip,
                                       projection = projection,
                                       systemid = systemid,
                                       cloudcover = cloudcover,
                                       count = count,
                                       timestamp = timestamp)
            # An unrecognised 'how' falls through and returns None,
            # as in the original implementation.
        except Exception as e:
            return '<br />An error occurred in Landsat5: %s'%e
@app.route('/mad.html', methods = ['GET', 'POST'])
def Mad():
    """Iterated MAD (iMAD) change detection between two Landsat-5 scenes.

    GET renders the query form.  POST selects the least-cloudy scene in
    each of two date ranges (by WRS-2 path/row or by bounding box), runs
    ``niter`` client-side iterations of the iMAD transformation,
    optionally exports the MAD image to Google Drive, and renders
    'madout.html'.  Errors are reported back as a short HTML message.
    """
    if request.method == 'GET':
        username = getpass.getuser()
        return render_template('mad.html', navbar = 'Hi there %s!'%username)
    else:
        try:
            path = int(request.form['path'])
            row = int(request.form['row'])
            niter = int(request.form['iterations'])
            start1 = ee.Date(request.form['startdate1'])
            finish1 = ee.Date(request.form['enddate1'])
            start2 = ee.Date(request.form['startdate2'])
            finish2 = ee.Date(request.form['enddate2'])
            minLat = float(request.form['minLat'])
            minLon = float(request.form['minLon'])
            maxLat = float(request.form['maxLat'])
            maxLon = float(request.form['maxLon'])
            exportname = request.form['exportname']
            how = request.form['how']
            # The export checkbox is only present in the form when ticked.
            if request.form.has_key('export'):
                export = request.form['export']
            else:
                export = ' '
            if how == 'pathrow':
                # Least-cloudy full scene in each interval for the path/row.
                element = ee.ImageCollection('LT5_L1T') \
                    .filterMetadata('WRS_PATH', 'equals', path) \
                    .filterMetadata('WRS_ROW', 'equals', row) \
                    .filterDate(start1, finish1) \
                    .sort('CLOUD_COVER') \
                    .first()
                image1 = ee.Image(element).select('B1','B2','B3','B4','B5','B7')
                # system:time_start is epoch milliseconds.
                timestamp1 = ee.Date(image1.get('system:time_start')).getInfo()
                timestamp1 = time.gmtime(int(timestamp1['value'])/1000)
                timestamp1 = time.strftime('%c', timestamp1)
                systemid1 = image1.get('system:id').getInfo()
                cloudcover1 = image1.get('CLOUD_COVER').getInfo()
                # Center the map on the first scene's footprint.
                centerlon = (image1.get('CORNER_LL_LON_PRODUCT').getInfo()+image1.get('CORNER_UR_LON_PRODUCT').getInfo())/2
                centerlat = (image1.get('CORNER_UR_LAT_PRODUCT').getInfo()+image1.get('CORNER_LL_LAT_PRODUCT').getInfo())/2
                element = ee.ImageCollection('LT5_L1T') \
                    .filterMetadata('WRS_PATH', 'equals', path) \
                    .filterMetadata('WRS_ROW', 'equals', row) \
                    .filterDate(start2, finish2) \
                    .sort('CLOUD_COVER') \
                    .first()
                image2 = ee.Image(element).select('B1','B2','B3','B4','B5','B7')
                timestamp2 = ee.Date(image2.get('system:time_start')).getInfo()
                timestamp2 = time.gmtime(int(timestamp2['value'])/1000)
                timestamp2 = time.strftime('%c', timestamp2)
                systemid2 = image2.get('system:id').getInfo()
                cloudcover2 = image2.get('CLOUD_COVER').getInfo()
            elif how == 'box':
                # Least-cloudy scene overlapping the bounding box, clipped.
                rect = ee.Geometry.Rectangle(minLon,minLat,maxLon,maxLat)
                centerlon = (minLon + maxLon)/2.0
                centerlat = (minLat + maxLat)/2.0
                ulPoint = ee.Geometry.Point([minLon,maxLat])
                lrPoint = ee.Geometry.Point([maxLon,minLat])
                collection = ee.ImageCollection('LT5_L1T') \
                    .filterBounds(ulPoint) \
                    .filterBounds(lrPoint) \
                    .filterDate(start1, finish1) \
                    .sort('CLOUD_COVER', True)
                count = collection.toList(100).length().getInfo()
                if count == 0:
                    raise ValueError('No images found for first time interval')
                image1 = ee.Image(collection.first()).clip(rect).select('B1','B2','B3','B4','B5','B7')
                timestamp1 = ee.Date(image1.get('system:time_start')).getInfo()
                timestamp1 = time.gmtime(int(timestamp1['value'])/1000)
                timestamp1 = time.strftime('%c', timestamp1)
                systemid1 = image1.get('system:id').getInfo()
                cloudcover1 = image1.get('CLOUD_COVER').getInfo()
                collection = ee.ImageCollection('LT5_L1T') \
                    .filterBounds(ulPoint) \
                    .filterBounds(lrPoint) \
                    .filterDate(start2, finish2) \
                    .sort('CLOUD_COVER', True)
                count = collection.toList(100).length().getInfo()
                if count == 0:
                    raise ValueError('No images found for second time interval')
                image2 = ee.Image(collection.first()).clip(rect).select('B1','B2','B3','B4','B5','B7')
                timestamp2 = ee.Date(image2.get('system:time_start')).getInfo()
                timestamp2 = time.gmtime(int(timestamp2['value'])/1000)
                timestamp2 = time.strftime('%c', timestamp2)
                systemid2 = image2.get('system:id').getInfo()
                cloudcover2 = image2.get('CLOUD_COVER').getInfo()
            # iMAD: iterate the MAD transformation, feeding each pass the
            # no-change weights produced by the previous one.
            B1 = image1.bandNames().get(0)
            input_dict = ee.Dictionary({'image1':image1,'image2':image2})
            first = ee.Dictionary({'weights':image1.select(ee.String(B1)).multiply(0).add(ee.Image.constant(1)),
                                   'MAD':ee.Image.constant(0)})
            # Server-side iteration is not yet possible, so iterate
            # client-side:
            #   result = ee.List.repeat(input_dict, nMax).iterate(imad, first)
            # NOTE(review): assumes niter >= 1 — with 0 iterations 'result'
            # is unbound and the except clause reports a NameError.
            itr = 0
            while itr < niter:
                result = imad(input_dict,first)
                weights = result.get('weights')
                first = ee.Dictionary({'weights':weights,'MAD':ee.Image.constant(0)})
                itr += 1
            MAD = ee.Image(result.get('MAD'))
            bNames = MAD.bandNames()
            nBands = len(bNames.getInfo())
            # Display only the last (highest-order) MAD variate.
            lastMAD = ee.String(MAD.bandNames().get(nBands-1))
            scale = image1.select(ee.String(B1)).projection().nominalScale().getInfo()
            downloadpath = MAD.getDownloadUrl({'scale':scale, 'crs':'EPSG:4326'})
            mapid = MAD.select(lastMAD).getMapId({'min': -20, 'max': 20, 'opacity': 0.7})
            if export == 'export':
                # Export the MAD image to Google Drive.
                gdexport = ee.batch.Export.image(MAD, exportname,
                             {'scale':scale,'driveFolder':'EarthEngineImages'})
                gdexportid = str(gdexport.id)
                # Fixed: log to stderr like the other export handlers in
                # this module (was a bare print to stdout).
                print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                gdexport.start()
            else:
                gdexportid = 'none'
            return render_template('madout.html',
                                   mapid = mapid['mapid'],
                                   token = mapid['token'],
                                   centerlon = centerlon,
                                   centerlat = centerlat,
                                   downloadpath = downloadpath,
                                   systemid1 = systemid1,
                                   systemid2 = systemid2,
                                   cloudcover1 = cloudcover1,
                                   cloudcover2 = cloudcover2,
                                   timestamp1 = timestamp1,
                                   timestamp2 = timestamp2)
        except Exception as e:
            return '<br />An error occurred in MAD: %s'%e
@app.route('/wishart.html', methods = ['GET', 'POST'])
def Wishart():
    """Wishart change detection between two Sentinel-1 GRD acquisitions.

    GET renders the query form.  POST fetches the earliest acquisition
    in each of two date ranges over the bounding box, runs the omnibus
    test on the pair at the requested significance, optionally exports
    the change map to an Earth Engine asset, and renders
    'wishartout.html'.  Errors are reported back as a short HTML message.
    """
    if request.method == 'GET':
        username = getpass.getuser()
        return render_template('wishart.html', navbar = 'Hi there %s!'%username)
    else:
        try:
            start1 = ee.Date(request.form['startdate1'])
            finish1 = ee.Date(request.form['enddate1'])
            start2 = ee.Date(request.form['startdate2'])
            finish2 = ee.Date(request.form['enddate2'])
            minLat = float(request.form['minLat'])
            minLon = float(request.form['minLon'])
            maxLat = float(request.form['maxLat'])
            maxLon = float(request.form['maxLon'])
            orbit = request.form['orbit']
            polarization1 = request.form['polarization']
            relativeorbitnumber = request.form['relativeorbitnumber']
            significance = float(request.form['significance'])
            # Dual polarization is filtered as a list, single as a string.
            if polarization1 == 'VV,VH':
                polarization = ['VV','VH']
            else:
                polarization = polarization1
            exportname = request.form['exportname']
            # Checkboxes are only present in the form when ticked.
            if request.form.has_key('export'):
                export = request.form['export']
            else:
                export = ' '
            if request.form.has_key('median'):
                median = True
            else:
                median = False
            rect = ee.Geometry.Rectangle(minLon,minLat,maxLon,maxLat)
            centerlon = (minLon + maxLon)/2.0
            centerlat = (minLat + maxLat)/2.0
            ulPoint = ee.Geometry.Point([minLon,maxLat])
            lrPoint = ee.Geometry.Point([maxLon,minLat])
            # Earliest IW-mode acquisition covering the box in interval 1.
            collection = ee.ImageCollection('COPERNICUS/S1_GRD') \
                .filterBounds(ulPoint) \
                .filterBounds(lrPoint) \
                .filterDate(start1, finish1) \
                .filter(ee.Filter.eq('transmitterReceiverPolarisation', polarization)) \
                .filter(ee.Filter.eq('resolution_meters', 10)) \
                .filter(ee.Filter.eq('instrumentMode', 'IW')) \
                .filter(ee.Filter.eq('orbitProperties_pass', orbit))
            if relativeorbitnumber != 'ANY':
                collection = collection.filter(ee.Filter.eq('relativeOrbitNumber_start', int(relativeorbitnumber)))
            count = collection.toList(100).length().getInfo()
            if count == 0:
                raise ValueError('No images found for first time interval')
            collection = collection.sort('system:time_start')
            image1 = ee.Image(collection.first()).clip(rect)
            # system:time_start is epoch milliseconds.
            timestamp1 = ee.Date(image1.get('system:time_start')).getInfo()
            timestamp1 = time.gmtime(int(timestamp1['value'])/1000)
            timestamp1 = time.strftime('%c', timestamp1)
            systemid1 = image1.get('system:id').getInfo()
            relativeOrbitNumber1 = int(image1.get('relativeOrbitNumber_start').getInfo())
            # Earliest acquisition covering the box in interval 2.
            collection = ee.ImageCollection('COPERNICUS/S1_GRD') \
                .filterBounds(ulPoint) \
                .filterBounds(lrPoint) \
                .filterDate(start2, finish2) \
                .filter(ee.Filter.eq('transmitterReceiverPolarisation', polarization)) \
                .filter(ee.Filter.eq('resolution_meters', 10)) \
                .filter(ee.Filter.eq('instrumentMode', 'IW')) \
                .filter(ee.Filter.eq('orbitProperties_pass', orbit))
            if relativeorbitnumber != 'ANY':
                collection = collection.filter(ee.Filter.eq('relativeOrbitNumber_start', int(relativeorbitnumber)))
            count = collection.toList(100).length().getInfo()
            if count == 0:
                raise ValueError('No images found for second time interval')
            collection = collection.sort('system:time_start')
            image2 = ee.Image(collection.first()).clip(rect)
            timestamp2 = ee.Date(image2.get('system:time_start')).getInfo()
            timestamp2 = time.gmtime(int(timestamp2['value'])/1000)
            timestamp2 = time.strftime('%c', timestamp2)
            systemid2 = image2.get('system:id').getInfo()
            relativeOrbitNumber2 = int(image2.get('relativeOrbitNumber_start').getInfo())
            # Convert both images to the selected polarization bands
            # before the change detection.
            if polarization1 == 'VV,VH':
                image1 = get_vvvh(image1)
                image2 = get_vvvh(image2)
            elif polarization1 == 'VV':
                image1 = get_vv(image1)
                image2 = get_vv(image2)
            else:
                image1 = get_vh(image1)
                image2 = get_vh(image2)
            # A two-image series reduces the omnibus test to the
            # pairwise Wishart test.
            result = ee.Dictionary(omnibus(ee.List([image1,image2]),significance,median))
            cmap = ee.Image(result.get('cmap'))
            mapid = cmap.getMapId({'min':0, 'max':1 ,'palette':'black,red', 'opacity':0.4})
            downloadpath = cmap.getDownloadUrl({'scale':10})
            if export == 'export':
                # Export the change map to an Earth Engine asset.
                assexport = ee.batch.Export.image.toAsset(cmap,description="wishartTask", assetId=exportname,scale=10,maxPixels=1e9)
                assexportid = str(assexport.id)
                # Fixed: log to stderr like the other export handlers in
                # this module (was a bare print to stdout).
                print >> sys.stderr, '****Exporting to Assets, task id: %s '%assexportid
                assexport.start()
            else:
                assexportid = 'none'
            return render_template('wishartout.html',
                                   mapid = mapid['mapid'],
                                   token = mapid['token'],
                                   centerlon = centerlon,
                                   centerlat = centerlat,
                                   downloadpath = downloadpath,
                                   systemid1 = systemid1,
                                   systemid2 = systemid2,
                                   timestamp1 = timestamp1,
                                   timestamp2 = timestamp2,
                                   relativeOrbitNumber1 = relativeOrbitNumber1,
                                   relativeOrbitNumber2 = relativeOrbitNumber2,
                                   significance = significance,
                                   polarization = polarization1,
                                   assexportid = assexportid)
        except Exception as e:
            return '<br />An error occurred in wishart: %s'%e
@app.route('/omnibus.html', methods = ['GET', 'POST'])
def Omnibus():
    """Sequential omnibus change detection on a Sentinel-1 time series.

    GET renders the query form.  POST collects every matching S1_GRD
    acquisition over the bounding box, clips the series to the region,
    runs the sequential omnibus algorithm, optionally exports the
    stacked change maps (cmap/smap/fmap) to an Earth Engine asset and/or
    Google Drive, and renders 'omnibusout.html' showing the frequency
    map.  Errors are reported back as a short HTML message.
    """
    if request.method == 'GET':
        username = getpass.getuser()
        return render_template('omnibus.html', navbar = 'Hi there %s!'%username,
                               centerlon = 8.5,
                               centerlat = 50.05)
    else:
        try:
            startdate = request.form['startdate']
            enddate = request.form['enddate']
            orbit = request.form['orbit']
            polarization1 = request.form['polarization']
            relativeorbitnumber = request.form['relativeorbitnumber']
            # Dual polarization is filtered as a list, single as a string.
            if polarization1 == 'VV,VH':
                polarization = ['VV','VH']
            else:
                polarization = polarization1
            significance = float(request.form['significance'])
            mode = request.form['mode']
            minLat = float(request.form['minLat'])
            minLon = float(request.form['minLon'])
            maxLat = float(request.form['maxLat'])
            maxLon = float(request.form['maxLon'])
            # Checkboxes are only present in the form when ticked.
            if request.form.has_key('assexport'):
                assexport = request.form['assexport']
            else:
                assexport = 'none'
            if request.form.has_key('gdexport'):
                gdexport = request.form['gdexport']
            else:
                gdexport = 'none'
            if request.form.has_key('median'):
                median = True
            else:
                median = False
            assexportname = request.form['assexportname']
            gdexportname = request.form['gdexportname']
            start = ee.Date(startdate)
            finish = ee.Date(enddate)
            rect = ee.Geometry.Rectangle(minLon,minLat,maxLon,maxLat)
            centerlon = (minLon + maxLon)/2.0
            centerlat = (minLat + maxLat)/2.0
            ulPoint = ee.Geometry.Point([minLon,maxLat])
            lrPoint = ee.Geometry.Point([maxLon,minLat])
            collection = ee.ImageCollection('COPERNICUS/S1_GRD') \
                .filterBounds(ulPoint) \
                .filterBounds(lrPoint) \
                .filterDate(start, finish) \
                .filter(ee.Filter.eq('transmitterReceiverPolarisation', polarization)) \
                .filter(ee.Filter.eq('resolution_meters', 10)) \
                .filter(ee.Filter.eq('instrumentMode', mode)) \
                .filter(ee.Filter.eq('orbitProperties_pass', orbit))
            if relativeorbitnumber != 'ANY':
                collection = collection.filter(ee.Filter.eq('relativeOrbitNumber_start', int(relativeorbitnumber)))
            collection = collection.sort('system:time_start')
            # Metadata lists shown on the result page.
            systemidlist = ee.List(collection.aggregate_array('system:id')).getInfo()
            systemids = str(systemidlist)
            acquisition_times = ee.List(collection.aggregate_array('system:time_start'))
            count = acquisition_times.length().getInfo()
            if count == 0:
                raise ValueError('No images found')
            # Acquisition times are epoch milliseconds.
            timestamplist = [time.strftime('%c', time.gmtime(int(t)/1000))
                             for t in acquisition_times.getInfo()]
            timestamp = timestamplist[0]
            timestamps = str(timestamplist)
            relativeorbitnumberlist = ee.List(collection.aggregate_array('relativeOrbitNumber_start')).getInfo()
            relativeorbitnumbers = str(relativeorbitnumberlist)
            image = ee.Image(collection.first())
            systemid = image.get('system:id').getInfo()
            if (polarization1 == 'VV') or (polarization1 == 'VV,VH'):
                projection = image.select('VV').projection().getInfo()['crs']
            else:
                projection = image.select('VH').projection().getInfo()['crs']
            # Make into a collection of VV, VH or VVVH images and restore
            # linear scale.
            if polarization == 'VV':
                pcollection = collection.map(get_vv)
            elif polarization == 'VH':
                pcollection = collection.map(get_vh)
            else:
                pcollection = collection.map(get_vvvh)
            # Get the list of images and clip each one to the region.
            pList = pcollection.toList(count)
            first = ee.Dictionary({'imlist':ee.List([]),'rect':rect})
            imList = ee.Dictionary(pList.iterate(clipList,first)).get('imlist')
            # Run the sequential omnibus algorithm.
            result = ee.Dictionary(omnibus(imList,significance,median))
            cmap = ee.Image(result.get('cmap'))
            smap = ee.Image(result.get('smap'))
            fmap = ee.Image(result.get('fmap'))
            cmaps = ee.Image.cat(cmap,smap,fmap).rename(['cmap','smap','fmap'])
            if assexport == 'assexport':
                # Export the stacked change maps to an Earth Engine asset.
                assexport = ee.batch.Export.image.toAsset(cmaps,
                              description='assetExportTask',
                              assetId=assexportname,scale=10,maxPixels=1e9)
                assexportid = str(assexport.id)
                # Log to stderr like the other export handlers in this module.
                print >> sys.stderr, '****Exporting to Assets, task id: %s '%assexportid
                assexport.start()
            else:
                assexportid = 'none'
            if gdexport == 'gdexport':
                # Export the stacked change maps to Google Drive.
                gdexport = ee.batch.Export.image.toDrive(cmaps,
                             description='driveExportTask',
                             folder = 'EarthEngineImages',
                             fileNamePrefix=gdexportname,scale=10,maxPixels=1e9)
                gdexportid = str(gdexport.id)
                print >> sys.stderr, '****Exporting to Google Drive, task id: %s '%gdexportid
                gdexport.start()
            else:
                gdexportid = 'none'
            # NOTE(review): only the frequency map is sent to the template;
            # cmapid and smapid are registered but currently unused.
            cmapid = cmap.getMapId({'min': 0, 'max':count-1,'palette':'black,blue,yellow,red', 'opacity': 0.5})
            fmapid = fmap.getMapId({'min': 0, 'max':count/2,'palette':'black,blue,yellow,red', 'opacity': 0.5})
            smapid = smap.getMapId({'min': 0, 'max':count-1,'palette':'black,blue,yellow,red', 'opacity': 0.5})
            return render_template('omnibusout.html',
                                   mapid = fmapid['mapid'],
                                   token = fmapid['token'],
                                   centerlon = centerlon,
                                   centerlat = centerlat,
                                   downloadtext = 'Download change maps',
                                   projection = projection,
                                   systemid = systemid,
                                   count = count,
                                   timestamp = timestamp,
                                   # Fixed: pass the computed task ids to the
                                   # template instead of the literal 'none'.
                                   assexportid = assexportid,
                                   gdexportid = gdexportid,
                                   timestamps = timestamps,
                                   systemids = systemids,
                                   polarization = polarization1,
                                   relativeorbitnumbers = relativeorbitnumbers)
        except Exception as e:
            return '<br />An error occurred in omnibus: %s'%e
if __name__ == '__main__':
    # Left over from earlier manual testing of the EE API:
#    import ee
#    image = ee.apifunction.ApiFunction.call_("S1.db",ee.Image('TEST/simonf/S1/99/S1B_IW_GRDH_1SDV_20161001T171508_20161001T171533_002316_003E9D_D195'))
    # NOTE(review): debug=True enables the interactive Werkzeug debugger and
    # host='0.0.0.0' listens on all interfaces — do not deploy like this.
    app.run(debug=True, host='0.0.0.0')
| 59.908184 | 155 | 0.469114 | 4,799 | 60,028 | 5.76766 | 0.097312 | 0.036562 | 0.012645 | 0.016186 | 0.860544 | 0.840782 | 0.821598 | 0.799234 | 0.781676 | 0.761263 | 0 | 0.060766 | 0.410309 | 60,028 | 1,002 | 156 | 59.908184 | 0.721171 | 0.055308 | 0 | 0.764967 | 0 | 0 | 0.142002 | 0.052658 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.013304 | 0.004435 | null | null | 0.012195 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
f1f4486e6215f0f7223604a86776684b0e1a7666 | 6,814 | py | Python | project/migrations/0047_historicalrseallocation_rseallocation.py | mmesiti/cogs3 | c48cd48629570f418b93aec73de49bc2fb59edc2 | [
"MIT"
] | null | null | null | project/migrations/0047_historicalrseallocation_rseallocation.py | mmesiti/cogs3 | c48cd48629570f418b93aec73de49bc2fb59edc2 | [
"MIT"
] | 9 | 2019-08-01T09:50:34.000Z | 2019-08-14T16:24:31.000Z | project/migrations/0047_historicalrseallocation_rseallocation.py | mmesiti/cogs3 | c48cd48629570f418b93aec73de49bc2fb59edc2 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.2 on 2018-08-15 12:26
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('project', '0046_merge_20180807_1432'),
]
operations = [
migrations.CreateModel(
name='HistoricalRSEAllocation',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('title', models.CharField(help_text='A one-sentence summary of the work to be done by the RSE team.', max_length=256, verbose_name='Subproject title')),
('software', models.TextField(help_text='The software currently in use to deliver the research outcomes. Questions to consider: Is it commercial, open-source, or in-house code? What language is it written in? What libraries does it depend on? Is it parallelised?', max_length=2000, verbose_name='Software description')),
('duration', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Estimated duration (in weeks)')),
('goals', models.TextField(help_text='Describe in as much detail as possible what you would like the RSE team to achieve. What steps are necessary to achieve this?', max_length=5000, verbose_name='Project goals')),
('outcomes', models.TextField(help_text='What effect will the completion of this work have on your research, e.g. in terms of publications or grants enabled? ', max_length=2000, verbose_name='Project outcomes')),
('confidentiality', models.TextField(blank=True, help_text='Is the research or code restricted from being published openly and presented at conferences and other events related to research software? If so, please describe the restrictions.', max_length=1000, verbose_name='Confidentiality constraints')),
('status', models.PositiveSmallIntegerField(choices=[(0, 'Awaiting Approval'), (1, 'Approved; awaiting RSE time'), (2, 'Declined'), (3, 'In progress'), (4, 'Completed'), (5, 'Closed')], default=0, verbose_name='Current Status')),
('previous_status', models.PositiveSmallIntegerField(choices=[(0, 'Awaiting Approval'), (1, 'Approved; awaiting RSE time'), (2, 'Declined'), (3, 'In progress'), (4, 'Completed'), (5, 'Closed')], default=0, verbose_name='Previous Status')),
('reason_decision', models.TextField(blank=True, max_length=256, verbose_name='Reason for the RSE allocation status decision:')),
('notes', models.TextField(blank=True, help_text='Internal notes', max_length=512, verbose_name='Notes')),
('created_time', models.DateTimeField(blank=True, editable=False)),
('modified_time', models.DateTimeField(blank=True, editable=False)),
('started_date', models.DateTimeField(null=True)),
('completed_date', models.DateTimeField(null=True)),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_change_reason', models.CharField(max_length=100, null=True)),
('history_date', models.DateTimeField()),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('project', models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='project.Project')),
],
options={
'verbose_name': 'historical rse allocation',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
),
migrations.CreateModel(
name='RSEAllocation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(help_text='A one-sentence summary of the work to be done by the RSE team.', max_length=256, verbose_name='Subproject title')),
('software', models.TextField(help_text='The software currently in use to deliver the research outcomes. Questions to consider: Is it commercial, open-source, or in-house code? What language is it written in? What libraries does it depend on? Is it parallelised?', max_length=2000, verbose_name='Software description')),
('duration', models.DecimalField(decimal_places=1, max_digits=5, verbose_name='Estimated duration (in weeks)')),
('goals', models.TextField(help_text='Describe in as much detail as possible what you would like the RSE team to achieve. What steps are necessary to achieve this?', max_length=5000, verbose_name='Project goals')),
('outcomes', models.TextField(help_text='What effect will the completion of this work have on your research, e.g. in terms of publications or grants enabled? ', max_length=2000, verbose_name='Project outcomes')),
('confidentiality', models.TextField(blank=True, help_text='Is the research or code restricted from being published openly and presented at conferences and other events related to research software? If so, please describe the restrictions.', max_length=1000, verbose_name='Confidentiality constraints')),
('status', models.PositiveSmallIntegerField(choices=[(0, 'Awaiting Approval'), (1, 'Approved; awaiting RSE time'), (2, 'Declined'), (3, 'In progress'), (4, 'Completed'), (5, 'Closed')], default=0, verbose_name='Current Status')),
('previous_status', models.PositiveSmallIntegerField(choices=[(0, 'Awaiting Approval'), (1, 'Approved; awaiting RSE time'), (2, 'Declined'), (3, 'In progress'), (4, 'Completed'), (5, 'Closed')], default=0, verbose_name='Previous Status')),
('reason_decision', models.TextField(blank=True, max_length=256, verbose_name='Reason for the RSE allocation status decision:')),
('notes', models.TextField(blank=True, help_text='Internal notes', max_length=512, verbose_name='Notes')),
('created_time', models.DateTimeField(auto_now_add=True)),
('modified_time', models.DateTimeField(auto_now=True)),
('started_date', models.DateTimeField(null=True)),
('completed_date', models.DateTimeField(null=True)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project.Project')),
],
),
]
| 98.753623 | 336 | 0.672292 | 820 | 6,814 | 5.462195 | 0.243902 | 0.056486 | 0.025452 | 0.03081 | 0.804644 | 0.771378 | 0.76468 | 0.749721 | 0.732753 | 0.732753 | 0 | 0.021323 | 0.194746 | 6,814 | 68 | 337 | 100.205882 | 0.79497 | 0.006604 | 0 | 0.516129 | 1 | 0.129032 | 0.404611 | 0.010049 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.048387 | 0 | 0.096774 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9e60ed95bd434a15121fd58b4193851f9c1a526d | 93 | py | Python | invenio_oarepo_oai_pmh_harvester/utils.py | Semtexcz/invenio-oarepo-oai-pmh-harvester | 2866c7d7355f6885b4f443ee1e82baa24502b36e | [
"MIT"
] | null | null | null | invenio_oarepo_oai_pmh_harvester/utils.py | Semtexcz/invenio-oarepo-oai-pmh-harvester | 2866c7d7355f6885b4f443ee1e82baa24502b36e | [
"MIT"
] | null | null | null | invenio_oarepo_oai_pmh_harvester/utils.py | Semtexcz/invenio-oarepo-oai-pmh-harvester | 2866c7d7355f6885b4f443ee1e82baa24502b36e | [
"MIT"
] | null | null | null | from collections import defaultdict
def infinite_dd():
return defaultdict(infinite_dd)
| 15.5 | 35 | 0.795699 | 11 | 93 | 6.545455 | 0.727273 | 0.277778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.150538 | 93 | 5 | 36 | 18.6 | 0.911392 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 7 |
9e772fe90bf1fdd96c6267b76fa96c221c0f657b | 3,233 | py | Python | poly.py | The-SocialLion/Pumpkin-Price-and-Pumpkin-category-prediction-using-Polynomial-Regression | e9fb29857ef36c1cff2ac6e0525748a5ff9e5c0e | [
"Apache-2.0"
] | null | null | null | poly.py | The-SocialLion/Pumpkin-Price-and-Pumpkin-category-prediction-using-Polynomial-Regression | e9fb29857ef36c1cff2ac6e0525748a5ff9e5c0e | [
"Apache-2.0"
] | null | null | null | poly.py | The-SocialLion/Pumpkin-Price-and-Pumpkin-category-prediction-using-Polynomial-Regression | e9fb29857ef36c1cff2ac6e0525748a5ff9e5c0e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Untitled31.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1w9AU0syLBdQF7Gd9WCgPvalJhefOk20U
"""
# ------------------------------------------------------------------
# Experiment 1: predict the average pumpkin price from the remaining
# categorical features via degree-9 polynomial regression.
# ------------------------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# Load the Atlanta terminal-market pumpkin data and derive the target.
dataset = pd.read_csv('atlanta_9-24-2016_9-30-2017.csv')
dataset['Average Price'] = ((dataset['Low Price'] + dataset['High Price']) / 2)
# Drop identifier/free-text columns and the raw price columns that would
# leak the target.
dataset = dataset.drop(columns=[ 'Commodity Name','City Name','Type','Sub Variety','Grade','Date','Low Price','High Price','Mostly Low','Mostly High','Origin District','Color','Environment','Unit of Sale','Quality','Condition','Appearance','Storage','Crop','Repack','Trans Mode'])
dataset = dataset.dropna(how='any')
y = dataset.iloc[:, -1].values
X = dataset.iloc[:, :-1].values

# Integer-encode the four categorical feature columns.
# NOTE(review): LabelEncoder imposes an arbitrary ordinal order on
# features; OneHotEncoder is usually preferable -- kept for compatibility.
from sklearn.preprocessing import LabelEncoder
le = LabelEncoder()
for column in range(4):
    X[:, column] = le.fit_transform(X[:, column])
print(X)
print(y)
y = y.reshape(len(y), 1)
print(y)

from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 1)

from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
poly_reg = PolynomialFeatures(degree = 9)
# Fit the polynomial expansion on the training split only, then apply the
# already-fitted mapping to the test split (fixes the earlier
# fit_transform-on-test anti-pattern).
X_train = poly_reg.fit_transform(X_train)
X_test = poly_reg.transform(X_test)
lin_reg_2 = LinearRegression()
lin_reg_2.fit(X_train, y_train)
y_pred = lin_reg_2.predict(X_test)
np.set_printoptions(precision=2)
# Show predictions next to ground truth, side by side.
print(np.concatenate((y_pred.reshape(len(y_pred),1), y_test.reshape(len(y_test),1)),1))

"""Note: this is one way of implmenting the algorithm based on the size and other features now let us try the reverse method and see if it performs as expected"""

# ------------------------------------------------------------------
# Experiment 2 (reverse problem): predict the label-encoded item size
# from the other features, again with polynomial regression.
# ------------------------------------------------------------------
dataset = pd.read_csv('atlanta_9-24-2016_9-30-2017.csv')
dataset['Average Price'] = ((dataset['Low Price'] + dataset['High Price']) / 2)
dataset['Size'] = dataset['Item Size']  # duplicate Item Size as the last column so the target slice stays simple
dataset = dataset.drop(columns=[ 'Item Size','Commodity Name','City Name','Type','Sub Variety','Grade','Date','Low Price','High Price','Mostly Low','Mostly High','Origin District','Color','Environment','Unit of Sale','Quality','Condition','Appearance','Storage','Crop','Repack','Trans Mode'])
dataset = dataset.dropna(how='any')
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, -1].values

from sklearn.preprocessing import LabelEncoder
le = LabelEncoder()
for column in range(3):
    X[:, column] = le.fit_transform(X[:, column])
# The target (item size) is categorical, so it is label-encoded too.
y = le.fit_transform(y)
print(X)
print(y)
y = y.reshape(len(y), 1)
print(y)

from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 1)

from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
poly_reg = PolynomialFeatures(degree = 9)
# Same fit-on-train / transform-on-test discipline as experiment 1.
X_train = poly_reg.fit_transform(X_train)
X_test = poly_reg.transform(X_test)
lin_reg_2 = LinearRegression()
lin_reg_2.fit(X_train, y_train)
y_pred = lin_reg_2.predict(X_test)
np.set_printoptions(precision=2)
print(np.concatenate((y_pred.reshape(len(y_pred),1), y_test.reshape(len(y_test),1)),1)) | 38.035294 | 290 | 0.752242 | 529 | 3,233 | 4.434783 | 0.272212 | 0.061381 | 0.060955 | 0.044757 | 0.786019 | 0.768968 | 0.768968 | 0.768968 | 0.768968 | 0.768968 | 0 | 0.027591 | 0.08073 | 3,233 | 85 | 291 | 38.035294 | 0.761777 | 0.082277 | 0 | 0.866667 | 1 | 0 | 0.185913 | 0.022167 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.183333 | 0 | 0.183333 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9e820b762a0a64a4382b951f596808d5364f2755 | 64 | py | Python | batchviewer/__init__.py | stalhabukhari/BatchViewer | 88cfd6738c07f64e4dc91aafdf1a5842702a64e5 | [
"Apache-2.0"
] | 51 | 2017-12-21T09:48:13.000Z | 2021-12-31T14:38:33.000Z | batchviewer/__init__.py | stalhabukhari/BatchViewer | 88cfd6738c07f64e4dc91aafdf1a5842702a64e5 | [
"Apache-2.0"
] | 4 | 2019-08-18T23:01:22.000Z | 2021-01-06T02:22:17.000Z | batchviewer/__init__.py | stalhabukhari/BatchViewer | 88cfd6738c07f64e4dc91aafdf1a5842702a64e5 | [
"Apache-2.0"
] | 9 | 2019-07-05T15:57:36.000Z | 2021-04-30T16:02:30.000Z |
from .batchviewer import *
from .batchviewer import view_batch
| 16 | 35 | 0.8125 | 8 | 64 | 6.375 | 0.625 | 0.588235 | 0.823529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.140625 | 64 | 3 | 36 | 21.333333 | 0.927273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9e8f719c924d06c9996350653bca06a9f7cf2981 | 125 | py | Python | odapi/tests/interfaces/__init__.py | jlandercy/odapi | 781aa95ef346f8d5f1d727a19ae078687cc4cc36 | [
"BSD-3-Clause"
] | 1 | 2020-05-27T08:33:26.000Z | 2020-05-27T08:33:26.000Z | odapi/tests/interfaces/__init__.py | jlandercy/odapi | 781aa95ef346f8d5f1d727a19ae078687cc4cc36 | [
"BSD-3-Clause"
] | null | null | null | odapi/tests/interfaces/__init__.py | jlandercy/odapi | 781aa95ef346f8d5f1d727a19ae078687cc4cc36 | [
"BSD-3-Clause"
] | null | null | null | from odapi.tests.interfaces.test_interfaces_timeserie import *
from odapi.tests.interfaces.test_interfaces_geomatic import *
| 41.666667 | 62 | 0.872 | 16 | 125 | 6.5625 | 0.5 | 0.171429 | 0.266667 | 0.457143 | 0.72381 | 0.72381 | 0 | 0 | 0 | 0 | 0 | 0 | 0.064 | 125 | 2 | 63 | 62.5 | 0.897436 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
7b35f5792b8f4a96006044245de8c72e0dccd4ea | 116,529 | py | Python | main.py | JacobLiebenow/Waves-In-Motion | 0678829bc1c93cf870f4b926872d6f3beeec43fc | [
"Apache-2.0"
] | null | null | null | main.py | JacobLiebenow/Waves-In-Motion | 0678829bc1c93cf870f4b926872d6f3beeec43fc | [
"Apache-2.0"
] | null | null | null | main.py | JacobLiebenow/Waves-In-Motion | 0678829bc1c93cf870f4b926872d6f3beeec43fc | [
"Apache-2.0"
] | null | null | null | #Written by: Jacob S Liebenow
#Version: 0.2.2
#Stage: Alpha
#
#
#
#This program is designed primarily to organize venues, bands, bookers, etc. into workable data. If
#possible, this data will be analyzed for future use, potentially using data mining. All data will be
#uploaded to Google Sheets/Drive, and can be managed from there. However, in this initial state, I
#want to work with local data to make sure flow of said data is correct.
#
#It's possible the directories could be better managed than this, but because of interchangability of
#the classes and data types, and early stage of development I've decided to organize them into a single
#datacls directory for the time being.
#Import Kivy GUI Framework
import kivy
from kivy.app import App
from kivy.uix.button import Label
from kivy.lang import Builder
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.gridlayout import GridLayout
from kivy.uix.widget import Widget
from kivy.uix.textinput import TextInput
from kivy.properties import ObjectProperty
from kivy.uix.button import Button
from kivy.uix.popup import Popup
from kivy.uix.checkbox import CheckBox
from kivy.uix.spinner import Spinner
from kivy.uix.spinner import SpinnerOption
from kivy.uix.scrollview import ScrollView
from kivy.config import Config
#Import MapView
from mapview import MapView
from mapview import MapMarker
#Import geopy
from geopy.geocoders import Nominatim
#Import Calendar classes
import datetime
import sys
import codecs
#Import custom classes
from datacls import state
from datacls import city
from datacls import venue
from datacls import datacenter
from datacls import dayinfo
from datacls import contact
from datacls import organization
# Disable the red-dot multitouch emulation so right-click behaves like a
# normal desktop mouse button.
Config.set("input", "mouse", "mouse,multitouch_on_demand")
# Fail fast if the installed Kivy is older than the version developed against.
kivy.require('1.9.0')
Builder.load_string("""
<ScreenMainMenu>:
orientation: "vertical"
padding: 40
spacing: 40
RelativeLayout:
size_hint_y: 0.75
pos_hint: {"center_x": 0.5, "top": 1}
Label:
text: "Welcome to Waves In Motion"
RelativeLayout:
orientation: "vertical"
padding: 10
size_hint_y: 0.25
pos_hint: {"center_x": 0.5, "y": 0}
Button:
text: "Database Manager"
pos_hint: {"center_x": 0.5, "top": 1}
size_hint_x: 0.5
size_hint_y: 0.5
on_press:
root.manager.transition.direction = "left"
root.manager.transition.duration = 0.5
root.manager.current = "screen_database"
Button:
text: "Calendar"
pos_hint: {"center_x": 0.5, "bottom": 0}
size_hint_x: 0.5
size_hint_y: 0.5
on_press:
root.manager.transition.direction = "left"
root.manager.transition.duration = 0.5
root.manager.current = "screen_calendar"
<ScreenDatabase>
orientation: "vertical"
padding: 20
spacing: 40
RelativeLayout:
RelativeLayout:
size_hint_y: 0.02
pos_hint: {"center_x": 0.5, "top": 1}
RelativeLayout:
size_hint_y: 0.18
pos_hint: {"center_x": 0.5, "top": 0.98}
Label:
halign: "center"
valign: "center"
text: "Database Manager"
Button:
size_hint_x: None
size_hint_y: None
height:"40dp"
length:"40dp"
text: "Back"
pos_hint: {"right": 0.99, "top": 1}
on_press:
root.manager.transition.direction = "right"
root.manager.transition.duration = 0.5
root.manager.current = "screen_mainmenu"
DatabaseManagementDatabaseLinkView:
size_hint_y: 0.3
size_hint_x: 0.8
pos_hint: {"center_x": 0.5, "bottom": 0}
DatabaseViewer:
size_hint_x: 1
size_hint_y: 0.8
<ScreenCalendar>
orientation: "vertical"
padding: 20
spacing: 40
RelativeLayout:
RelativeLayout:
size_hint_y: 0.02
pos_hint: {"center_x": 0.5, "top": 1}
RelativeLayout:
size_hint_y: 0.18
pos_hint: {"center_x": 0.5, "top": 0.98}
Label:
halign: "center"
valign: "center"
text: "Calendar"
Button:
size_hint_x: None
size_hint_y: None
height:"40dp"
length:"40dp"
text: "Back"
pos_hint: {"right": 0.99, "top": 1}
on_press:
root.manager.transition.direction = "right"
root.manager.transition.duration = 0.5
root.manager.current = "screen_mainmenu"
CalendarDatabaseLinkView:
size_hint_y: 0.3
size_hint_x: 0.8
pos_hint: {"center_x": 0.5, "bottom": 0}
CalendarViewer:
""")
#Create the base calendar class for use as a widget in the Calendar screen (Model)
class Calendar():
    """Month model backing the calendar screen.

    Holds a 6x7 grid (``calendarObj``) of 42 consecutive
    ``datetime.date`` objects covering the displayed month, padded with
    days from the neighbouring months so row 1 starts on Monday
    (weekday 0) and row 6 ends on Sunday (weekday 6).
    ``calendarObjFlat`` is the same 42 days as a flat list for widgets
    that iterate day by day.
    """

    # Display names indexed by (month number - 1).
    MONTH_NAMES = ("January", "February", "March", "April", "May", "June",
                   "July", "August", "September", "October", "November",
                   "December")

    def __init__(self, tday):
        """Build the grid for the month containing ``tday`` (a date)."""
        self.today = tday
        self.currentYear = tday.year
        self.currentMonth = tday.month
        # The grid construction lives in update(); previously this body
        # duplicated it verbatim. Start at day 1 of tday's month.
        self.update(datetime.date(tday.year, tday.month, 1))

    def update(self, day):
        """Rebuild the 6x7 grid so that the month of ``day`` is displayed.

        ``day`` must be the first day of the target month.
        """
        self.fillerDate = datetime.date.today()
        self.tdelta = datetime.timedelta(days=1)
        self.row1 = [self.fillerDate] * 7
        self.row2 = []
        self.row3 = []
        self.row4 = []
        self.row5 = []
        self.row6 = []
        self.calendarObj = [self.row1, self.row2, self.row3, self.row4, self.row5, self.row6]
        self.rowIncrementerIndex = 0
        self.day = day
        self.dayPlaceholder = day
        # Row 1: place day 1 by weekday, back-fill to Monday, then
        # forward-fill to Sunday.
        self.row1[self.day.weekday()] = self.day
        while self.day.weekday() != 0:
            self.day -= self.tdelta
            self.row1[self.day.weekday()] = self.day
        self.day = self.dayPlaceholder
        while self.day.weekday() != 6:
            self.day += self.tdelta
            self.row1[self.day.weekday()] = self.day
        self.rowIncrementerIndex += 1
        self.day += self.tdelta
        # Rows 2-6: append seven consecutive days each, Monday..Sunday.
        while self.rowIncrementerIndex != 6:
            while self.day.weekday() != 6:
                self.calendarObj[self.rowIncrementerIndex].append(self.day)
                self.day += self.tdelta
            self.calendarObj[self.rowIncrementerIndex].append(self.day)
            self.day += self.tdelta
            self.rowIncrementerIndex += 1
        # Reset the row incrementer so callers can reuse the variable.
        self.rowIncrementerIndex = 0
        # Flat copy of all 42 days in display order.
        self.calendarObjFlat = [d for row in self.calendarObj for d in row]

    #Self-explanatory functions to increment/decrement the month when a button is pressed
    def incrementMonth(self):
        """Advance the displayed month by one, rolling the year forward."""
        self.currentMonth += 1
        if self.currentMonth > 12:
            self.currentMonth = 1
            self.currentYear += 1
        self.update(datetime.date(self.currentYear, self.currentMonth, 1))

    def decrementMonth(self):
        """Move the displayed month back by one, rolling the year back."""
        self.currentMonth -= 1
        if self.currentMonth < 1:
            self.currentMonth = 12
            self.currentYear -= 1
        self.update(datetime.date(self.currentYear, self.currentMonth, 1))

    #Returns the Calendar Month as a string (1 = January, 2 = February, etc.)
    def currentMonthString(self):
        """Return the English month name for ``currentMonth``."""
        if 1 <= self.currentMonth <= 12:
            return self.MONTH_NAMES[self.currentMonth - 1]
        return "MONTH NOT FOUND"
#Multiple classes will share the same data-centric object information, thus they're instantiated here
# Shared module-level singletons: today's date, the calendar model, and the
# data backend.
today = datetime.date.today()
calendar = Calendar(today)
# NOTE(review): this rebinds the imported `datacenter` module name to an
# instance, shadowing the module object -- confusing, but the rest of the
# file relies on it.
datacenter = datacenter.Datacenter("DummyLink")
# Placeholder -- presumably filled in once a Google API link is authorized;
# TODO confirm against the rest of the file.
credentials = None
#Provide an overall view for the Calendar for managing data provided from the Calendar object (View)
class CalendarViewer(RelativeLayout):
    #Define the general layout of the internal widgets of the Calendar
    # NOTE(review): these widgets are class attributes, so every
    # CalendarViewer instance shares the same widget objects. That is fine
    # while only one viewer exists -- confirm if more are ever created.
    dateManagerLayout = RelativeLayout(size_hint = (0.3, 1), pos_hint = {"left": 0, "top": 1})
    monthViewer = RelativeLayout(size_hint_y = 0.15, pos_hint = {"center_x":0.5, "top":1})
    dateViewer = GridLayout(cols = 7, size_hint_y = 0.35, pos_hint = {"center_x":0.5, "top":0.85})
    dateInfoViewer = RelativeLayout(size_hint_y = 0.45, pos_hint = {"center_x":0.5, "top":0.5})
    dateInfoButtons = BoxLayout(size_hint_y = 0.05, pos_hint = {"center_x":0.5, "bottom":0})
    mapViewer = RelativeLayout (size_hint = (0.7, 1), pos_hint = {"right": 1, "top": 1})
    # Geocoder used to plot venue addresses on the map.
    geolocator = Nominatim()
def __init__(self, **kwargs):
self.size_hint_y = 0.8
self.pos_hint = {"center_x": 0.5, "bottom": 0}
self.selectedState = None
self.selectedCity = None
self.selectedVenue = None
self.selectedContact = None
self.selectedOrganization = None
self.selectedDate = None
self.statePicked = False
self.cityPicked = False
self.venuePicked = False
self.locations = []
super(CalendarViewer, self).__init__(**kwargs)
#Add the month viewer to the layout
self.add_widget(self.dateManagerLayout)
self.dateManagerLayout.add_widget(self.monthViewer)
self.monthViewer.backButton = Button(text = "<", pos_hint = {"left":0,"center_y":0.5}, size_hint_x = 0.25, size_hint_y = 0.5)
self.monthViewer.backButton.bind(on_press = self.decrementMonth)
self.monthViewer.add_widget(self.monthViewer.backButton)
self.monthViewer.monthSelected = Label(text = (calendar.currentMonthString()+", "+str(calendar.currentYear)))
self.monthViewer.add_widget(self.monthViewer.monthSelected)
self.monthViewer.forwardButton = Button(text = ">", pos_hint = {"right":1,"center_y":0.5}, size_hint_x = 0.25, size_hint_y = 0.5)
self.monthViewer.forwardButton.bind(on_press = self.incrementMonth)
self.monthViewer.add_widget(self.monthViewer.forwardButton)
#Add the date viewer to the layout
self.dateManagerLayout.add_widget(self.dateViewer)
self.dateViewer.monLabel = Label(text = "Mo")
self.dateViewer.add_widget(self.dateViewer.monLabel)
self.dateViewer.tueLabel = Label(text = "Tu")
self.dateViewer.add_widget(self.dateViewer.tueLabel)
self.dateViewer.wedLabel = Label(text = "We")
self.dateViewer.add_widget(self.dateViewer.wedLabel)
self.dateViewer.thursLabel = Label(text = "Th")
self.dateViewer.add_widget(self.dateViewer.thursLabel)
self.dateViewer.friLabel = Label(text = "Fr")
self.dateViewer.add_widget(self.dateViewer.friLabel)
self.dateViewer.satLabel = Label(text = "Sa")
self.dateViewer.add_widget(self.dateViewer.satLabel)
self.dateViewer.sunLabel = Label(text = "Su")
self.dateViewer.add_widget(self.dateViewer.sunLabel)
for day in calendar.calendarObjFlat:
if day.month < calendar.currentMonth:
if day.year <= calendar.currentYear:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0.1,0.1,0.1,1])
self.dateViewer.newButton.bind(on_press = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
elif day.year > calendar.currentYear:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0.2,0.2,0.2,1])
self.dateViewer.newButton.bind(on_press = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
elif day.month > calendar.currentMonth:
if day.year >= calendar.currentYear:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0.2,0.2,0.2,1])
self.dateViewer.newButton.bind(on_press = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
elif day.year < calendar.currentYear:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0.1,0.1,0.1,1])
self.dateViewer.newButton.bind(on_press = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
else:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0,0.4,0.3,1])
self.dateViewer.newButton.bind(on_press = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
#Add the day info viewer to the layout
self.dateManagerLayout.add_widget(self.dateInfoViewer)
self.dateNameLabel = Label(text = "Day Info", size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 1})
self.dateInfoVenueNameLabel = Label(text = "Venues:", size_hint = (0.25, 0.1), pos_hint = {"center_x": 0.5, "top": 0.9})
self.dateInfoVenueScrollView = ScrollView(size_hint = (1, 0.3), pos_hint = {"center_x": 0.5, "top": 0.8})
self.dateInfoVenueText = Label(size_hint = (1, None))
self.dateInfoVenueText.bind(width = lambda *x: self.dateInfoVenueText.setter("text_size")(self.dateInfoVenueText, (self.dateInfoVenueText.width, None)), texture_size = lambda *x: self.dateInfoVenueText.setter("height")(self.dateInfoVenueText, self.dateInfoVenueText.texture_size[1]))
self.dateInfoVenueScrollView.add_widget(self.dateInfoVenueText)
self.dateInfoContactNameLabel = Label(text = "Contacts:", size_hint = (0.25, 0.1), pos_hint = {"center_x": 0.5, "top": 0.5})
self.dateInfoContactScrollView = ScrollView(size_hint = (1, 0.3), pos_hint = {"center_x": 0.5, "top": 0.4})
self.dateInfoContactText = Label(size_hint = (1, None))
self.dateInfoContactText.bind(width = lambda *x: self.dateInfoContactText.setter("text_size")(self.dateInfoContactText, (self.dateInfoContactText.width, None)), texture_size = lambda *x: self.dateInfoContactText.setter("height")(self.dateInfoContactText, self.dateInfoContactText.texture_size[1]))
self.dateInfoContactScrollView.add_widget(self.dateInfoContactText)
self.dateInfoViewer.add_widget(self.dateNameLabel)
self.dateInfoViewer.add_widget(self.dateInfoVenueNameLabel)
self.dateInfoViewer.add_widget(self.dateInfoVenueScrollView)
self.dateInfoViewer.add_widget(self.dateInfoContactNameLabel)
self.dateInfoViewer.add_widget(self.dateInfoContactScrollView)
self.dateManagerLayout.add_widget(self.dateInfoButtons)
self.newDateButton = Button(text = "New/Edit", size_hint_x = 0.33, on_press = self.addOrChangeDate)
self.remDateButton = Button(text = "Remove", size_hint_x = 0.33, on_press = self.removeDate)
self.expandDateButton = Button(text = "Expand", size_hint_x = 0.33, on_press = self.expandDate)
self.dateInfoButtons.add_widget(self.newDateButton)
self.dateInfoButtons.add_widget(self.remDateButton)
self.dateInfoButtons.add_widget(self.expandDateButton)
#Add the map view to the layout
self.add_widget(self.mapViewer)
self.map = MapView(lat = 50.6394, lon = 3.057, zoom = 8)
self.mapViewer.add_widget(self.map)
#Remake the dates based off the new month selection
def updateMonth(self):
self.monthViewer.monthSelected.text = (calendar.currentMonthString()+", "+str(calendar.currentYear))
self.dateViewer.clear_widgets()
self.dateViewer.monLabel = Label(text = "Mo")
self.dateViewer.add_widget(self.dateViewer.monLabel)
self.dateViewer.tueLabel = Label(text = "Tu")
self.dateViewer.add_widget(self.dateViewer.tueLabel)
self.dateViewer.wedLabel = Label(text = "We")
self.dateViewer.add_widget(self.dateViewer.wedLabel)
self.dateViewer.thursLabel = Label(text = "Th")
self.dateViewer.add_widget(self.dateViewer.thursLabel)
self.dateViewer.friLabel = Label(text = "Fr")
self.dateViewer.add_widget(self.dateViewer.friLabel)
self.dateViewer.satLabel = Label(text = "Sa")
self.dateViewer.add_widget(self.dateViewer.satLabel)
self.dateViewer.sunLabel = Label(text = "Su")
self.dateViewer.add_widget(self.dateViewer.sunLabel)
for day in calendar.calendarObjFlat:
if day.month < calendar.currentMonth:
if day.year <= calendar.currentYear:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0.1,0.1,0.1,1])
self.dateViewer.newButton.bind(on_release = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
elif day.year > calendar.currentYear:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0.2,0.2,0.2,1])
self.dateViewer.newButton.bind(on_release = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
elif day.month > calendar.currentMonth:
if day.year >= calendar.currentYear:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0.2,0.2,0.2,1])
self.dateViewer.newButton.bind(on_release = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
elif day.year < calendar.currentYear:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0.1,0.1,0.1,1])
self.dateViewer.newButton.bind(on_release = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
else:
self.dateViewer.newButton = Button(text = str(day.day), background_normal = "", background_color = [0,0.4,0.3,1])
self.dateViewer.newButton.bind(on_release = self.dateSelector)
self.dateViewer.add_widget(self.dateViewer.newButton)
    def incrementMonth(self,instance):
        # Button callback (`instance` is the pressed Button, unused):
        # advance the shared calendar model one month and redraw the grid.
        calendar.incrementMonth()
        self.updateMonth()
    def decrementMonth(self,instance):
        # Button callback (`instance` is the pressed Button, unused):
        # move the shared calendar model back one month and redraw the grid.
        calendar.decrementMonth()
        self.updateMonth()
    #Obtain the date from the date button pressed
    def dateSelector(self,instance):
        """Day-button callback: resolve the real date that was pressed, then
        show that day's venues/contacts and plot its venues on the map.

        The pressed button only carries the day-of-month as its text; the
        month offset (previous/current/next) is recovered from the button's
        background color assigned when the grid was built.
        """
        monthSelected = calendar.currentMonth
        yearSelected = calendar.currentYear
        if instance.background_color == [0,0.4,0.3,1]:
            # Teal: a day of the displayed month.
            self.dateNameLabel.text = (str(monthSelected)+"/"+instance.text+"/"+str(yearSelected))
        elif instance.background_color == [0.1,0.1,0.1,1]:
            # Dark gray: padding day from the previous month (handles the
            # January -> December year rollover).
            monthSelected -= 1
            if monthSelected < 1:
                monthSelected = 12
                yearSelected -= 1
            self.dateNameLabel.text = (str(monthSelected)+"/"+instance.text+"/"+str(yearSelected))
        elif instance.background_color == [0.2,0.2,0.2,1]:
            # Lighter gray: padding day from the next month (handles the
            # December -> January year rollover).
            monthSelected += 1
            if monthSelected > 12:
                monthSelected = 1
                yearSelected += 1
            self.dateNameLabel.text = (str(monthSelected)+"/"+instance.text+"/"+str(yearSelected))
        if datacenter.linkValid is True:
            dateFound = datacenter.dateFinder(self.dateNameLabel.text)
            # Clear the info panel before repopulating it.
            self.dateInfoVenueText.text = ""
            self.dateInfoContactText.text = ""
            if dateFound is True:
                population = datacenter.populateDate(self.dateNameLabel.text)
                self.selectedDate = datacenter.selectDate(self.dateNameLabel.text)
                self.locations = []
                locationIndex = 0
                for venue in self.selectedDate.venues:
                    # Only geocode venues with a complete street/city/state address.
                    if venue.address != "N/A" and venue.cityName != "N/A" and venue.stateName != "N/A":
                        location = self.geolocator.geocode(venue.address+" "+venue.cityName+" "+venue.stateName)
                        if location is not None:
                            self.locations.append(location)
                        else:
                            # Geocoding returned nothing -- presumably a
                            # malformed address; warn and continue.
                            popupLabel = Label(text = "Address added has an illegal character.\n\n\nLocation not shown on map, but still accessible in expanded view.")
                            popup = Popup(title = "Address Not Plotted", content = popupLabel, size_hint = (0.7, 0.45))
                            popup.open()
                    # Build the comma-separated venue list regardless of
                    # whether the venue could be plotted.
                    if self.dateInfoVenueText.text == "":
                        self.dateInfoVenueText.text = venue.venueName
                    else:
                        self.dateInfoVenueText.text += (", "+venue.venueName)
                for contact in self.selectedDate.contacts:
                    if self.dateInfoContactText.text == "":
                        self.dateInfoContactText.text = contact.name
                    else:
                        self.dateInfoContactText.text += (", "+contact.name)
                # Organizations share the contacts label.
                for organization in self.selectedDate.organizations:
                    if self.dateInfoContactText.text == "":
                        self.dateInfoContactText.text = organization.organizationName
                    else:
                        self.dateInfoContactText.text += (", "+organization.organizationName)
                for location in self.locations:
                    # Re-center the map on the first plotted venue, then drop
                    # a marker for every geocoded location.
                    if locationIndex == 0:
                        self.mapViewer.remove_widget(self.map)
                        self.map = MapView(lat = location.latitude, lon = location.longitude, zoom = 10)
                        self.mapViewer.add_widget(self.map)
                    locationIndex += 1
                    marker = MapMarker(lat = location.latitude, lon = location.longitude)
                    self.map.add_widget(marker)
#Add a date's info if it's not there, or edit it if it is
def addOrChangeDate(self, instance):
if self.dateNameLabel.text != "Day Info" and datacenter.linkValid is True:
dateFound = datacenter.dateFinder(self.dateNameLabel.text)
#Edit the given day
if dateFound is True:
population = datacenter.populateDate(self.dateNameLabel.text)
self.selectedDate = datacenter.selectDate(self.dateNameLabel.text)
#Create a popup with which to edit the day's associated info
self.popupLayout = RelativeLayout()
self.stateSpinner = Spinner(text = "State", size_hint = (0.2, 0.1), pos_hint = {"center_x": 0.4, "top": 1}, on_press = self.stateSelection)
self.citySpinner = Spinner(text = "City", size_hint = (0.2, 0.1), pos_hint = {"center_x": 0.6, "top": 1}, on_press = self.citySelection)
self.venueSpinner = Spinner(text = "Venue", size_hint = (0.3, 0.1), pos_hint = {"center_x": 0.2, "top": 0.875}, on_press = self.venueSelection)
self.addVenueButton = Button (text ="Add", size_hint = (0.0625, 0.05), pos_hint = {"center_x": 0.38125, "top": 0.875}, on_press = self.addVenueToDate)
self.removeVenueButton = Button (text ="Rem", size_hint = (0.0625, 0.05), pos_hint = {"center_x": 0.38125, "top": 0.825}, on_press = self.removeVenueFromDate)
self.contactSpinner = Spinner(text = "Contact", size_hint = (0.25, 0.1), pos_hint = {"center_x": 0.7625, "top": 0.875}, on_press = self.contactSelection)
self.addContactButton = Button (text = "Add", size_hint = (0.0625, 0.05), pos_hint = {"center_x": 0.91875, "top": 0.875}, on_press = self.addContactToDate)
self.removeContactButton = Button (text = "Rem", size_hint = (0.0625, 0.05), pos_hint = {"center_x": 0.91875, "top": 0.825}, on_press = self.removeContactFromDate)
self.individualRadio = CheckBox(size_hint = (0.05, 0.05), pos_hint = {"center_x": 0.6125, "top": 0.875})
self.individualRadio.active = True
self.individualRadio.group = "ContactSelection"
self.organizationRadio = CheckBox(size_hint = (0.05, 0.05), pos_hint = {"center_x": 0.6125, "top": 0.825})
self.organizationRadio.active = False
self.organizationRadio.group = "ContactSelection"
self.individualLabel = Label(text = "Individual:", size_hint = (0.2, 0.05), pos_hint = {"center_x": 0.5125, "top": 0.875})
self.organizationLabel = Label(text = "Organization:", size_hint = (0.2, 0.05), pos_hint = {"center_x": 0.5125, "top": 0.825})
self.noteLabel = Label(text = "Notes:", size_hint = (0.1, 0.05), pos_hint = {"center_x": 0.1, "top": 0.75})
self.noteInput = TextInput(size_hint = (0.7, 0.225), pos_hint = {"center_x": 0.6, "top": 0.75}, multiline = True)
self.noteInput.text = self.selectedDate.notes
self.currentVenuesLabel = Label(text = "Current Venues:", size_hint = (0.1, 0.05), pos_hint = {"center_x": 0.1, "top": 0.525})
self.currentVenuesScrollView = ScrollView(size_hint = (0.7, 0.225), pos_hint = {"center_x": 0.6, "top": 0.525})
self.currentVenuesText = Label(size_hint = (1, None))
self.currentVenuesText.bind(width = lambda *x: self.currentVenuesText.setter("text_size")(self.currentVenuesText, (self.currentVenuesText.width, None)), texture_size = lambda *x: self.currentVenuesText.setter("height")(self.currentVenuesText, self.currentVenuesText.texture_size[1]))
self.currentVenuesScrollView.add_widget(self.currentVenuesText)
self.relevantContactsLabel = Label(text = "Relevant Contacts:", size_hint = (0.1, 0.05), pos_hint = {"center_x": 0.1, "top": 0.3})
self.relevantContactsScrollView = ScrollView(size_hint = (0.7, 0.225), pos_hint = {"center_x": 0.6, "top": 0.3})
self.relevantContactsText = Label(size_hint = (1, None))
self.relevantContactsText.bind(width = lambda *x: self.relevantContactsText.setter("text_size")(self.relevantContactsText, (self.relevantContactsText.width, None)), texture_size = lambda *x: self.relevantContactsText.setter("height")(self.relevantContactsText, self.relevantContactsText.texture_size[1]))
self.relevantContactsScrollView.add_widget(self.relevantContactsText)
self.submitDateButton = Button(text = "Submit", size_hint = (0.2, 0.075), pos_hint = {"right": 0.8, "bottom": 0})
self.closeButton = Button(text = "Close", size_hint = (0.2, 0.075), pos_hint = {"right": 1, "bottom": 0})
self.popupLayout.add_widget(self.individualLabel)
self.popupLayout.add_widget(self.organizationLabel)
self.popupLayout.add_widget(self.noteLabel)
self.popupLayout.add_widget(self.currentVenuesLabel)
self.popupLayout.add_widget(self.relevantContactsLabel)
self.popupLayout.add_widget(self.addVenueButton)
self.popupLayout.add_widget(self.removeVenueButton)
self.popupLayout.add_widget(self.addContactButton)
self.popupLayout.add_widget(self.removeContactButton)
self.popupLayout.add_widget(self.submitDateButton)
self.popupLayout.add_widget(self.closeButton)
self.popupLayout.add_widget(self.noteInput)
self.popupLayout.add_widget(self.currentVenuesScrollView)
self.popupLayout.add_widget(self.relevantContactsScrollView)
self.popupLayout.add_widget(self.individualRadio)
self.popupLayout.add_widget(self.organizationRadio)
self.popupLayout.add_widget(self.stateSpinner)
self.popupLayout.add_widget(self.citySpinner)
self.popupLayout.add_widget(self.venueSpinner)
self.popupLayout.add_widget(self.contactSpinner)
#Populate the popup's text boxes so that it contains all relevant data
for venue in self.selectedDate.venues:
print(venue.venueName)
if self.currentVenuesText.text == "":
self.currentVenuesText.text = venue.venueName
else:
self.currentVenuesText.text += (", "+venue.venueName)
for contact in self.selectedDate.contacts:
if self.relevantContactsText.text == "":
self.relevantContactsText.text = contact.name
else:
self.relevantContactsText.text += (", "+contact.name)
for organization in self.selectedDate.organizations:
if self.relevantContactsText.text == "":
self.relevantContactsText.text = organization.organizationName
else:
self.relevantContactsText.text += (", "+organization.organizationName)
self.popup = Popup(title = "Edit Date", content = self.popupLayout, size_hint = (0.8, 0.85))
self.popup.open()
self.submitDateButton.bind(on_press = self.submitEditedDateData)
self.closeButton.bind(on_press = self.popup.dismiss)
#If the day isn't found, add it
else:
#Add a new date to the database
self.selectedDate = dayinfo.DayInfo(self.dateNameLabel.text)
#Create a popup with which to create the day's associated info
self.popupLayout = RelativeLayout()
self.stateSpinner = Spinner(text = "State", size_hint = (0.2, 0.1), pos_hint = {"center_x": 0.4, "top": 1}, on_press = self.stateSelection)
self.citySpinner = Spinner(text = "City", size_hint = (0.2, 0.1), pos_hint = {"center_x": 0.6, "top": 1}, on_press = self.citySelection)
self.venueSpinner = Spinner(text = "Venue", size_hint = (0.3, 0.1), pos_hint = {"center_x": 0.2, "top": 0.875}, on_press = self.venueSelection)
self.addVenueButton = Button (text ="Add", size_hint = (0.0625, 0.05), pos_hint = {"center_x": 0.38125, "top": 0.875}, on_press = self.addVenueToDate)
self.removeVenueButton = Button (text ="Rem", size_hint = (0.0625, 0.05), pos_hint = {"center_x": 0.38125, "top": 0.825}, on_press = self.removeVenueFromDate)
self.contactSpinner = Spinner(text = "Contact", size_hint = (0.25, 0.1), pos_hint = {"center_x": 0.7625, "top": 0.875}, on_press = self.contactSelection)
self.addContactButton = Button (text = "Add", size_hint = (0.0625, 0.05), pos_hint = {"center_x": 0.91875, "top": 0.875}, on_press = self.addContactToDate)
self.removeContactButton = Button (text = "Rem", size_hint = (0.0625, 0.05), pos_hint = {"center_x": 0.91875, "top": 0.825}, on_press = self.removeContactFromDate)
self.individualRadio = CheckBox(size_hint = (0.05, 0.05), pos_hint = {"center_x": 0.6125, "top": 0.875})
self.individualRadio.active = True
self.individualRadio.group = "ContactSelection"
self.organizationRadio = CheckBox(size_hint = (0.05, 0.05), pos_hint = {"center_x": 0.6125, "top": 0.825})
self.organizationRadio.active = False
self.organizationRadio.group = "ContactSelection"
self.individualLabel = Label(text = "Individual:", size_hint = (0.2, 0.05), pos_hint = {"center_x": 0.5125, "top": 0.875})
self.organizationLabel = Label(text = "Organization:", size_hint = (0.2, 0.05), pos_hint = {"center_x": 0.5125, "top": 0.825})
self.noteLabel = Label(text = "Notes:", size_hint = (0.1, 0.05), pos_hint = {"center_x": 0.1, "top": 0.75})
self.noteInput = TextInput(text = "Insert notes here", size_hint = (0.7, 0.225), pos_hint = {"center_x": 0.6, "top": 0.75}, multiline = True)
self.currentVenuesLabel = Label(text = "Current Venues:", size_hint = (0.1, 0.05), pos_hint = {"center_x": 0.1, "top": 0.525})
self.currentVenuesScrollView = ScrollView(size_hint = (0.7, 0.225), pos_hint = {"center_x": 0.6, "top": 0.525})
self.currentVenuesText = Label(size_hint = (1, None))
self.currentVenuesText.bind(width = lambda *x: self.currentVenuesText.setter("text_size")(self.currentVenuesText, (self.currentVenuesText.width, None)), texture_size = lambda *x: self.currentVenuesText.setter("height")(self.currentVenuesText, self.currentVenuesText.texture_size[1]))
self.currentVenuesScrollView.add_widget(self.currentVenuesText)
self.relevantContactsLabel = Label(text = "Relevant Contacts:", size_hint = (0.1, 0.05), pos_hint = {"center_x": 0.1, "top": 0.3})
self.relevantContactsScrollView = ScrollView(size_hint = (0.7, 0.225), pos_hint = {"center_x": 0.6, "top": 0.3})
self.relevantContactsText = Label(size_hint = (1, None))
self.relevantContactsText.bind(width = lambda *x: self.relevantContactsText.setter("text_size")(self.relevantContactsText, (self.relevantContactsText.width, None)), texture_size = lambda *x: self.relevantContactsText.setter("height")(self.relevantContactsText, self.relevantContactsText.texture_size[1]))
self.relevantContactsScrollView.add_widget(self.relevantContactsText)
self.submitDateButton = Button(text = "Submit", size_hint = (0.2, 0.075), pos_hint = {"right": 0.8, "bottom": 0})
self.closeButton = Button(text = "Close", size_hint = (0.2, 0.075), pos_hint = {"right": 1, "bottom": 0})
self.popupLayout.add_widget(self.individualLabel)
self.popupLayout.add_widget(self.organizationLabel)
self.popupLayout.add_widget(self.noteLabel)
self.popupLayout.add_widget(self.currentVenuesLabel)
self.popupLayout.add_widget(self.relevantContactsLabel)
self.popupLayout.add_widget(self.addVenueButton)
self.popupLayout.add_widget(self.removeVenueButton)
self.popupLayout.add_widget(self.addContactButton)
self.popupLayout.add_widget(self.removeContactButton)
self.popupLayout.add_widget(self.submitDateButton)
self.popupLayout.add_widget(self.closeButton)
self.popupLayout.add_widget(self.noteInput)
self.popupLayout.add_widget(self.currentVenuesScrollView)
self.popupLayout.add_widget(self.relevantContactsScrollView)
self.popupLayout.add_widget(self.individualRadio)
self.popupLayout.add_widget(self.organizationRadio)
self.popupLayout.add_widget(self.stateSpinner)
self.popupLayout.add_widget(self.citySpinner)
self.popupLayout.add_widget(self.venueSpinner)
self.popupLayout.add_widget(self.contactSpinner)
self.popup = Popup(title = "Add Date", content = self.popupLayout, size_hint = (0.8, 0.85))
self.popup.open()
self.submitDateButton.bind(on_press = self.submitDateData)
self.closeButton.bind(on_press = self.popup.dismiss)
else:
popup = Popup(title = "Date not initialized", size_hint = (0.4, 0.45))
popup.open()
#Remove the date from Google Sheets altogether
def removeDate(self, instance):
    """Delete the currently selected date from the Google Sheets spreadsheet.

    Shows a small status popup for each outcome: removed, not found, or not
    initialized (no date selected / database link invalid).
    """
    #Guard: a date must be selected and the database link must be validated
    if self.dateNameLabel.text == "Day Info" or datacenter.linkValid is not True:
        Popup(title = "Date Not Initialized", size_hint = (0.4, 0.45)).open()
        return
    if datacenter.dateFinder(self.dateNameLabel.text) is True:
        datacenter.removeDateFromSpreadsheet(self.dateNameLabel.text)
        Popup(title = "Date Removed", size_hint = (0.4, 0.45)).open()
    else:
        Popup(title = "Date Not Found", size_hint = (0.4, 0.45)).open()
#Generate a popup to give more detailed date info
def expandDate(self, instance):
    """Open a popup showing full details for every venue, contact, and
    organization attached to the currently selected date.

    Shows an error popup instead when no date is selected, the database link
    is invalid, or the date cannot be found in the spreadsheet.

    Improvements over the previous revision: removed commented-out dead code
    and a stray debug print, and collapsed three copies of the
    "first entry vs. subsequent entry" append logic (which duplicated each
    detail string verbatim in both branches) into one local helper.
    """
    if self.dateNameLabel.text != "Day Info" and datacenter.linkValid is True:
        dateFound = datacenter.dateFinder(self.dateNameLabel.text)
        if dateFound is True:
            datacenter.populateDate(self.dateNameLabel.text)
            self.selectedDate = datacenter.selectDate(self.dateNameLabel.text)
            self.expandedView = RelativeLayout()
            self.expandedText = TextInput(size_hint = (1, 0.9), pos_hint = {"center_x": 0.5, "top": 1}, multiline = True)
            self.closeButton = Button(text = "Close", size_hint = (0.2, 0.075), pos_hint = {"right": 1, "bottom": 0})
            self.expandedView.add_widget(self.expandedText)
            self.expandedView.add_widget(self.closeButton)
            def appendEntry(name, details):
                #The first entry gets no separator; later entries are separated by a blank line
                if self.expandedText.text == "":
                    self.expandedText.text = name
                else:
                    self.expandedText.text += ("\n\n"+name)
                self.expandedText.text += details
            for venue in self.selectedDate.venues:
                appendEntry(venue.venueName, "\n "+venue.address+"\n "+venue.cityName+", "+venue.stateName+" "+venue.zip+"\n Phone: "+venue.phone+" Email: "+venue.email+"\n Links: "+venue.links+"\n Associated Contacts: "+venue.contacts+"\nNotes:\n"+venue.notes)
            for contact in self.selectedDate.contacts:
                appendEntry(contact.name, "\n "+contact.address+"\n "+contact.cityName+", "+contact.stateName+" "+contact.zip+"\n Phone: "+contact.phone+" Email: "+contact.email+"\n Links: "+contact.links+"\n Associated Contacts: "+contact.associations+"\nNotes:\n"+contact.notes)
            for organization in self.selectedDate.organizations:
                appendEntry(organization.organizationName, "\n "+organization.address+"\n "+organization.cityName+", "+organization.stateName+" "+organization.zip+"\n Phone: "+organization.phone+" Email: "+organization.email+"\n Links: "+organization.links+"\n Associated Contacts: "+organization.members+"\nNotes:\n"+organization.notes)
            self.popup = Popup(title = "Expanded Date", content = self.expandedView, size_hint = (0.8, 0.85))
            self.popup.open()
            self.closeButton.bind(on_press = self.popup.dismiss)
        else:
            popup = Popup(title = "Date Not Found", size_hint = (0.4, 0.45))
            popup.open()
    else:
        popup = Popup(title = "Date Not Initialized", size_hint = (0.4, 0.45))
        popup.open()
#Add the selected venue or contact on button press
def addVenueToDate(self, instance):
    """Add the venue currently shown in the venue spinner to the selected
    date and rebuild the comma-separated venue list in the popup."""
    #Nothing to add while the venue spinner has not been populated
    if not self.venueSpinner.values:
        return
    self.selectedVenue = self.selectedCity.selectVenue(self.venueSpinner.text)
    self.selectedDate.addVenue(self.selectedVenue)
    #Rebuild the displayed list from scratch so it exactly mirrors the date's venues
    self.currentVenuesText.text = ", ".join(venue.venueName for venue in self.selectedDate.venues)
def addContactToDate(self, instance):
    """Add the contact spinner's current selection to the selected date as
    either an individual contact or an organization (whichever radio button
    is active), then rebuild the comma-separated list shown in the popup.

    Improvements over the previous revision: removed leftover debug prints
    and collapsed the display-rebuild loops, which were duplicated verbatim
    in both branches, into a single shared rebuild.
    """
    if len(self.contactSpinner.values) == 0:
        return
    if self.individualRadio.active == True:
        self.selectedContact = self.selectedCity.selectContact(self.contactSpinner.text)
        self.selectedDate.addContact(self.selectedContact)
    elif self.organizationRadio.active == True:
        self.selectedOrganization = self.selectedCity.selectOrganization(self.contactSpinner.text)
        self.selectedDate.addOrganization(self.selectedOrganization)
    else:
        #Neither radio button is active: nothing was added, leave the display untouched
        return
    #Rebuild the displayed list from scratch: individual contacts first, then organizations
    names = [contact.name for contact in self.selectedDate.contacts]
    names += [organization.organizationName for organization in self.selectedDate.organizations]
    self.relevantContactsText.text = ", ".join(names)
#Likewise, remove selected venue or contact on button press
def removeVenueFromDate(self, instance):
    """Remove the venue spinner's currently selected venue from the selected
    date, then refresh the comma-separated venue list in the popup."""
    if not self.venueSpinner.values:
        return
    self.selectedVenue = self.selectedCity.selectVenue(self.venueSpinner.text)
    if self.selectedVenue.venueName not in self.selectedDate.venueNames:
        return
    #The city's venue object is a distinct instance from the one stored in the
    #date, so re-select it from the date itself before removing
    self.selectedVenue = self.selectedDate.selectVenue(self.selectedVenue.venueName)
    self.selectedDate.removeVenue(self.selectedVenue)
    self.currentVenuesText.text = ", ".join(venue.venueName for venue in self.selectedDate.venues)
def removeContactFromDate(self, instance):
    """Remove the contact spinner's current selection (individual contact or
    organization, whichever radio button is active) from the selected date,
    then rebuild the comma-separated contact list shown in the popup.

    Bug fixes over the previous revision: the organization branch never
    cleared relevantContactsText before rebuilding (so names accumulated),
    and its rebuild loops appended the just-removed selection's name on every
    iteration instead of each remaining entry's own name. Debug prints were
    also removed.
    """
    if len(self.contactSpinner.values) == 0:
        return
    removed = False
    if self.individualRadio.active == True:
        self.selectedContact = self.selectedCity.selectContact(self.contactSpinner.text)
        if self.selectedContact.name in self.selectedDate.contactNames:
            #The city's contact object is a distinct instance from the one stored
            #in the date, so re-select it from the date itself before removing
            self.selectedContact = self.selectedDate.selectContact(self.selectedContact.name)
            self.selectedDate.removeContact(self.selectedContact)
            removed = True
    elif self.organizationRadio.active == True:
        self.selectedOrganization = self.selectedCity.selectOrganization(self.contactSpinner.text)
        if self.selectedOrganization.organizationName in self.selectedDate.organizationNames:
            #Same indirection as above: point at the organization object actually stored in the date
            self.selectedOrganization = self.selectedDate.selectOrganization(self.selectedOrganization.organizationName)
            self.selectedDate.removeOrganization(self.selectedOrganization)
            removed = True
    if removed:
        #Rebuild the displayed list from scratch: individual contacts first, then organizations
        names = [contact.name for contact in self.selectedDate.contacts]
        names += [organization.organizationName for organization in self.selectedDate.organizations]
        self.relevantContactsText.text = ", ".join(names)
def submitDateData(self, instance):
    """Persist a newly created date (including its notes) to the spreadsheet
    and show a confirmation popup with a close button."""
    #Fall back to a placeholder when the notes field was left blank
    self.selectedDate.notes = self.noteInput.text if self.noteInput.text != "" else "No notes given."
    datacenter.addDateToSpreadsheet(self.dateNameLabel.text)
    datacenter.submitDate(self.selectedDate)
    confirmationContent = RelativeLayout()
    confirmationLabel = Label(text = "Date submission was successful!", size_hint_y = 0.3, pos_hint = {"center_x": 0.5, "top": 0.75})
    confirmationClose = Button(text = "Close", size_hint = (0.5, 0.3), pos_hint = {"center_x": 0.5, "bottom": 0.25})
    confirmationContent.add_widget(confirmationLabel)
    confirmationContent.add_widget(confirmationClose)
    confirmation = Popup(title = "Submission Successful", content = confirmationContent, size_hint = (0.85, 0.4))
    confirmation.open()
    confirmationClose.bind(on_press = confirmation.dismiss)
def submitEditedDateData(self, instance):
    """Persist edits to an existing date (including its notes) to the
    spreadsheet and show a confirmation popup with a close button."""
    #Fall back to a placeholder when the notes field was left blank
    self.selectedDate.notes = self.noteInput.text if self.noteInput.text != "" else "No notes given."
    datacenter.submitEditedDate(self.selectedDate)
    confirmationContent = RelativeLayout()
    confirmationLabel = Label(text = "Date edit was successful!", size_hint_y = 0.3, pos_hint = {"center_x": 0.5, "top": 0.75})
    confirmationClose = Button(text = "Close", size_hint = (0.5, 0.3), pos_hint = {"center_x": 0.5, "bottom": 0.25})
    confirmationContent.add_widget(confirmationLabel)
    confirmationContent.add_widget(confirmationClose)
    confirmation = Popup(title = "Edit Successful", content = confirmationContent, size_hint = (0.85, 0.4))
    confirmation.open()
    confirmationClose.bind(on_press = confirmation.dismiss)
#Populate the spinners on click if the link to the database is valid and set up error catching
def stateSelection(self, instance):
    """Populate the state spinner from the database and reset every
    downstream spinner (city, venue, contact) to its placeholder."""
    self.cityPicked = False
    print(self.stateSpinner.text)
    if datacenter.linkValid is not True:
        return
    datacenter.stateNames.sort()
    self.stateSpinner.values = datacenter.stateNames
    self.statePicked = True
    #Invalidate everything downstream of the state choice
    for spinner, placeholder in ((self.citySpinner, "City"), (self.venueSpinner, "Venue"), (self.contactSpinner, "Contact")):
        spinner.values = []
        spinner.text = placeholder
def citySelection(self, instance):
    """Populate the city spinner for the chosen state and reset the venue
    and contact spinners to their placeholders."""
    print(self.citySpinner.text)
    #Require a validated database link and a previously chosen, known state
    if datacenter.linkValid is not True:
        return
    if self.statePicked is not True:
        return
    if self.stateSpinner.text not in datacenter.stateNames:
        return
    self.selectedState = datacenter.selectState(self.stateSpinner.text)
    self.selectedState.cityNames.sort()
    self.citySpinner.values = self.selectedState.cityNames
    self.cityPicked = True
    #Invalidate everything downstream of the city choice
    for spinner, placeholder in ((self.venueSpinner, "Venue"), (self.contactSpinner, "Contact")):
        spinner.values = []
        spinner.text = placeholder
def venueSelection(self, instance):
    """Populate the venue spinner with the sorted venue names of the
    currently chosen city."""
    print(self.venueSpinner.text)
    #Require a chosen city and a validated database link
    if self.cityPicked is not True or datacenter.linkValid is not True:
        return
    if self.citySpinner.text not in self.selectedState.cityNames:
        return
    self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
    if self.selectedCity.cityName in self.selectedState.cityNames and len(self.selectedCity.venueNames) > 0:
        self.selectedCity.venueNames.sort()
        self.venueSpinner.values = self.selectedCity.venueNames
def contactSelection(self, instance):
    """Populate the contact spinner with either individual contact names or
    organization names for the chosen city, depending on which radio button
    is active. The spinner is always cleared first.

    Improvement over the previous revision: the individual and organization
    branches were verbatim copies differing only in the attribute read from
    the city; they are collapsed into one code path.
    """
    self.contactSpinner.values = []
    self.contactSpinner.text = "Contact"
    print(self.contactSpinner.text)
    #Both radio buttons share the same selection logic; only the attribute differs
    if self.individualRadio.active == True:
        nameAttribute = "contactNames"
    elif self.organizationRadio.active == True:
        nameAttribute = "organizationNames"
    else:
        return
    if self.cityPicked is not True or datacenter.linkValid is not True:
        return
    if self.citySpinner.text not in self.selectedState.cityNames:
        return
    self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
    names = getattr(self.selectedCity, nameAttribute)
    if self.selectedCity.cityName in self.selectedState.cityNames and len(names) > 0:
        names.sort()
        self.contactSpinner.values = names
#Provide an overall view for the Database Manager screen
class DatabaseViewer(RelativeLayout):
stateCitySorterLayout = RelativeLayout(size_hint = (0.3,0.12), pos_hint = {"center_x": 0.5, "top": 0.985})
venueSelectorLayout = RelativeLayout(size_hint = (0.4, 0.12), pos_hint = {"center_x": 0.25, "top": 0.85})
venueAlterationLayout = RelativeLayout(size_hint = (0.125, 1), pos_hint = {"right": 1, "center_y": 0.5})
updateVenueLayout = RelativeLayout(size_hint = (0.45, 0.05), pos_hint = {"center_x": 0.275, "top": .7125})
contactSelectorLayout = RelativeLayout(size_hint = (0.4, 0.12), pos_hint = {"center_x": 0.75, "top": 0.85})
contactTypeLayout = RelativeLayout(size_hint = (0.4, 1), pos_hint = {"left": 0, "center_y": 0.5})
contactAlterationLayout = RelativeLayout(size_hint = (0.125, 1), pos_hint = {"right": 1, "center_y": 0.5})
updateContactLayout = RelativeLayout(size_hint = (0.45, 0.05), pos_hint = {"center_x": 0.725, "top": .7125})
infoBoxLayout = RelativeLayout(size_hint = (0.9,0.65), pos_hint = {"center_x": 0.5, "bottom": 0.02})
locationInfoLayout = RelativeLayout(size_hint = (0.5, 1), pos_hint = {"left": 0, "center_y": 0.5})
contactInfoLayout = RelativeLayout(size_hint = (0.5, 1), pos_hint = {"right": 1, "center_y": 0.5})
def __init__(self, **kwargs):
self.pos_hint = {"center_x": 0.5, "bottom": 0}
self.selectedState = None
self.selectedCity = None
self.selectedVenue = None
self.selectedContact = None
self.selectedOrganization = None
self.statePicked = False
self.cityPicked = False
self.venuePicked = False
self.rowNum = 0
self.range = ""
super(DatabaseViewer, self).__init__(**kwargs)
#Add embedded layout design
self.add_widget(self.stateCitySorterLayout)
self.add_widget(self.venueSelectorLayout)
self.add_widget(self.updateVenueLayout)
self.add_widget(self.contactSelectorLayout)
self.add_widget(self.updateContactLayout)
self.add_widget(self.infoBoxLayout)
#Add venue selection layouts
self.venueSelectorLayout.add_widget(self.venueAlterationLayout)
#Add contact selection layouts
self.contactSelectorLayout.add_widget(self.contactTypeLayout)
self.contactSelectorLayout.add_widget(self.contactAlterationLayout)
#Add info box layouts
self.infoBoxLayout.add_widget(self.locationInfoLayout)
self.infoBoxLayout.add_widget(self.contactInfoLayout)
#Manage widgets within the state and city selector - ON PRESS FOR STATE, REFRESH DATA WITHIN DATABASE IF LINK VALIDATED
self.stateSpinner = Spinner(text= "State", size_hint_x = 0.5, pos_hint = {"left": 0, "center_y": 0.5}, on_press = self.stateSelection)
self.citySpinner = Spinner(text = "City", size_hint_x = 0.5, pos_hint = {"right": 1, "center_y": 0.5}, on_press = self.citySelection)
self.stateCitySorterLayout.add_widget(self.stateSpinner)
self.stateCitySorterLayout.add_widget(self.citySpinner)
#Manage widgets within the venue selector layout
self.venueSpinnerOption = SpinnerOption(on_press = self.updateVenueInfoBox)
self.venueSpinner = Spinner(text = "Venue", size_hint_x = 0.875, pos_hint = {"left": 0, "center_y": 0.5}, on_press = self.venueSelection)
self.venueSpinner.bind(on_text = self.updateVenueInfoBox)
self.venueSelectorLayout.add_widget(self.venueSpinner)
#Manage widgets within the venue selector's venue alteration layout
self.newVenueButton = Button(text = "New", size_hint_y = 0.33, pos_hint = {"center_x": 0.5, "top": 1})
self.newVenueButton.bind(on_press = self.newVenue)
self.editVenueButton = Button(text = "Edit", size_hint_y = 0.33, pos_hint = {"center_x": 0.5, "center_y": 0.5})
self.editVenueButton.bind(on_press = self.editVenue)
self.removeVenueButton = Button(text = "Rem", size_hint_y = 0.33, pos_hint = {"center_x": 0.5, "bottom": 0})
self.removeVenueButton.bind(on_press = self.removeVenue)
self.venueAlterationLayout.add_widget(self.newVenueButton)
self.venueAlterationLayout.add_widget(self.editVenueButton)
self.venueAlterationLayout.add_widget(self.removeVenueButton)
#Manage button to update venue text
self.updateVenueInfoBoxButton = Button(text = "Update Venue Info Below:", on_press = self.updateVenueInfoBox)
self.updateVenueLayout.add_widget(self.updateVenueInfoBoxButton)
#Manage widgets within the contact selector layout
self.contactSelectorSpinner = Spinner(text = "Contact", size_hint_x = 0.5, pos_hint = {"center_x": 0.625, "center_y": 0.5}, on_press = self.contactSelection)
self.contactSelectorLayout.add_widget(self.contactSelectorSpinner)
#Manage widgets within the contact selector's contact type layout
self.contactTypeLayout.add_widget(Label(text = "Individual", size_hint = (0.5, 0.5), pos_hint = {"left": 0.4, "top": 1}))
self.contactTypeLayout.add_widget(Label(text = "Organization", size_hint = (0.5, 0.5), pos_hint = {"left": 0.4, "bottom": 0}))
self.individualRadio = CheckBox(size_hint = (0.25, 0.5), pos_hint = {"right": 0.9, "top": 1})
self.organizationRadio = CheckBox(size_hint = (0.25, 0.5), pos_hint = {"right": 0.9, "bottom": 0})
self.individualRadio.text = "Individual"
self.individualRadio.active = True
self.individualRadio.group = "ContactSelection"
self.organizationRadio.text = "Organization"
self.organizationRadio.active = False
self.organizationRadio.group = "ContactSelection"
self.contactTypeLayout.add_widget(self.individualRadio)
self.contactTypeLayout.add_widget(self.organizationRadio)
#Manage button to update venue text
self.updateContactInfoBoxButton = Button(text = "Update Contact Info Below:", on_press = self.updateContactInfoBox)
self.updateContactLayout.add_widget(self.updateContactInfoBoxButton)
#Manage widgets within the contact selector's contact alteration layout
self.newContactButton = Button(text = "New", size_hint_y = 0.33, pos_hint = {"center_x": 0.5, "top": 1})
self.newContactButton.bind(on_press = self.newContact)
self.editContactButton = Button(text = "Edit", size_hint_y = 0.33, pos_hint = {"center_x": 0.5, "center_y": 0.5})
self.editContactButton.bind(on_press = self.editContact)
self.removeContactButton = Button(text = "Rem", size_hint_y = 0.33, pos_hint = {"center_x": 0.5, "bottom": 0})
self.removeContactButton.bind(on_press = self.removeContact)
self.contactAlterationLayout.add_widget(self.newContactButton)
self.contactAlterationLayout.add_widget(self.editContactButton)
self.contactAlterationLayout.add_widget(self.removeContactButton)
#Manage widgets within the info box's location info layout
self.venueNameLabel = Label(text = "Venue:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 1})
self.venueNameText = Label(size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 1})
self.stateVenueNameLabel = Label(text = "State:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.94})
self.stateVenueNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.94})
self.cityVenueNameLabel = Label(text = "City:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.88})
self.cityVenueNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.88})
self.addressVenueNameLabel = Label(text = "Address:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.82})
self.addressVenueNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.82})
self.zipVenueNameLabel = Label(text = "Zip:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.76})
self.zipVenueNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.76})
self.phoneVenueNameLabel = Label(text = "Phone:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.7})
self.phoneVenueNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.7})
self.emailVenueNameLabel = Label(text = "Email:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.64})
self.emailVenueNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.64})
self.linksVenueNameLabel = Label(text = "Links:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.58})
self.linksVenueScrollView = ScrollView(size_hint = (0.8, 0.12), pos_hint = {"right": 1, "top": 0.58})
self.linksVenueLabel = Label(size_hint = (1, None))
self.linksVenueLabel.bind(width = lambda *x: self.linksVenueLabel.setter("text_size")(self.linksVenueLabel, (self.linksVenueLabel.width, None)), texture_size = lambda *x: self.linksVenueLabel.setter("height")(self.linksVenueLabel, self.linksVenueLabel.texture_size[1]))
self.linksVenueScrollView.add_widget(self.linksVenueLabel)
self.contactsVenueNameLabel = Label(text = "Contacts:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.46})
self.contactsVenueScrollView = ScrollView(size_hint = (0.8, 0.12), pos_hint = {"right": 1, "top": 0.46})
self.contactsVenueLabel = Label(size_hint = (1, None))
self.contactsVenueLabel.bind(width = lambda *x: self.contactsVenueLabel.setter("text_size")(self.contactsVenueLabel, (self.contactsVenueLabel.width, None)), texture_size = lambda *x: self.contactsVenueLabel.setter("height")(self.contactsVenueLabel, self.contactsVenueLabel.texture_size[1]))
self.contactsVenueScrollView.add_widget(self.contactsVenueLabel)
self.notesVenueNameLabel = Label(text = "Notes:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.34})
self.notesVenueScrollView = ScrollView(size_hint = (0.8, 0.24), pos_hint = {"right": 1, "top": 0.34})
self.notesVenueLabel = Label(size_hint = (1, None))
self.notesVenueLabel.bind(width = lambda *x: self.notesVenueLabel.setter("text_size")(self.notesVenueLabel, (self.notesVenueLabel.width, None)), texture_size = lambda *x: self.notesVenueLabel.setter("height")(self.notesVenueLabel, self.notesVenueLabel.texture_size[1]))
self.notesVenueScrollView.add_widget(self.notesVenueLabel)
self.locationInfoLayout.add_widget(self.venueNameLabel)
self.locationInfoLayout.add_widget(self.venueNameText)
self.locationInfoLayout.add_widget(self.stateVenueNameLabel)
self.locationInfoLayout.add_widget(self.stateVenueNameText)
self.locationInfoLayout.add_widget(self.cityVenueNameLabel)
self.locationInfoLayout.add_widget(self.cityVenueNameText)
self.locationInfoLayout.add_widget(self.addressVenueNameLabel)
self.locationInfoLayout.add_widget(self.addressVenueNameText)
self.locationInfoLayout.add_widget(self.zipVenueNameLabel)
self.locationInfoLayout.add_widget(self.zipVenueNameText)
self.locationInfoLayout.add_widget(self.phoneVenueNameLabel)
self.locationInfoLayout.add_widget(self.phoneVenueNameText)
self.locationInfoLayout.add_widget(self.emailVenueNameLabel)
self.locationInfoLayout.add_widget(self.emailVenueNameText)
self.locationInfoLayout.add_widget(self.linksVenueNameLabel)
self.locationInfoLayout.add_widget(self.linksVenueScrollView)
self.locationInfoLayout.add_widget(self.contactsVenueNameLabel)
self.locationInfoLayout.add_widget(self.contactsVenueScrollView)
self.locationInfoLayout.add_widget(self.notesVenueNameLabel)
self.locationInfoLayout.add_widget(self.notesVenueScrollView)
#Manage widgets within the info box's contact info layout
self.contactNameLabel = Label(text = "Contact:", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 1})
self.contactNameText = Label(size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 1})
self.stateContactNameLabel = Label(text = "State:", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.94})
self.stateContactNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.94})
self.cityContactNameLabel = Label(text = "City:", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.88})
self.cityContactNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.88})
self.addressContactNameLabel = Label(text = "Address:", halign = "center", valign = "middle", text_size = self.size, size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.82})
self.addressContactNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.82})
self.zipContactNameLabel = Label(text = "Zip:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.76})
self.zipContactNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.76})
self.phoneContactNameLabel = Label(text = "Phone:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.7})
self.phoneContactNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.7})
self.emailContactNameLabel = Label(text = "Email:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.64})
self.emailContactNameText = Label(halign = "left", size_hint = (0.8, 0.06), pos_hint = {"right": 1, "top": 0.64})
self.linksContactNameLabel = Label(text = "Links:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.58})
self.linksContactScrollView = ScrollView(size_hint = (0.8, 0.12), pos_hint = {"right": 1, "top": 0.58})
self.linksContactLabel = Label(size_hint = (1, None))
self.linksContactLabel.bind(width = lambda *x: self.linksContactLabel.setter("text_size")(self.linksContactLabel, (self.linksContactLabel.width, None)), texture_size = lambda *x: self.linksContactLabel.setter("height")(self.linksContactLabel, self.linksContactLabel.texture_size[1]))
self.linksContactScrollView.add_widget(self.linksContactLabel)
self.groupsContactNameLabel = Label(text = "Assoc.:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.46})
self.groupsContactScrollView = ScrollView(size_hint = (0.8, 0.12), pos_hint = {"right": 1, "top": 0.46})
self.groupsContactLabel = Label(size_hint = (1, None))
self.groupsContactLabel.bind(width = lambda *x: self.groupsContactLabel.setter("text_size")(self.groupsContactLabel, (self.groupsContactLabel.width, None)), texture_size = lambda *x: self.groupsContactLabel.setter("height")(self.groupsContactLabel, self.groupsContactLabel.texture_size[1]))
self.groupsContactScrollView.add_widget(self.groupsContactLabel)
self.notesContactNameLabel = Label(text = "Notes:", halign = "left", size_hint = (0.2, 0.06), pos_hint = {"left": 0, "top": 0.34})
self.notesContactScrollView = ScrollView(size_hint = (0.8, 0.24), pos_hint = {"right": 1, "top": 0.34})
self.notesContactLabel = Label(size_hint = (1, None))
self.notesContactLabel.bind(width = lambda *x: self.notesContactLabel.setter("text_size")(self.notesContactLabel, (self.notesContactLabel.width, None)), texture_size = lambda *x: self.notesContactLabel.setter("height")(self.notesContactLabel, self.notesContactLabel.texture_size[1]))
self.notesContactScrollView.add_widget(self.notesContactLabel)
self.contactInfoLayout.add_widget(self.contactNameLabel)
self.contactInfoLayout.add_widget(self.contactNameText)
self.contactInfoLayout.add_widget(self.stateContactNameLabel)
self.contactInfoLayout.add_widget(self.stateContactNameText)
self.contactInfoLayout.add_widget(self.cityContactNameLabel)
self.contactInfoLayout.add_widget(self.cityContactNameText)
self.contactInfoLayout.add_widget(self.addressContactNameLabel)
self.contactInfoLayout.add_widget(self.addressContactNameText)
self.contactInfoLayout.add_widget(self.zipContactNameLabel)
self.contactInfoLayout.add_widget(self.zipContactNameText)
self.contactInfoLayout.add_widget(self.phoneContactNameLabel)
self.contactInfoLayout.add_widget(self.phoneContactNameText)
self.contactInfoLayout.add_widget(self.emailContactNameLabel)
self.contactInfoLayout.add_widget(self.emailContactNameText)
self.contactInfoLayout.add_widget(self.linksContactNameLabel)
self.contactInfoLayout.add_widget(self.linksContactScrollView)
self.contactInfoLayout.add_widget(self.groupsContactNameLabel)
self.contactInfoLayout.add_widget(self.groupsContactScrollView)
self.contactInfoLayout.add_widget(self.notesContactNameLabel)
self.contactInfoLayout.add_widget(self.notesContactScrollView)
#Provide functionality for button alterations - bring up popups for new data and for editing data, provide an alert to make sure user wants to remove data
def newVenue(self, instance):
    """Open the "New Venue" entry popup.

    Builds a form of labelled TextInput rows inside a RelativeLayout; each
    row is a BoxLayout placed with pos_hint.  Every TextInput is stored on
    self (self.stateInput, self.cityInput, ...) so that submitVenueData()
    can read the values when the Submit button is pressed.

    instance: the Button that triggered the callback (unused).
    """
    #Create the new overall layout of the popup to be inserted in as content
    newVenuePopupLayout = RelativeLayout()
    #Create new lines for input...
    #...State and city...
    stateCityLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.9})
    stateLabel = Label(text = "State:", size_hint_x = 0.1)
    self.stateInput = TextInput(size_hint_x = 0.4)
    cityLabel = Label(text = "City:", size_hint_x = 0.1)
    self.cityInput = TextInput(size_hint_x = 0.4)
    stateCityLayout.add_widget(stateLabel)
    stateCityLayout.add_widget(self.stateInput)
    stateCityLayout.add_widget(cityLabel)
    stateCityLayout.add_widget(self.cityInput)
    newVenuePopupLayout.add_widget(stateCityLayout)
    #...Name (topmost row; add-order only affects draw order, not position)...
    nameLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 1})
    nameLabel = Label(text = "Name:", size_hint_x = 0.1)
    self.nameInput = TextInput(size_hint_x = 0.9)
    nameLayout.add_widget(nameLabel)
    nameLayout.add_widget(self.nameInput)
    newVenuePopupLayout.add_widget(nameLayout)
    #...Address and zip code...
    addressLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.8})
    addressLabel = Label(text = "Address:", size_hint_x = 0.1)
    self.addressInput = TextInput(size_hint_x = 0.6)
    zipLabel = Label(text = "Zip Code:", size_hint_x = 0.1)
    self.zipInput = TextInput(size_hint_x = 0.2)
    addressLayout.add_widget(addressLabel)
    addressLayout.add_widget(self.addressInput)
    addressLayout.add_widget(zipLabel)
    addressLayout.add_widget(self.zipInput)
    newVenuePopupLayout.add_widget(addressLayout)
    #...Phone #...
    phoneLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.7})
    phoneLabel = Label(text = "Phone #:", size_hint_x = 0.1)
    self.phoneInput = TextInput(size_hint_x = 0.9)
    phoneLayout.add_widget(phoneLabel)
    phoneLayout.add_widget(self.phoneInput)
    newVenuePopupLayout.add_widget(phoneLayout)
    #...Links...
    linksLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.6})
    linksLabel = Label(text = "Links (separate with commas):", size_hint_x = 0.4)
    self.linksInput = TextInput(size_hint_x = 0.6)
    linksLayout.add_widget(linksLabel)
    linksLayout.add_widget(self.linksInput)
    newVenuePopupLayout.add_widget(linksLayout)
    #...Contacts...
    contactLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.5})
    contactLabel = Label(text = "Contacts (separate with commas):", size_hint_x = 0.4)
    self.contactInput = TextInput(size_hint_x = 0.6)
    contactLayout.add_widget(contactLabel)
    contactLayout.add_widget(self.contactInput)
    newVenuePopupLayout.add_widget(contactLayout)
    #...Email...
    emailLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.4})
    emailLabel = Label(text = "Email:", size_hint_x = 0.1)
    self.emailInput = TextInput(size_hint_x = 0.9)
    emailLayout.add_widget(emailLabel)
    emailLayout.add_widget(self.emailInput)
    newVenuePopupLayout.add_widget(emailLayout)
    #...notes on the venue itself (taller, multiline row)...
    notesLayout = BoxLayout(size_hint_y = 0.2, pos_hint = {"center_x": 0.5, "top": 0.3})
    notesLabel = Label(text = "Notes:", size_hint_x = 0.1)
    self.notesInput = TextInput(size_hint_x = 0.9, multiline = True)
    notesLayout.add_widget(notesLabel)
    notesLayout.add_widget(self.notesInput)
    newVenuePopupLayout.add_widget(notesLayout)
    #...and finally buttons to submit to sheets or cancel.
    #Fix: "bottom" is not a pos_hint key Kivy's layout code recognizes, so
    #the original {"bottom": 0} was silently ignored (the row only landed
    #at the bottom because y defaults to 0).  "y": 0 states it explicitly.
    buttonLayout = BoxLayout(size_hint = (0.3, 0.09), pos_hint = {"right": 1, "y": 0})
    submitButton = Button(text = "Submit", size_hint_x = 0.5)
    cancelButton = Button(text = "Close", size_hint_x = 0.5)
    buttonLayout.add_widget(submitButton)
    buttonLayout.add_widget(cancelButton)
    newVenuePopupLayout.add_widget(buttonLayout)
    self.newVenuePopup = Popup(title = "New Venue", content = newVenuePopupLayout, size_hint = (0.85, 0.8))
    self.newVenuePopup.open()
    submitButton.bind(on_press = self.submitVenueData)
    cancelButton.bind(on_press = self.newVenuePopup.dismiss)
def editVenue(self, instance):
    """Open a popup pre-filled with the selected venue's data for editing.

    Guard clauses replace the original nested ifs: nothing happens unless
    the datacenter link is valid and the State/City/Venue spinners each
    name an existing entry.  On success this records self.rowNum and
    self.range (the sheet range the edit will be written back to) and
    opens the form; submitEditedVenueData() reads the TextInputs stored
    on self when Submit is pressed.

    instance: the Button that triggered the callback (unused).
    """
    if datacenter.linkValid is not True:
        return
    if self.stateSpinner.text not in datacenter.stateNames:
        return
    self.selectedState = datacenter.selectState(self.stateSpinner.text)
    if self.citySpinner.text not in self.selectedState.cityNames:
        return
    self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
    if self.venueSpinner.text not in self.selectedCity.venueNames:
        return
    self.selectedVenue = self.selectedCity.selectVenue(self.venueSpinner.text)
    self.rowNum = datacenter.obtainVenueRowNumber(self.stateSpinner.text, self.citySpinner.text, self.venueSpinner.text)
    #A1-notation range of the venue's row on the "Venues" sheet (columns A-J)
    self.range = ("Venues!A" + str(self.rowNum) + ":J" + str(self.rowNum))
    print(self.range)
    #Create the new overall layout of the popup to be inserted in as content
    newVenuePopupLayout = RelativeLayout()
    #Create new lines for input, each pre-filled from the selected venue...
    #...State and city...
    stateCityLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.9})
    stateLabel = Label(text = "State:", size_hint_x = 0.1)
    self.stateInput = TextInput(text = self.selectedState.stateName, size_hint_x = 0.4)
    cityLabel = Label(text = "City:", size_hint_x = 0.1)
    self.cityInput = TextInput(text = self.selectedCity.cityName, size_hint_x = 0.4)
    stateCityLayout.add_widget(stateLabel)
    stateCityLayout.add_widget(self.stateInput)
    stateCityLayout.add_widget(cityLabel)
    stateCityLayout.add_widget(self.cityInput)
    newVenuePopupLayout.add_widget(stateCityLayout)
    #...Name...
    nameLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 1})
    nameLabel = Label(text = "Name:", size_hint_x = 0.1)
    self.nameInput = TextInput(text = self.selectedVenue.venueName, size_hint_x = 0.9)
    nameLayout.add_widget(nameLabel)
    nameLayout.add_widget(self.nameInput)
    newVenuePopupLayout.add_widget(nameLayout)
    #...Address and zip code...
    addressLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.8})
    addressLabel = Label(text = "Address:", size_hint_x = 0.1)
    self.addressInput = TextInput(text = self.selectedVenue.address, size_hint_x = 0.6)
    zipLabel = Label(text = "Zip Code:", size_hint_x = 0.1)
    self.zipInput = TextInput(text = self.selectedVenue.zip, size_hint_x = 0.2)
    addressLayout.add_widget(addressLabel)
    addressLayout.add_widget(self.addressInput)
    addressLayout.add_widget(zipLabel)
    addressLayout.add_widget(self.zipInput)
    newVenuePopupLayout.add_widget(addressLayout)
    #...Phone #...
    phoneLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.7})
    phoneLabel = Label(text = "Phone #:", size_hint_x = 0.1)
    self.phoneInput = TextInput(text = self.selectedVenue.phone, size_hint_x = 0.9)
    phoneLayout.add_widget(phoneLabel)
    phoneLayout.add_widget(self.phoneInput)
    newVenuePopupLayout.add_widget(phoneLayout)
    #...Links...
    linksLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.6})
    linksLabel = Label(text = "Links (separate with commas):", size_hint_x = 0.4)
    self.linksInput = TextInput(text = self.selectedVenue.links, size_hint_x = 0.6)
    linksLayout.add_widget(linksLabel)
    linksLayout.add_widget(self.linksInput)
    newVenuePopupLayout.add_widget(linksLayout)
    #...Contacts...
    contactLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.5})
    contactLabel = Label(text = "Contacts (separate with commas):", size_hint_x = 0.4)
    self.contactInput = TextInput(text = self.selectedVenue.contacts, size_hint_x = 0.6)
    contactLayout.add_widget(contactLabel)
    contactLayout.add_widget(self.contactInput)
    newVenuePopupLayout.add_widget(contactLayout)
    #...Email...
    emailLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.4})
    emailLabel = Label(text = "Email:", size_hint_x = 0.1)
    self.emailInput = TextInput(text = self.selectedVenue.email, size_hint_x = 0.9)
    emailLayout.add_widget(emailLabel)
    emailLayout.add_widget(self.emailInput)
    newVenuePopupLayout.add_widget(emailLayout)
    #...notes on the venue itself...
    notesLayout = BoxLayout(size_hint_y = 0.2, pos_hint = {"center_x": 0.5, "top": 0.3})
    notesLabel = Label(text = "Notes:", size_hint_x = 0.1)
    self.notesInput = TextInput(text = self.selectedVenue.notes, size_hint_x = 0.9, multiline = True)
    notesLayout.add_widget(notesLabel)
    notesLayout.add_widget(self.notesInput)
    newVenuePopupLayout.add_widget(notesLayout)
    #...and finally buttons to submit to sheets or cancel.
    #Fix: "bottom" is not a valid Kivy pos_hint key (it was silently
    #ignored); "y": 0 pins the button row to the bottom explicitly.
    buttonLayout = BoxLayout(size_hint = (0.3, 0.09), pos_hint = {"right": 1, "y": 0})
    submitButton = Button(text = "Submit", size_hint_x = 0.5)
    cancelButton = Button(text = "Close", size_hint_x = 0.5)
    buttonLayout.add_widget(submitButton)
    buttonLayout.add_widget(cancelButton)
    newVenuePopupLayout.add_widget(buttonLayout)
    #Fix: this popup edits an existing venue but was titled "New Venue"
    self.newVenuePopup = Popup(title = "Edit Venue", content = newVenuePopupLayout, size_hint = (0.85, 0.8))
    self.newVenuePopup.open()
    submitButton.bind(on_press = self.submitEditedVenueData)
    cancelButton.bind(on_press = self.newVenuePopup.dismiss)
def removeVenue(self, instance):
    """Delete the currently selected venue's spreadsheet row.

    Silently does nothing unless the datacenter link is valid and the
    State/City/Venue spinners each name an existing entry.  On success the
    row is removed via the datacenter and the spinners are reset to their
    placeholder captions.

    instance: the Button that triggered the callback (unused).
    """
    if datacenter.linkValid is not True:
        return
    chosenState = self.stateSpinner.text
    if chosenState not in datacenter.stateNames:
        return
    self.selectedState = datacenter.selectState(chosenState)
    chosenCity = self.citySpinner.text
    if chosenCity not in self.selectedState.cityNames:
        return
    self.selectedCity = self.selectedState.selectCity(chosenCity)
    chosenVenue = self.venueSpinner.text
    if chosenVenue not in self.selectedCity.venueNames:
        return
    self.selectedVenue = self.selectedCity.selectVenue(chosenVenue)
    self.rowNum = datacenter.obtainVenueRowNumber(chosenState, chosenCity, chosenVenue)
    self.range = ("Venues!A" + str(self.rowNum) + ":J" + str(self.rowNum))
    print(self.range)
    #presumably removeVenueRow expects a zero-based index, one less than
    #the sheet row number - TODO confirm against the datacenter API
    self.rowNum -= 1
    datacenter.removeVenueRow(self.rowNum)
    self.stateSpinner.text = "State"
    self.citySpinner.text = "City"
    self.venueSpinner.text = "Venue"
def newContact(self, instance):
    """Open a blank contact-entry popup for the selected contact type.

    The Individual/Organization radio buttons choose the form variant
    (they differ only in title, one row caption, and the submit handler);
    if neither radio is active an error popup is shown instead.  The two
    formerly duplicated ~75-line branches now share _openNewContactPopup.

    instance: the Button that triggered the callback (unused).
    """
    if self.individualRadio.active == True:
        self._openNewContactPopup("New Individual", "Associations (separate with commas):", self.submitIndividualData)
    elif self.organizationRadio.active == True:
        self._openNewContactPopup("New Organization", "Members (separate by commas):", self.submitOrganizationData)
    else:
        #Neither radio button is active - tell the user rather than fail silently
        errorPopup = Popup(title = "Invalid Selection", content = (Label(text = "No contact choice was selected - make sure you choose either an individual or an organization.")), size_hint = (0.85, 0.4))
        errorPopup.open()
def _openNewContactPopup(self, popupTitle, groupLabelText, submitHandler):
    """Build and open one blank contact form popup.

    popupTitle     -- window title ("New Individual" / "New Organization")
    groupLabelText -- caption for the associations/members row
    submitHandler  -- callback bound to the Submit button

    Every TextInput is stored on self (self.stateInput, ...) so the
    submit handlers can read the entered values.
    """
    #Create the new overall layout of the popup to be inserted in as content
    newContactPopupLayout = RelativeLayout()
    #Create new lines for input...
    #...State and city...
    stateCityLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.9})
    stateLabel = Label(text = "State:", size_hint_x = 0.1)
    self.stateInput = TextInput(size_hint_x = 0.4)
    cityLabel = Label(text = "City:", size_hint_x = 0.1)
    self.cityInput = TextInput(size_hint_x = 0.4)
    stateCityLayout.add_widget(stateLabel)
    stateCityLayout.add_widget(self.stateInput)
    stateCityLayout.add_widget(cityLabel)
    stateCityLayout.add_widget(self.cityInput)
    newContactPopupLayout.add_widget(stateCityLayout)
    #...Name...
    nameLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 1})
    nameLabel = Label(text = "Name:", size_hint_x = 0.1)
    self.nameInput = TextInput(size_hint_x = 0.9)
    nameLayout.add_widget(nameLabel)
    nameLayout.add_widget(self.nameInput)
    newContactPopupLayout.add_widget(nameLayout)
    #...Address and zip code...
    addressLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.8})
    addressLabel = Label(text = "Address:", size_hint_x = 0.1)
    self.addressInput = TextInput(size_hint_x = 0.6)
    zipLabel = Label(text = "Zip Code:", size_hint_x = 0.1)
    self.zipInput = TextInput(size_hint_x = 0.2)
    addressLayout.add_widget(addressLabel)
    addressLayout.add_widget(self.addressInput)
    addressLayout.add_widget(zipLabel)
    addressLayout.add_widget(self.zipInput)
    newContactPopupLayout.add_widget(addressLayout)
    #...Phone #...
    phoneLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.7})
    phoneLabel = Label(text = "Phone #:", size_hint_x = 0.1)
    self.phoneInput = TextInput(size_hint_x = 0.9)
    phoneLayout.add_widget(phoneLabel)
    phoneLayout.add_widget(self.phoneInput)
    newContactPopupLayout.add_widget(phoneLayout)
    #...Links...
    linksLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.6})
    linksLabel = Label(text = "Links (separate with commas):", size_hint_x = 0.4)
    self.linksInput = TextInput(size_hint_x = 0.6)
    linksLayout.add_widget(linksLabel)
    linksLayout.add_widget(self.linksInput)
    newContactPopupLayout.add_widget(linksLayout)
    #...Associations or members, depending on the contact type...
    contactLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.5})
    contactLabel = Label(text = groupLabelText, size_hint_x = 0.4)
    self.contactInput = TextInput(size_hint_x = 0.6)
    contactLayout.add_widget(contactLabel)
    contactLayout.add_widget(self.contactInput)
    newContactPopupLayout.add_widget(contactLayout)
    #...Email...
    emailLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.4})
    emailLabel = Label(text = "Email:", size_hint_x = 0.1)
    self.emailInput = TextInput(size_hint_x = 0.9)
    emailLayout.add_widget(emailLabel)
    emailLayout.add_widget(self.emailInput)
    newContactPopupLayout.add_widget(emailLayout)
    #...notes on the contact itself...
    notesLayout = BoxLayout(size_hint_y = 0.2, pos_hint = {"center_x": 0.5, "top": 0.3})
    notesLabel = Label(text = "Notes:", size_hint_x = 0.1)
    self.notesInput = TextInput(size_hint_x = 0.9, multiline = True)
    notesLayout.add_widget(notesLabel)
    notesLayout.add_widget(self.notesInput)
    newContactPopupLayout.add_widget(notesLayout)
    #...and finally buttons to submit to sheets or cancel.
    #Fix: "bottom" is not a valid Kivy pos_hint key (it was silently
    #ignored); "y": 0 pins the button row to the bottom explicitly.
    buttonLayout = BoxLayout(size_hint = (0.3, 0.09), pos_hint = {"right": 1, "y": 0})
    submitButton = Button(text = "Submit", size_hint_x = 0.5)
    cancelButton = Button(text = "Close", size_hint_x = 0.5)
    buttonLayout.add_widget(submitButton)
    buttonLayout.add_widget(cancelButton)
    newContactPopupLayout.add_widget(buttonLayout)
    self.newContactPopup = Popup(title = popupTitle, content = newContactPopupLayout, size_hint = (0.85, 0.8))
    self.newContactPopup.open()
    submitButton.bind(on_press = submitHandler)
    cancelButton.bind(on_press = self.newContactPopup.dismiss)
def editContact(self, instance):
    """Open a popup pre-filled with the selected contact's data for editing.

    Each radio branch validates the datacenter link and the spinner
    selections with guard clauses (mirroring the original nested ifs),
    records self.rowNum / self.range for the subsequent sheet update, and
    then builds the shared form via _openEditContactPopup.

    Bug fixed: the organization branch previously left the Zip field
    blank (every other field was pre-filled), so saving an edit would
    wipe the stored zip code.

    instance: the Button that triggered the callback (unused).
    """
    if self.individualRadio.active == True:
        if datacenter.linkValid is not True:
            return
        if self.stateSpinner.text not in datacenter.stateNames:
            return
        self.selectedState = datacenter.selectState(self.stateSpinner.text)
        if self.citySpinner.text not in self.selectedState.cityNames:
            return
        self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
        if self.contactSelectorSpinner.text not in self.selectedCity.contactNames:
            return
        self.selectedContact = self.selectedCity.selectContact(self.contactSelectorSpinner.text)
        self.rowNum = datacenter.obtainIndividualRowNumber(self.stateSpinner.text, self.citySpinner.text, self.contactSelectorSpinner.text)
        #A1-notation range of the contact's row (columns A-J)
        self.range = ("Individual Contacts!A" + str(self.rowNum) + ":J" + str(self.rowNum))
        print(self.range)
        person = self.selectedContact
        self._openEditContactPopup("Edit Individual", "Associations (separate with commas):",
            person.name, person.address, person.zip, person.phone, person.links,
            person.associations, person.email, person.notes, self.submitEditedIndividualData)
    elif self.organizationRadio.active == True:
        if datacenter.linkValid is not True:
            return
        if self.stateSpinner.text not in datacenter.stateNames:
            return
        self.selectedState = datacenter.selectState(self.stateSpinner.text)
        if self.citySpinner.text not in self.selectedState.cityNames:
            return
        self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
        if self.contactSelectorSpinner.text not in self.selectedCity.organizationNames:
            return
        self.selectedOrganization = self.selectedCity.selectOrganization(self.contactSelectorSpinner.text)
        self.rowNum = datacenter.obtainOrganizationRowNumber(self.stateSpinner.text, self.citySpinner.text, self.contactSelectorSpinner.text)
        self.range = ("Organizational Contacts!A" + str(self.rowNum) + ":J" + str(self.rowNum))
        print(self.range)
        org = self.selectedOrganization
        #getattr guards the zip fix in case Organization lacks a .zip
        #attribute - TODO confirm the attribute name on the datacenter model
        self._openEditContactPopup("Edit Organization", "Members (separate by commas):",
            org.organizationName, org.address, getattr(org, "zip", ""), org.phone, org.links,
            org.members, org.email, org.notes, self.submitEditedOrganizationData)
def _openEditContactPopup(self, popupTitle, groupLabelText, name, address, zipCode, phone, links, group, email, notes, submitHandler):
    """Build and open one pre-filled contact-editing popup.

    popupTitle / groupLabelText / submitHandler select the individual or
    organization variant; the remaining arguments are the current field
    values used to pre-fill the TextInputs.  Every TextInput is stored on
    self so the submit handlers can read the edited values.
    """
    #Create the new overall layout of the popup to be inserted in as content
    newContactPopupLayout = RelativeLayout()
    #Create new lines for input, each pre-filled from the selected contact...
    #...State and city...
    stateCityLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.9})
    stateLabel = Label(text = "State:", size_hint_x = 0.1)
    self.stateInput = TextInput(text = self.selectedState.stateName, size_hint_x = 0.4)
    cityLabel = Label(text = "City:", size_hint_x = 0.1)
    self.cityInput = TextInput(text = self.selectedCity.cityName, size_hint_x = 0.4)
    stateCityLayout.add_widget(stateLabel)
    stateCityLayout.add_widget(self.stateInput)
    stateCityLayout.add_widget(cityLabel)
    stateCityLayout.add_widget(self.cityInput)
    newContactPopupLayout.add_widget(stateCityLayout)
    #...Name...
    nameLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 1})
    nameLabel = Label(text = "Name:", size_hint_x = 0.1)
    self.nameInput = TextInput(text = name, size_hint_x = 0.9)
    nameLayout.add_widget(nameLabel)
    nameLayout.add_widget(self.nameInput)
    newContactPopupLayout.add_widget(nameLayout)
    #...Address and zip code...
    addressLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.8})
    addressLabel = Label(text = "Address:", size_hint_x = 0.1)
    self.addressInput = TextInput(text = address, size_hint_x = 0.6)
    zipLabel = Label(text = "Zip Code:", size_hint_x = 0.1)
    self.zipInput = TextInput(text = zipCode, size_hint_x = 0.2)
    addressLayout.add_widget(addressLabel)
    addressLayout.add_widget(self.addressInput)
    addressLayout.add_widget(zipLabel)
    addressLayout.add_widget(self.zipInput)
    newContactPopupLayout.add_widget(addressLayout)
    #...Phone #...
    phoneLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.7})
    phoneLabel = Label(text = "Phone #:", size_hint_x = 0.1)
    self.phoneInput = TextInput(text = phone, size_hint_x = 0.9)
    phoneLayout.add_widget(phoneLabel)
    phoneLayout.add_widget(self.phoneInput)
    newContactPopupLayout.add_widget(phoneLayout)
    #...Links...
    linksLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.6})
    linksLabel = Label(text = "Links (separate with commas):", size_hint_x = 0.4)
    self.linksInput = TextInput(text = links, size_hint_x = 0.6)
    linksLayout.add_widget(linksLabel)
    linksLayout.add_widget(self.linksInput)
    newContactPopupLayout.add_widget(linksLayout)
    #...Associations or members, depending on the contact type...
    contactLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.5})
    contactLabel = Label(text = groupLabelText, size_hint_x = 0.4)
    self.contactInput = TextInput(text = group, size_hint_x = 0.6)
    contactLayout.add_widget(contactLabel)
    contactLayout.add_widget(self.contactInput)
    newContactPopupLayout.add_widget(contactLayout)
    #...Email...
    emailLayout = BoxLayout(size_hint_y = 0.1, pos_hint = {"center_x": 0.5, "top": 0.4})
    emailLabel = Label(text = "Email:", size_hint_x = 0.1)
    self.emailInput = TextInput(text = email, size_hint_x = 0.9)
    emailLayout.add_widget(emailLabel)
    emailLayout.add_widget(self.emailInput)
    newContactPopupLayout.add_widget(emailLayout)
    #...notes on the contact itself...
    notesLayout = BoxLayout(size_hint_y = 0.2, pos_hint = {"center_x": 0.5, "top": 0.3})
    notesLabel = Label(text = "Notes:", size_hint_x = 0.1)
    self.notesInput = TextInput(text = notes, size_hint_x = 0.9, multiline = True)
    notesLayout.add_widget(notesLabel)
    notesLayout.add_widget(self.notesInput)
    newContactPopupLayout.add_widget(notesLayout)
    #...and finally buttons to submit to sheets or cancel.
    #Fix: "bottom" is not a valid Kivy pos_hint key (it was silently
    #ignored); "y": 0 pins the button row to the bottom explicitly.
    buttonLayout = BoxLayout(size_hint = (0.3, 0.09), pos_hint = {"right": 1, "y": 0})
    submitButton = Button(text = "Submit", size_hint_x = 0.5)
    cancelButton = Button(text = "Close", size_hint_x = 0.5)
    buttonLayout.add_widget(submitButton)
    buttonLayout.add_widget(cancelButton)
    newContactPopupLayout.add_widget(buttonLayout)
    #Fix: these popups edit existing contacts but were titled "New ..."
    self.newContactPopup = Popup(title = popupTitle, content = newContactPopupLayout, size_hint = (0.85, 0.8))
    self.newContactPopup.open()
    submitButton.bind(on_press = submitHandler)
    cancelButton.bind(on_press = self.newContactPopup.dismiss)
def removeContact(self, instance):
    """Delete the currently selected contact row from the spreadsheet.

    Works for either the individual or the organization radio selection.
    Silently does nothing when the database link is invalid or when the
    spinner texts do not resolve to a known state/city/contact.
    """
    if self.individualRadio.active:
        if datacenter.linkValid is not True:
            return
        if self.stateSpinner.text not in datacenter.stateNames:
            return
        self.selectedState = datacenter.selectState(self.stateSpinner.text)
        if self.citySpinner.text not in self.selectedState.cityNames:
            return
        self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
        if self.contactSelectorSpinner.text not in self.selectedCity.contactNames:
            return
        self.selectedContact = self.selectedCity.selectContact(self.contactSelectorSpinner.text)
        # Row numbers coming back from the datacenter are 1-based (A1 notation);
        # the remove call expects a 0-based index, hence the -= 1 below.
        self.rowNum = datacenter.obtainIndividualRowNumber(self.stateSpinner.text, self.citySpinner.text, self.contactSelectorSpinner.text)
        self.range = "Individual Contacts!A" + str(self.rowNum) + ":J" + str(self.rowNum)
        print(self.range)
        self.rowNum -= 1
        datacenter.removeIndividualRow(self.rowNum)
        # Reset the selection spinners to their placeholder labels.
        self.stateSpinner.text = "State"
        self.citySpinner.text = "City"
        self.contactSelectorSpinner.text = "Contact"
    elif self.organizationRadio.active:
        if datacenter.linkValid is not True:
            return
        if self.stateSpinner.text not in datacenter.stateNames:
            return
        self.selectedState = datacenter.selectState(self.stateSpinner.text)
        if self.citySpinner.text not in self.selectedState.cityNames:
            return
        self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
        if self.contactSelectorSpinner.text not in self.selectedCity.organizationNames:
            return
        self.selectedContact = self.selectedCity.selectOrganization(self.contactSelectorSpinner.text)
        self.rowNum = datacenter.obtainOrganizationRowNumber(self.stateSpinner.text, self.citySpinner.text, self.contactSelectorSpinner.text)
        self.range = "Organizational Contacts!A" + str(self.rowNum) + ":J" + str(self.rowNum)
        print(self.range)
        self.rowNum -= 1
        datacenter.removeOrganizationRow(self.rowNum)
        self.stateSpinner.text = "State"
        self.citySpinner.text = "City"
        self.contactSelectorSpinner.text = "Contact"
#Handle cases for submission of data to sheets
def submitVenueData(self, instance):
    """Write the venue form fields to the spreadsheet as a new venue row.

    Shows a confirmation popup on success, or an error popup when no
    valid database link has been established yet.  The popup layout was
    previously copy-pasted for both outcomes; it is now built once.
    """
    def showPopup(title, message):
        # One dismissable popup shared by the success and failure paths.
        popupContent = RelativeLayout()
        popupLabel = Label(text = message, size_hint_y = 0.3, pos_hint = {"center_x": 0.5, "top": 0.75})
        popupClose = Button(text = "Close", size_hint = (0.5, 0.3), pos_hint = {"center_x": 0.5, "bottom": 0.25})
        popupContent.add_widget(popupLabel)
        popupContent.add_widget(popupClose)
        popup = Popup(title = title, content = popupContent, size_hint = (0.85, 0.4))
        popup.open()
        popupClose.bind(on_press = popup.dismiss)

    if datacenter.linkValid is True:
        print("New venue submitted.")
        datacenter.submitVenueDatabaseInfo(self.stateInput.text, self.cityInput.text, self.nameInput.text, self.addressInput.text, self.zipInput.text, self.phoneInput.text, self.linksInput.text, self.contactInput.text, self.emailInput.text, self.notesInput.text)
        showPopup("Submission Successful", "Venue submission was successful!")
    else:
        showPopup("Submission Failed", "Link to database was not initialized. Please submit link.")
def submitEditedVenueData(self, instance):
    """Write the edited venue form fields back to the row held in self.range.

    Requires self.range to have been set by the edit flow (A1 notation).
    Shows a confirmation popup on success, or an error popup when no
    valid database link has been established yet.
    """
    def showPopup(title, message):
        # One dismissable popup shared by the success and failure paths.
        popupContent = RelativeLayout()
        popupLabel = Label(text = message, size_hint_y = 0.3, pos_hint = {"center_x": 0.5, "top": 0.75})
        popupClose = Button(text = "Close", size_hint = (0.5, 0.3), pos_hint = {"center_x": 0.5, "bottom": 0.25})
        popupContent.add_widget(popupLabel)
        popupContent.add_widget(popupClose)
        popup = Popup(title = title, content = popupContent, size_hint = (0.85, 0.4))
        popup.open()
        popupClose.bind(on_press = popup.dismiss)

    if datacenter.linkValid is True:
        print("Edited venue submitted.")
        datacenter.submitEditedVenueDatabaseInfo(self.stateInput.text, self.cityInput.text, self.nameInput.text, self.addressInput.text, self.zipInput.text, self.phoneInput.text, self.linksInput.text, self.contactInput.text, self.emailInput.text, self.notesInput.text, self.range)
        showPopup("Submission Successful", "Venue edit was successful!")
    else:
        showPopup("Submission Failed", "Link to database was not initialized. Please submit link.")
def submitIndividualData(self, instance):
    """Write the contact form fields to the spreadsheet as a new individual row.

    Shows a confirmation popup on success, or an error popup when no
    valid database link has been established yet.
    """
    def showPopup(title, message):
        # One dismissable popup shared by the success and failure paths.
        popupContent = RelativeLayout()
        popupLabel = Label(text = message, size_hint_y = 0.3, pos_hint = {"center_x": 0.5, "top": 0.75})
        popupClose = Button(text = "Close", size_hint = (0.5, 0.3), pos_hint = {"center_x": 0.5, "bottom": 0.25})
        popupContent.add_widget(popupLabel)
        popupContent.add_widget(popupClose)
        popup = Popup(title = title, content = popupContent, size_hint = (0.85, 0.4))
        popup.open()
        popupClose.bind(on_press = popup.dismiss)

    if datacenter.linkValid is True:
        print("New individual submitted.")
        datacenter.submitIndividualDatabaseInfo(self.stateInput.text, self.cityInput.text, self.nameInput.text, self.addressInput.text, self.zipInput.text, self.phoneInput.text, self.linksInput.text, self.contactInput.text, self.emailInput.text, self.notesInput.text)
        showPopup("Submission Successful", "Contact submission was successful!")
    else:
        showPopup("Submission Failed", "Link to database was not initialized. Please submit link.")
def submitEditedIndividualData(self, instance):
    """Write the edited individual contact fields back to the row in self.range.

    Requires self.range to have been set by the edit flow (A1 notation).
    Shows a confirmation popup on success, or an error popup when no
    valid database link has been established yet.
    """
    def showPopup(title, message):
        # One dismissable popup shared by the success and failure paths.
        popupContent = RelativeLayout()
        popupLabel = Label(text = message, size_hint_y = 0.3, pos_hint = {"center_x": 0.5, "top": 0.75})
        popupClose = Button(text = "Close", size_hint = (0.5, 0.3), pos_hint = {"center_x": 0.5, "bottom": 0.25})
        popupContent.add_widget(popupLabel)
        popupContent.add_widget(popupClose)
        popup = Popup(title = title, content = popupContent, size_hint = (0.85, 0.4))
        popup.open()
        popupClose.bind(on_press = popup.dismiss)

    if datacenter.linkValid is True:
        print("Edited individual submitted.")
        datacenter.submitEditedIndividualDatabaseInfo(self.stateInput.text, self.cityInput.text, self.nameInput.text, self.addressInput.text, self.zipInput.text, self.phoneInput.text, self.linksInput.text, self.contactInput.text, self.emailInput.text, self.notesInput.text, self.range)
        showPopup("Submission Successful", "Contact edit was successful!")
    else:
        showPopup("Submission Failed", "Link to database was not initialized. Please submit link.")
def submitOrganizationData(self, instance):
    """Write the contact form fields to the spreadsheet as a new organization row.

    Shows a confirmation popup on success, or an error popup when no
    valid database link has been established yet.
    """
    def showPopup(title, message):
        # One dismissable popup shared by the success and failure paths.
        popupContent = RelativeLayout()
        popupLabel = Label(text = message, size_hint_y = 0.3, pos_hint = {"center_x": 0.5, "top": 0.75})
        popupClose = Button(text = "Close", size_hint = (0.5, 0.3), pos_hint = {"center_x": 0.5, "bottom": 0.25})
        popupContent.add_widget(popupLabel)
        popupContent.add_widget(popupClose)
        popup = Popup(title = title, content = popupContent, size_hint = (0.85, 0.4))
        popup.open()
        popupClose.bind(on_press = popup.dismiss)

    if datacenter.linkValid is True:
        print("New organization submitted.")
        datacenter.submitOrganizationDatabaseInfo(self.stateInput.text, self.cityInput.text, self.nameInput.text, self.addressInput.text, self.zipInput.text, self.phoneInput.text, self.linksInput.text, self.contactInput.text, self.emailInput.text, self.notesInput.text)
        showPopup("Submission Successful", "Contact submission was successful!")
    else:
        showPopup("Submission Failed", "Link to database was not initialized. Please submit link.")
def submitEditedOrganizationData(self, instance):
    """Write the edited organization fields back to the row held in self.range.

    Requires self.range to have been set by the edit flow (A1 notation).
    Shows a confirmation popup on success, or an error popup when no
    valid database link has been established yet.
    """
    def showPopup(title, message):
        # One dismissable popup shared by the success and failure paths.
        popupContent = RelativeLayout()
        popupLabel = Label(text = message, size_hint_y = 0.3, pos_hint = {"center_x": 0.5, "top": 0.75})
        popupClose = Button(text = "Close", size_hint = (0.5, 0.3), pos_hint = {"center_x": 0.5, "bottom": 0.25})
        popupContent.add_widget(popupLabel)
        popupContent.add_widget(popupClose)
        popup = Popup(title = title, content = popupContent, size_hint = (0.85, 0.4))
        popup.open()
        popupClose.bind(on_press = popup.dismiss)

    if datacenter.linkValid is True:
        # Fixed debug message: this is the edit path, not the "new" path
        # (the sibling submitEdited* methods all print "Edited ...").
        print("Edited organization submitted.")
        datacenter.submitEditedOrganizationDatabaseInfo(self.stateInput.text, self.cityInput.text, self.nameInput.text, self.addressInput.text, self.zipInput.text, self.phoneInput.text, self.linksInput.text, self.contactInput.text, self.emailInput.text, self.notesInput.text, self.range)
        showPopup("Submission Successful", "Contact edit was successful!")
    else:
        showPopup("Submission Failed", "Link to database was not initialized. Please submit link.")
#Populate the spinners on click if the link to the database is valid and set up error catching
def stateSelection(self, instance):
    """Fill the state spinner from the datacenter and reset dependent spinners."""
    self.cityPicked = False
    if datacenter.linkValid is True:
        # Note: sorts datacenter.stateNames in place.
        datacenter.stateNames.sort()
        self.stateSpinner.values = datacenter.stateNames
        self.statePicked = True
        # Everything downstream of the state choice is now stale - clear it.
        downstream = ((self.citySpinner, "City"),
                      (self.venueSpinner, "Venue"),
                      (self.contactSelectorSpinner, "Contact"))
        for spinner, placeholder in downstream:
            spinner.values = []
            spinner.text = placeholder
def citySelection(self, instance):
    """Fill the city spinner for the chosen state and reset downstream spinners."""
    if datacenter.linkValid is not True:
        return
    if self.statePicked is not True:
        return
    if self.stateSpinner.text not in datacenter.stateNames:
        return
    self.selectedState = datacenter.selectState(self.stateSpinner.text)
    # Note: sorts the state's cityNames in place.
    self.selectedState.cityNames.sort()
    self.citySpinner.values = self.selectedState.cityNames
    self.cityPicked = True
    # Venue and contact choices depend on the city - clear them.
    for spinner, placeholder in ((self.venueSpinner, "Venue"),
                                 (self.contactSelectorSpinner, "Contact")):
        spinner.values = []
        spinner.text = placeholder
def venueSelection(self, instance):
    """Fill the venue spinner with the selected city's venues (if any)."""
    if self.cityPicked is not True:
        return
    if datacenter.linkValid is not True:
        return
    if self.citySpinner.text not in self.selectedState.cityNames:
        return
    self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
    if self.selectedCity.cityName in self.selectedState.cityNames and len(self.selectedCity.venueNames) > 0:
        # Note: sorts the city's venueNames in place.
        self.selectedCity.venueNames.sort()
        self.venueSpinner.values = self.selectedCity.venueNames
def contactSelection(self, instance):
    """Fill the contact spinner with individual or organization names for the
    selected city, depending on which radio button is active."""
    self.contactSelectorSpinner.values = []
    self.contactSelectorSpinner.text = "Contact"

    def sortedCityNames(namesAttr):
        # Shared guard chain for both radio branches; returns the city's
        # sorted name list (sorted in place) or None when unavailable.
        if self.cityPicked is not True or datacenter.linkValid is not True:
            return None
        if self.citySpinner.text not in self.selectedState.cityNames:
            return None
        self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
        names = getattr(self.selectedCity, namesAttr)
        if self.selectedCity.cityName in self.selectedState.cityNames and len(names) > 0:
            names.sort()
            return names
        return None

    if self.individualRadio.active:
        names = sortedCityNames("contactNames")
        if names is not None:
            self.contactSelectorSpinner.values = names
    elif self.organizationRadio.active:
        names = sortedCityNames("organizationNames")
        if names is not None:
            self.contactSelectorSpinner.values = names
#Create a handler for the infoboxes
def updateVenueInfoBox(self, instance):
    """Copy the selected venue's fields into the venue info-box labels."""
    if self.cityPicked is not True or datacenter.linkValid is not True:
        return
    if self.citySpinner.text not in self.selectedState.cityNames:
        return
    self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
    if self.selectedCity.cityName not in self.selectedState.cityNames or len(self.selectedCity.venueNames) == 0:
        return
    # Note: sorts the city's venueNames in place before the membership test.
    self.selectedCity.venueNames.sort()
    if self.venueSpinner.text not in self.selectedCity.venueNames:
        return
    venue = self.selectedCity.selectVenue(self.venueSpinner.text)
    self.selectedVenue = venue
    self.venueNameText.text = venue.venueName
    self.stateVenueNameText.text = venue.stateName
    self.cityVenueNameText.text = venue.cityName
    self.addressVenueNameText.text = venue.address
    self.zipVenueNameText.text = venue.zip
    self.phoneVenueNameText.text = venue.phone
    self.emailVenueNameText.text = venue.email
    self.linksVenueLabel.text = venue.links
    self.contactsVenueLabel.text = venue.contacts
    self.notesVenueLabel.text = venue.notes
def updateContactInfoBox(self, instance):
    """Copy the selected individual's or organization's fields into the
    contact info-box labels, depending on the active radio button."""
    def sortedCityNames(namesAttr):
        # Shared guard chain; returns the city's sorted name list
        # (sorted in place) or None when unavailable.
        if self.cityPicked is not True or datacenter.linkValid is not True:
            return None
        if self.citySpinner.text not in self.selectedState.cityNames:
            return None
        self.selectedCity = self.selectedState.selectCity(self.citySpinner.text)
        names = getattr(self.selectedCity, namesAttr)
        if self.selectedCity.cityName not in self.selectedState.cityNames or len(names) == 0:
            return None
        names.sort()
        return names

    if self.individualRadio.active:
        # The shared label is re-purposed per contact type.
        self.groupsContactNameLabel.text = "Assoc.:"
        names = sortedCityNames("contactNames")
        if names is None or self.contactSelectorSpinner.text not in names:
            return
        contact = self.selectedCity.selectContact(self.contactSelectorSpinner.text)
        self.selectedContact = contact
        self.contactNameText.text = contact.name
        self.stateContactNameText.text = self.selectedState.stateName
        self.cityContactNameText.text = self.selectedCity.cityName
        self.addressContactNameText.text = contact.address
        self.zipContactNameText.text = contact.zip
        self.phoneContactNameText.text = contact.phone
        self.emailContactNameText.text = contact.email
        self.linksContactLabel.text = contact.links
        self.groupsContactLabel.text = contact.associations
        self.notesContactLabel.text = contact.notes
    elif self.organizationRadio.active:
        self.groupsContactNameLabel.text = "Members:"
        names = sortedCityNames("organizationNames")
        if names is None or self.contactSelectorSpinner.text not in names:
            return
        organization = self.selectedCity.selectOrganization(self.contactSelectorSpinner.text)
        self.selectedOrganization = organization
        self.contactNameText.text = organization.organizationName
        self.stateContactNameText.text = self.selectedState.stateName
        self.cityContactNameText.text = self.selectedCity.cityName
        self.addressContactNameText.text = organization.address
        self.zipContactNameText.text = organization.zip
        self.phoneContactNameText.text = organization.phone
        self.emailContactNameText.text = organization.email
        self.linksContactLabel.text = organization.links
        self.groupsContactLabel.text = organization.members
        self.notesContactLabel.text = organization.notes
#Handle the Google Sheets database link management (two made - one for the database management page, one for the calendar page)
class DatabaseManagementDatabaseLinkView(BoxLayout):
    """Link-entry row for the database management page.

    Lets the user paste a Google Sheets URL; on submit the spreadsheet id
    is extracted from the URL and the datacenter connection is established.
    """
    def __init__(self, **kwargs):
        super(DatabaseManagementDatabaseLinkView, self).__init__(**kwargs)
        # Build the layout per instance: the original class-level
        # BoxLayout() would be shared by every instance of this view.
        self.generalLayout = BoxLayout()
        self.add_widget(self.generalLayout)
        self.generalLayout.databaseLabel = Label(text = "Database: ", size_hint_x = 0.1)
        self.generalLayout.add_widget(self.generalLayout.databaseLabel)
        self.generalLayout.databaseText = TextInput(size_hint_x = 0.7, multiline = False)
        # Pressing Enter in the text field submits, same as the button.
        self.generalLayout.databaseText.bind(on_text_validate = self.linkDatabase)
        self.generalLayout.add_widget(self.generalLayout.databaseText)
        self.generalLayout.databaseSubmit = Button(text = "Submit", size_hint_x = 0.2)
        self.generalLayout.databaseSubmit.bind(on_press = self.linkDatabase)
        self.generalLayout.add_widget(self.generalLayout.databaseSubmit)

    def linkDatabase(self, instance):
        """Validate the pasted URL, connect to the spreadsheet, and report
        the outcome in a popup. Clears the text field afterwards."""
        datacenter.link = self.generalLayout.databaseText.text
        credentials = datacenter.getCredentials()
        linkSegments = datacenter.link.split("/")
        # A valid sheet URL looks like https://docs.google.com/spreadsheets/d/<id>/...
        # so segment [5] is the spreadsheet id; short-circuit guards keep a
        # too-short URL from raising IndexError.
        if (len(linkSegments) >= 6
                and linkSegments[2] == "docs.google.com"
                and linkSegments[3] == "spreadsheets"):
            datacenter.spreadsheetId = linkSegments[5]
            datacenter.linkValid = True
            datacenter.databaseConnect(credentials)
            popup = Popup(title = "Link Established", content = (Label(text = "Link to Google Sheets successfully established!")), size_hint = (0.85, 0.4))
        else:
            popup = Popup(title = "Invalid Link", content = (Label(text = "The link you provided is invalid. Check to make sure it's the right link.")), size_hint = (0.85, 0.4))
        popup.open()
        self.generalLayout.databaseText.text = ""
class CalendarDatabaseLinkView(BoxLayout):
    """Link-entry row for the calendar page.

    Duplicate of DatabaseManagementDatabaseLinkView so each page has its
    own widget instance; on submit the spreadsheet id is extracted from
    the pasted URL and the datacenter connection is established.
    """
    def __init__(self, **kwargs):
        super(CalendarDatabaseLinkView, self).__init__(**kwargs)
        # Build the layout per instance: the original class-level
        # BoxLayout() would be shared by every instance of this view.
        self.generalLayout = BoxLayout()
        self.add_widget(self.generalLayout)
        self.generalLayout.databaseLabel = Label(text = "Database: ", size_hint_x = 0.1)
        self.generalLayout.add_widget(self.generalLayout.databaseLabel)
        self.generalLayout.databaseText = TextInput(size_hint_x = 0.7, multiline = False)
        # Pressing Enter in the text field submits, same as the button.
        self.generalLayout.databaseText.bind(on_text_validate = self.linkDatabase)
        self.generalLayout.add_widget(self.generalLayout.databaseText)
        self.generalLayout.databaseSubmit = Button(text = "Submit", size_hint_x = 0.2)
        self.generalLayout.databaseSubmit.bind(on_press = self.linkDatabase)
        self.generalLayout.add_widget(self.generalLayout.databaseSubmit)

    def linkDatabase(self, instance):
        """Validate the pasted URL, connect to the spreadsheet, and report
        the outcome in a popup. Clears the text field afterwards."""
        datacenter.link = self.generalLayout.databaseText.text
        credentials = datacenter.getCredentials()
        linkSegments = datacenter.link.split("/")
        # A valid sheet URL looks like https://docs.google.com/spreadsheets/d/<id>/...
        # so segment [5] is the spreadsheet id; short-circuit guards keep a
        # too-short URL from raising IndexError.
        if (len(linkSegments) >= 6
                and linkSegments[2] == "docs.google.com"
                and linkSegments[3] == "spreadsheets"):
            datacenter.spreadsheetId = linkSegments[5]
            datacenter.linkValid = True
            datacenter.databaseConnect(credentials)
            popup = Popup(title = "Link Established", content = (Label(text = "Link to Google Sheets successfully established!")), size_hint = (0.85, 0.4))
        else:
            popup = Popup(title = "Invalid Link", content = (Label(text = "The link you provided is invalid. Check to make sure it's the right link.")), size_hint = (0.85, 0.4))
        popup.open()
        self.generalLayout.databaseText.text = ""
#General view classes of the GUI
class ScreenMainMenu(Screen):
    """Main menu screen (widgets presumably declared in the kv layout -- TODO confirm)."""
    pass
class ScreenDatabase(Screen):
    """Database management screen."""
    # spreadsheetLink holds the link-entry widget; presumably bound in the
    # kv layout -- TODO confirm.
    spreadsheetLink = ObjectProperty()
#(Controller for Calendar)
class ScreenCalendar(Screen):
    """Calendar screen."""
    # spreadsheetLink holds the link-entry widget; presumably bound in the
    # kv layout -- TODO confirm.
    spreadsheetLink = ObjectProperty()
# Module-level screen manager shared by the app; registers the three
# application screens under the names used for navigation.
screenManager = ScreenManager()
screenManager.add_widget(ScreenMainMenu(name = "screen_mainmenu"))
screenManager.add_widget(ScreenDatabase(name = "screen_database"))
screenManager.add_widget(ScreenCalendar(name = "screen_calendar"))
class WavesInMotion(App):
    """Kivy application whose root widget is the module-level screenManager."""
    def build(self):
        # Return the pre-built manager as the root widget.
        return screenManager
def main():
    """Create the application object and start the Kivy event loop."""
    WavesInMotion().run()
# Run the app only when executed directly, not when imported.
if __name__ == '__main__':
    main()
| 53.306953 | 329 | 0.722361 | 14,605 | 116,529 | 5.651558 | 0.052859 | 0.040416 | 0.036539 | 0.018294 | 0.815098 | 0.759465 | 0.744903 | 0.725361 | 0.712676 | 0.705565 | 0 | 0.025662 | 0.148607 | 116,529 | 2,185 | 330 | 53.33135 | 0.806304 | 0.056012 | 0 | 0.705525 | 0 | 0.000552 | 0.086849 | 0.003368 | 0 | 0 | 0 | 0 | 0 | 1 | 0.026519 | false | 0.000552 | 0.017127 | 0.000552 | 0.068508 | 0.014917 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c87ed7ad989bf29279f47341741f41988f97f6fb | 93,844 | py | Python | Daz2HouNeo/mainWindow.py | SideswipeeZ/Daz2HouNeo | d58cefce68e5268023d15efe0e34fc7a24ffa56e | [
"MIT"
] | 11 | 2020-12-15T01:48:30.000Z | 2022-03-07T04:01:52.000Z | Daz2HouNeo/mainWindow.py | SideswipeeZ/Daz2HouNeo | d58cefce68e5268023d15efe0e34fc7a24ffa56e | [
"MIT"
] | 1 | 2022-02-16T09:29:22.000Z | 2022-02-20T12:22:12.000Z | Daz2HouNeo/mainWindow.py | SideswipeeZ/Daz2HouNeo | d58cefce68e5268023d15efe0e34fc7a24ffa56e | [
"MIT"
] | 4 | 2021-08-08T18:47:08.000Z | 2022-02-06T14:53:00.000Z | import os, sys, hou, base64, random, string
#Path Var
root_path = os.path.join("X:\ToolLocation\Daz2Hou2") #Ensure There is no extra \ at the end of the string.
sys.path.insert(0,(root_path))
#Qt Import Block
from Qt import QtCore, QtWidgets, QtCompat, QtGui
from geoWindow import *
from debugWindow import *
from shopWindow import *
from ropWindow import *
#SessID used for Console.
sessID = ""
VER = "01.05"
################################################################################
#Main Window Class
class H2Dz(QtWidgets.QMainWindow):
########################################
# MainWindow Initialize #
########################################
def __init__(self, parent=None):
#Super
super(H2Dz, self).__init__(parent, QtCore.Qt.WindowStaysOnTopHint)
self.setParent(hou.qt.mainWindow(), QtCore.Qt.Window)
#File Interface
mwUi = os.path.join(root_path + "\Assets\mainWindow.ui")
#Set Icon
ico64 = "iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAACXBIWXMAAAsTAAALEwEAmpwYAAAF+mlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDUgNzkuMTYzNDk5LCAyMDE4LzA4LzEzLTE2OjQwOjIyICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1sbnM6cGhvdG9zaG9wPSJodHRwOi8vbnMuYWRvYmUuY29tL3Bob3Rvc2hvcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RFdnQ9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZUV2ZW50IyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOSAoV2luZG93cykiIHhtcDpDcmVhdGVEYXRlPSIyMDIwLTA5LTAzVDE3OjIyOjU2KzAxOjAwIiB4bXA6TW9kaWZ5RGF0ZT0iMjAyMC0wOS0wNlQwMzo1Nzo1MiswMTowMCIgeG1wOk1ldGFkYXRhRGF0ZT0iMjAyMC0wOS0wNlQwMzo1Nzo1MiswMTowMCIgZGM6Zm9ybWF0PSJpbWFnZS9wbmciIHBob3Rvc2hvcDpDb2xvck1vZGU9IjMiIHBob3Rvc2hvcDpJQ0NQcm9maWxlPSJzUkdCIElFQzYxOTY2LTIuMSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDowYmRmNDk0Ny04ZTYxLTIwNGItYTM2YS00ZDY3NTliNTc4MWQiIHhtcE1NOkRvY3VtZW50SUQ9ImFkb2JlOmRvY2lkOnBob3Rvc2hvcDo4NjMxZDk5Ni02ZjM3LTYyNDEtYjU0Mi02NGM2MDYwZWE5ZjIiIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDphMjkzOGQ2NC01ZTQzLTg5NDktODcwNi01Y2IwOTNhODhhZmIiPiA8eG1wTU06SGlzdG9yeT4gPHJkZjpTZXE+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJjcmVhdGVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOmEyOTM4ZDY0LTVlNDMtODk0OS04NzA2LTVjYjA5M2E4OGFmYiIgc3RFdnQ6d2hlbj0iMjAyMC0wOS0wM1QxNzoyMjo1NiswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTkgKFdpbmRvd3MpIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDowYmRmNDk0Ny04ZTYxLTIwNGItYTM2YS00ZDY3NTliNTc4MWQiIHN0RXZ0OndoZW49IjIwMjAtMDktMDZUMDM6NTc6NTIrMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE5IChXaW5kb3dzKSIgc3RFdnQ6Y2hhbmd
lZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz5eBkJTAAAZk0lEQVR4nO2bebRnV1XnP/ucc+/9TW9+r+rVmFRVKklFEpIIGkBmjEozLJeiLm1c4th2S9siNtit3a2o2LQsMe1abaM0rh5QsFUcUElQUAKRCA0hkIHKVKRS05uH33DvPWfv/uP+3ktCgA5L7PrDnLfuuvfd3+/de/b37LO/373PeWJm/GNu7mJ34GK3pwC42B242O0pAC52By52ewqAi92Bi93kG976xi/+QUxYkbN2xXFGeU4njgjbA+rkSVIQ2hW1tvBDRTLDBBhBamVYx+EZYT7ghzVl5XE25Fx+gJefvfPIj63dcjAenJ3eir69J9uYv37xbPfBPVfop84Ua8+wpY0Jqbe2R7oSQ3Yq1bJ8ctijN5EzIyP6PufAcJ1P5wv88vwLWUkFk3FESgpeqLMWda04eXIAhK8ell+IoGKuQi2BFSI28bR5DVetTVz2/D8ueoctb8/s1bV24OEZW63aR/yK5ccObrT7s5u+X213O2m5Vr0vjkafmrHRgy2f7qmTbFUqqH71uvnVB0Ai6gwSwZVTLw315MvbvvctbSkOTHpjuHCMP8+nCaq0THlfXfPutc/zult/hxML9y2mFxynOtyjtalQKmYlc3GIjTa3Vkf1X0+V/ffaML7b1fW2qGI8yaH+Es0f/qZv+OJ2qEHwjObniN6TacRVNWoOk4DLEmoBFw3xgItoXZF856pc9/xspz74jom49wc7Yer6VvCTzkNlhsYR7XKDVr1JHrcIMuTBhQP88f7nMvd3F7jqL26ltoBe4rFcsLKN5NNIMVtM9qYvn+q2XzHTGfzzNVfsfbe//N6YbL1lCTPACeoDqoY8SVz+ngBk+BQxX5JSdszb4V+dDIfePsHM1+cu9JxPJCLJlKhGrUayRK1KqUY1vtcdbqIB/ugZL+HIw6f5mj+6le3VDu4SkPmEbg4wjURyCp/T7xxuvWvum58V8/a/IvY7g6gftai18+7/LwAmipb0SPvf3PHHfqdXzF3rnRGpiaYkAzVDx2fDSCaAAAYGqkZlEOoSH2v+4sRz+LrVuzj2ibvYuCfD7THcpQ4bRPI0wnTITf4GTvpDXJ8pi532cyTjR9bL0QO1cjch+4cGwIMTXDagGk29NI/H/qpX7H1JFiDpiGiKamOsYY3hZiiPgmEITgQBREAwEkJWDjnTm2OpmOKV93+ANBKq20e4vYa7zNHa3OQO2c//ClcxndYZaqItGftb3U6vHb5jw2rZGvEhQXACTybP/YqDoJOK6JS0dfCmlhx+bavtUAbUced1zeiaNh0YjzVm7BqMGYogAs4ERBAMdY5Dq4/wkYNP49b9T+f5D32MpXwf5dtrCh/h2siZ856NoExLTVLHKEXaPrCY98hn3L//hK75lYqf7eThSQHwFQkh7yMxufk0OPrXhT/y2iwoVRpRxx03h5SUmIykRkpKHZWYFFVtfMLGgGCoCYrsegWA18h20eGze47CaAhFxLRk9PYl+NwGNlMxqGsGqaafavoaWY8l58shHZdzzfz0z0xJ/M7tpU3S5oC08eWPJwmA4EPFqMyvHlVXn2znc89zrqI2IbkOte9SuQ6V7xKLSTTvkSQQk5JSQpNiZpjaeBo0QMCjU8J2zgg+RR6YPgBZC5cMJgXrJ3jHBlP9DepOZDvW9FPFINYMUmQrVVyoRrTJOD7T+13xTA1GkTqmL3s8qSkQ8pLBWu8lur3v5vbEpERnVMOKur9EHG1Qb62QBmsIjqwzQ+hOU0wtkM8dwKohWg4wHCbgXWNoMsOJ4FzjCSaCt2bStGLFmd48o6xL0ESVBJlzjE55jv3x59n3mm1WtjOmtaIWJZgjiFCrp1ZlotPm0GznLQ+fXvuRrJX
Dl6l6/T8BCO3IYK31jbZ07Oa8aLG1cYbRxsNU66fQ7SW07GPlCIsJYsRSAnN4X9A9/DRmrnspruiiZR91DlPDO4eINJ6ggneGmZAQvBgJh8eQGLGQgwfM2Jxrsf+DD/Hsrz3Jb171bGaWzxHFoS4RTahQSk2kYWJ2sv3DS9PVT8ZKt3340pQQZFg98a4JjkTo1Kxs9J7TP33w5szOs716D7F/Fis3QQUpckI2hbUnISZMFVLCYkSHA1bvuIV6a4U9z3s14jNMIyAkVbwIIoIBScG75lpN6IeCS1YfoRhts9nqNV9wDgrBVh3f/IE7eM9V1zF0GYUlzAQTSA3RshWNTqdgXuNr1x4+/+as2/qSXhDiJXNPvCtKNixZrfdfs/xA9uHEx6nTg1CXuKIN+TwkHY96GndQIOqY1wTverTyo/TP3E3/9N30jn0t2l/beQHJwLkm9JkJoLgdQMRz2dIpiBVKQ8lCw+2rC9Nc/8m7eNkdn+Z/XPt1fM3SeRSH0nTBDBJKWdUUe6deJ2vb/ynVMUr44uEu1Ffs/QLjIQ/K5v1yyamz2ccku0Oc28K0C3kPVBtDo4ADE0HcGACnWBTECeYcYPjeBOQFKdbsVqDNwDnUBDfmBFUBge1WlwPrZ3n25+8gFj3EDEQwVQQheUdd5fzYRz/Ih665gtN+goNpqwGheSwCxJjIi3w+n2jfMFjaujVk/tF3A+Y9IuD3f/u3wEhhpNhQCUmoN6x9ZnnrM5qfnQ65Q6SLcx5xgvjG3XAgyO6IIw2v43auHWmwRmvv5fSOPQctB+ySnTRDZWPEd1Sb18Q9C0d49Sf/lO/+2/ewPLM4FhDSWDY2YBgKFtdXOH7JgP+9cDWWjI7VmDTqZ+d5QYxR3lpZy9u3MNkh9jqU3Q51r4NrZQxnp3FUJVQlVpVIqpC1bZZWV2+pusO9eaeNZAVSOCg8kgckBFyWIVmA3CO5x+UeyTxkAckCLs8xSlynR+/Y8xrJmyI2lsTJFAXMjKRKUsOpcq47y6Wrp/nRj76LsuiRdOwxquzmwAqSGcubEzz3/s9y0+D9nJcOS66FmO7GFAPqpHSDfp9zQimeKkHAmGoJxXCIH4xwKTpSdGh0uFJYz/mRwaR/TquVQ+Yb43OHZA4pGoMl80gIuyCwcy9rQDKpwSUmjt+I7+yhHqw36k8VVTClEUY2lkSa6LuMC91Z3nTzTVx2+i6WurNYamT1LggpgTS/xzKx8XCXlw4/yztX/4QBnjOug3sMCHWCvJ0vzFj1T/N7TpFv9+lOtsicUI890cmwQoYVblChw1q2VW8K3iPOId6Bb87iXaMbncB4KohzjWuO7xE8pkOgpnP4JYTpy0ijdbBmDqOG2VgUjZWjmZJwPLDnKK/70H/je257N+dmDoImzBoFmZI2U9dsPLyGBKguRAb1FC+q7udd6zcjGA+7Ds4SQJOM1criXOedE1P5IekVOJSk7FYRnM0KNiu4OcdwgVfFwnKn2sxxoUHcjQ9pGKK51ubAxgHBsLSOCy3a+76R0DtCGq2RND1qsCo2NsDMkJQoJXDf4nG+97Z385b3/QorE3sY+YDpDlDjZErH3tA8CHKBrUTcNvqdea6tz/Cezb8is8TD0niCAKMqYiGE+acd+pTv5s8clelxjOiKlT6t5T7F0jZVnb5bvSFOx5FdEUmPXjM2WtL4bGOX3AQZEYpj5NMvRLIFUrnWjDqgpqjpOIYZmhI+1iy3J/n83GF+7IO/xTve8zNsZG3Wih4yHvHHGv/odSOpJYBuRGylxgqh76e5PC3z34cfQ4BzUjQxQYRBFWmJm72sV9w+1/Jvi+hldWyAcJuX7GHj0j2sHdtLWfgrfB2b0bSIEZuog9LIjEiT2BpGjbktRIY4d4CQPQtfXIeqJ5XrjxqgujuaaoqoUonnvvmjRIT/+rs/zX/+g59jteiy1JnBxdgYuUNXXwDCzmeGQb9GV2skNDnn0E3ytLTMb5afZhPPFgHMSGZsVTU
B4Ui39+MnZiZPzu2ZfDuQBTcsG1cQB3U9UFPceL6JNZNFdex2ZgglSglW4OtLkLgHdBZIoOsgDsRhaWfqNPQlKVJmBUsz+xCM7/jEe3nj37yTa899jgfnDlKHHB+rsX6AlBp57Jwbs2YDgnNuF1zUoGoGqJFKSildbkhneV09xZvDUU7QB6BWYyNWOByLrS5u3n5oa3P4ucCgNRZAgY7aT5Zx6YPRgYhvUkUzLCZEK8TVaNnFjQ7hqj1Y6jSeYhuNFHBuN7iYOFCIGGvdGWJ3mqzc4uV33sIPffwPuPHkbQzyFp+dv5RAQ4ONIGzUpDg3ZgBFxrK5EUyKyBiUZOOYoo2TOiVaopCCH9WH+VNb4BFpsYdG7pfJ6EvNI6M+yUUspcVgUjSIi6ew9KE51/76bdMf1rJ8foJZxIZFO92Z+q3LdePwET9cFLSN2QC11UaneN/ocTVEmmgvYpzrLjKfJ1524R5ecN/t3Hjf7Zw49WkQuDC7jw3f1BS1QQ9z4HYKKik1IOyqxUY0NUFXARmzREOjzXSLiHMMLNBJA77LneOnw3EWrBxTnjDCKFPF+soyG/3w22FXYIigdU3Rdbcnmbrd6wDanmoty7Se+HFWJq6gDKIyItlK436AOAFNqIYmqHjDF7Ixavm/efWFP3to3+0fJT/10P5eeeGamzud47938HImq8TRWHFljBx2RnSeMwjW2EUjKHfkL6hziNnjBCFjKCw0QVVUG69LgozLHC/lLO/w+9mWjI5FUlKGVcWgHmq5PHz9/Lp+JtCWXXAtz6gHHqeRMDmBbWY3pKXur1ZDf4P5EqVPimlXlYn3aBS0FvJuda7oud92Lff+bHr4d1XV7X/fZ/6Mm++7m18IzyQ/ciU2Gl1b5/b9zMhrw2DIsVHF8brk++t1bnRDVrMO22aNxB7LZh2DgDS5RUpjEMxwuSCTglaxIabxjwNqF5iPK1yT1vnzsI/DRMqUSDGi0rqlPZ3/qs/6BJtzux5g0ob7S8JQGQ4mf3n4iHuDkTA3IkbFNI0Do8OcIw4UXwhT+4f/utUd/Zpk3apORUOjER7JFygnlpmojEOpwkblp6T2/1Javd873bHfubfIDtxbdvnTQZvX90/zg3GDXmeakSnRGi8QMwTFyU6mI00ALA0mAzYr6KBGksdRN3HClMonMqvYowNKa2oPrSxAK6czKBd1UBJ7PYKGbDdoBV8jubF1qvfe2LdXqh+SSFga5/pj4yVA3IbQ48GFo/GbQrc+WW4aqXLj9PWJBQjDUA9RE7NJPnzl3r1XPzRcvzuF4d7NrMuvhEuolu7mlXGFYxOzCA0ITSYIOENUcU4wc9hIcfvB2koa1btZpVijY9AANqAdh2juxpVqyHFkneLpK7X+xprv/LOQL280NOgUaQfWz079Xrlmr6Tok+I4wGgal3gdeKXehGLOPTT/LLnOLaWNauAwe4y+/CJNnCP1S7ROZIcWmJnorqW6+vb7i/jhmQDn/SR3pUWuPH+SJfE8qztFpNH1Irvk0GR7XhsPmHZYbthmaijXDPGGG3sBOkI17hZiEaGsSpY3LsRBmZ8KviD0Tp8FoJ2POLey76c2z3e+3YdttFbQ1CQgDUIgEEeC0zrtvca+Ieu4jeGIhr4eayyMq73CqDKGlVH1R3QOLdC5/AB5t2BzWDNxeN+tU5vZ3Vur6yfwGalqs2ei4DP9PtM+cKLdZUtTMxWcG5fTx0I0GFzi0JQgggUwalyTaY2/H9nUsFsDSClRlSWTxeS6zM3+z20fCGSerKUsD/Zc98iZ3lscG6g2bv9Y44VGeMSYc+iK8pXT5eYj1b3gsjab1sGiEFC2yDgvEygFmNEroAOspozW7CQSjM3ldQwo+o4M/kwmWieoMlqjwFQbZsi4ezRgX8gpvCNaoxN2En0bGW5SYNGRNiMuNQFSncc8mCq5EzDl83TINZHGSrLVKujk2Xw8s/Rqtzb8peCcUI8C5+/Lfj+VI0IoG+maEkaDvIy
TmWorMXfp8DenL/Pv6293oAuuEIr+gFEqOJ9P09HIv6g+wpX1WZ59+TovvnyO55UZH18p+P17hwwf2KaTNyyaV4lstttnugNbwuV+SKsQpvCcr2vOVCVXtNrUjfDFzBoluG24q3KYdaT1hDnDmSDOEHOo8yzELT5dHOBj+T6mU7+ZTGaUo5LlrY1PZpvpbROjSHCnzrIymvvezQ050vIDUhkb1xcH3sN4QSONHEVHL8xfmn64XhFS1RC2DowgieHkFNPbK7xD/4Dn292spC7DTpfpds70hvBThzc5vHeen//kfnq9iPMOFyH20g3LIeOSwSqv4BxbrYIMR9FyrFSRSps4oONSV0qKiOIuD+hYDiuKmSBqODWcN4gD3jn9NZxzPU7EZdQJThwuz+lE/8Z6zgab+zxhpbd49epm/hOu3MZ8/Wi0cUAyzHmojRQD+7557tX+yg6j9bRbdjInDELB4udO8XVnPs6JvWdY9nt5SHJmY8VwqCwPhdPrxnceWuLms5Pc+Zkhe11NNdeZO3147kaqwOvr2zgQRtxTdMlU6bQcKRnDZE16oYZzAhuKXBqQSwO60eQnTe7SBJ8K40i1xCc6l/Ke7jUcqNZpatFNbtJxgQ1vL45lvNlrIqxt915RbW5fG+oS0jjY+dBQnjVrdqPYYnq2+sDs0oM312c9Wdqp7cGwgvlMOT5aYZS1WPKTLFI+jhAcUEaHDIbseeZetvbOsu+RM/zt3vZvazvwtqW/5FXxFHe3JshRNAp5IbgSUtWU0BWwBFIp4ZocbQlsK87L7mqwiTAdh2DCf5h7IQMJ7ItbJJHdUlo3zzGT14xMftqJ02CPrMzbqElZGScwQhxXehyxguAj+ya3XuMe3kIq14yEQFUpuSqXdQfQzilD0VRxvxgNCmgZ2ei2OX39Mzl9+MF/c+XK/3nZL576KN80eIh7igkEIUtGihCia8ahGi+ti6BrkXBphntaTlpNiDWgONcA1E4Vs/Ua/3bxZdzcPspV5RJJ/G7wrGplZdAnhLCQOn4xOXcmdNn85DAWZTJXhHGBw0wRNcwLsd1j3+iBn2h96I7TQ2k3WZkT6mEkRuXqFxyms9BlbQDFuBTFmAYTDsWxrRkliUHWpj0azj/v7Iff8K3nb339a1buoluV3NWewUoliwn1gveGCxB8Uz1WE6gNIvgbWiQPrlJsvBOqUpiSyH7b4K2Tz+LNnes5MrhAdM308btMBsNKcXVJp1+WphBaC9kftvPseasX7AdCrJvA51yzuLlVMXHYfn1yq3rb8LZN0qxHHFTDSKoTVz3/CNMHJhhu10jzGqxZU8Jh3YLqGcGq49e0V/ZNSTxAlS79mfX3Pf/QcKlFnbgQJjjbbhNKRWtIQXBB8F7wQShUmjKDN2xFyZ5dICcydDU2DDWeoketT2GJN+bP4D9mz2RPf5nMG1XIcD5gwfDmEOcBI5r8rSdbcSih95FPbQXJ3zCcO35jGXqHrEr4AN6l0d649HM5/HKdt8mzDJ97ykrZ1MCJF17G5JULLK8NMQtU5hhKoDLpzKTVX297+7ZMdBKMqzvbVBGq2phyyloxxdBBqo1clcqB84Jz48NDcEK7EiJGuqCEowH/jW3SVkITTEvJgTQEEW5LU7ypdTUf4BC9zYcRjH5vghByQhYwbZG8xwM+hJG0Jr+nXmiDKcGt9WmPllfmQ+fE4OjlP5DX5cFIeLBV9t/bi+tn+yySxmlqGaGuEi978R4OX9Vme20Zy5uViJEFulQTB2X59ozhlWQ5CY+ZMVCPOcOCMRrvGvGqY7ZtVoidb0KQE9AcWltQjIRqNeL2erJv6+Atko9qwLEs3cEfFpdduM3ve+iWzpEz65txa269r73cOT/T7ZaDtBiLetESsyn2i5AVA+tN3CJZ8UseecCNKTRonlGrgkl/0pc3zRVDNkcFowpqAjvJqQis9ZUXXNXmu66KrK2f2l0KxGAjFVzfPv+qhXzryiGdncQNsSZAAbs5/Q5
F7Cwq7TxkXBfBZUK+JqQzSnYI8u/MsVaiv1bcu1R3379Y2Pt+aeaf3P6H01esU25w3f4Z5M6T3LcxydRsorfnMI88cgbf6pDRZuhWijzrlL7ba5b1NDGuqzXL40JDIdEcZXLU5p6w/267EvYvwE88fZv5UZ+J9PiNSEMy9thgTvE7lu9ys+zu15HdhQ0Zb4t5HFMYyITASdA7I3IZ2KumcHt6fzQ4k/3Geab/YrUMdLINKl+QM2B/WsdVBaUoEowkiTpW4+Kt7OQRZcNwOs4LHq2qPOk9QvUosXdfm4fm9nD3MJFaj/98oIHzxfSffGt291vyqqZ2GTIGYXdRY8dg49HrxzQ35UgnS+qPDMm/tlPyisX/8hBzNx3bjg/6ENEq0XZKkERu9e5S2Jdu4+rxoy99QnvSAPggTEjiM/dXrPefuA0tWuJT0d8zf3D63z13//mfT8mRaDI4GwO/e4bH33PQnTCGdw3Y+ISM0ov2/9bguQu/2K7zc/Uwg8lhQ6ry1d/Y+qSfaK2c7Pwq0+dPgmVP+Nxh9Ddr3v30K97UOzDBdfGBn48mRBcwbQqmqtZMP7VmqS8anSzSrhP3fFb03gcmfy178fxbJq+ZPlcvV/T8EC//sBvanzykZiTvSXkL1Sf+mQJZglYGd5Vzb5rU7fdP+9EbijS6sW1Vr6yNlBwaQeoIpZIBZ/v5ybP32ftuXpp969YVvdMvmBrhVku8KnyZrS1frfaV+9RjovgXNkMIonQpKX1x+2p74ttWVqvJuqxfdLRbHvdWHibo9MDaq3nXP7hWFXf+/l35X3ZTSWvR4+sSKRNM/v2M+orMeer/Bv+Rt6cAuNgduNjtKQAudgcudnsKgIvdgYvd/i/QdBaFFkg46wAAAABJRU5ErkJggg=="
icoData = base64.b64decode(ico64)
icoPix = QtGui.QPixmap()
icoPix.loadFromData(icoData)
self.setWindowIcon(icoPix)
#self.setWindowIcon(QtGui.QIcon(root_path + "\Assets\daz_ico_64.png"))
#Set Widget
self.mw = QtCompat.loadUi(mwUi)
self.setCentralWidget(self.mw)
#Set Window Title
self.setWindowTitle(("Daz to Houdini NEO"+" v"+VER))
self.setFixedWidth(361)
#Set Stylesheet Settings (Default to Houdini)
self.setStyleSheet(hou.qt.styleSheet())
#Generate SessID
self.sessID_gen()
global sessID
print("Daz to Houdini NEO: Opening Windows (mainWindow) SessionID=" + sessID + ".")
#### TEST ####
self.mw.bttn_debug.setText("")
## Base 64 Icon
back64 = 'iVBORw0KGgoAAAANSUhEUgAAAWkAAAA9CAYAAABvE5gbAAAACXBIWXMAAAsTAAALEwEAmpwYAAAGymlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDUgNzkuMTYzNDk5LCAyMDE4LzA4LzEzLTE2OjQwOjIyICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdEV2dD0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlRXZlbnQjIiB4bWxuczpwaG90b3Nob3A9Imh0dHA6Ly9ucy5hZG9iZS5jb20vcGhvdG9zaG9wLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOSAoV2luZG93cykiIHhtcDpDcmVhdGVEYXRlPSIyMDIwLTA5LTA2VDAwOjA2OjAzKzAxOjAwIiB4bXA6TWV0YWRhdGFEYXRlPSIyMDIwLTA5LTA2VDA0OjE3OjA3KzAxOjAwIiB4bXA6TW9kaWZ5RGF0ZT0iMjAyMC0wOS0wNlQwNDoxNzowNyswMTowMCIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo5ZDQ1NGZhMi00ODEyLTZhNDUtYjRjOS04ODM2MWRjNGM2ZWUiIHhtcE1NOkRvY3VtZW50SUQ9ImFkb2JlOmRvY2lkOnBob3Rvc2hvcDpiMjcyNDkyNC1hZjE0LWZkNGItYmMxYS1kMDg3MzA2ODM4YzMiIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDpmYjI1ZmI0NC1iN2U3LTUzNDItOTJlNy02ZTcyZjI2ODQ3YWEiIHBob3Rvc2hvcDpDb2xvck1vZGU9IjMiIHBob3Rvc2hvcDpJQ0NQcm9maWxlPSJzUkdCIElFQzYxOTY2LTIuMSIgZGM6Zm9ybWF0PSJpbWFnZS9wbmciPiA8eG1wTU06SGlzdG9yeT4gPHJkZjpTZXE+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJjcmVhdGVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOmZiMjVmYjQ0LWI3ZTctNTM0Mi05MmU3LTZlNzJmMjY4NDdhYSIgc3RFdnQ6d2hlbj0iMjAyMC0wOS0wNlQwMDowNjowMyswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTkgKFdpbmRvd3MpIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDplMDQ4YzdhMS01NTExLTU0NGItYWMzOS0wNzgyNGVkNWU0NmIiIHN0RXZ0OndoZW49IjIwMjAtMDktMDZUMDA6MDY6MDMrMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE5IChXaW5kb3dzKSIgc3RFdnQ6Y2hhbm
dlZD0iLyIvPiA8cmRmOmxpIHN0RXZ0OmFjdGlvbj0ic2F2ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6OWQ0NTRmYTItNDgxMi02YTQ1LWI0YzktODgzNjFkYzRjNmVlIiBzdEV2dDp3aGVuPSIyMDIwLTA5LTA2VDA0OjE3OjA3KzAxOjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOSAoV2luZG93cykiIHN0RXZ0OmNoYW5nZWQ9Ii8iLz4gPC9yZGY6U2VxPiA8L3htcE1NOkhpc3Rvcnk+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+UbX8pwAADtRJREFUeJztnWlv3MgRQN+MNJYsyzqslWMk2Q0SJED+/y9JgAQJgiDJxru2sl4fa8vyIWuYD9W1XVPT3eTosGdG9QCCd5Nz8LFY3SRHXdcRBEEQLCfjL70DQRAEQZ2QdBAEwRITkg6CIFhiQtJBEARLTEg6CIJgiQlJB0EQLDEh6SAIgiUmJB0EQbDEhKSDIAiWmM3WzNFo9Ln2I1g9LvPnWGSdocvqcp1bp3PzhrDI7beXuVU3bu8NirTu/G5KOri1DBHkyPWvWq4X7NCrvCHbHyrHaWE/rlrmTS4b3AJC0sEYEdMoDdtxK6zWcG1ea5qdrtvuCtNL6/dtz9NVhu247Y9Sf9pYrzXdb6+2fTs8NeNTM+73I7hlhKRvJyriMfIf2AQmZngjzdc+rl+SZk2oYzfdl1Naz8u7NH3IyaEkSytCP8+vMy3Mt8tM3Xhfea1yVMoXwKfUnZthnR/CvmWEpG8XKucNRMpbqdtO/Tup20idRtYwK0Wfihg3hmvr+Wle2EPHa6KuCbolZy9ymI1w/bJ+XuukUFrO75cV9EfgPfAuDX9ApH1R+BzBGhOSvj2MyXLeBnaAe6m/m6bdJUfVunxJiIvI08u4JvhWOSO3fC369pIuRbhT2jK9aMxrCd1H1SUZt8royIL+hMj5HfA2dadp/D0ia7vNYI0JSa8/Kq9NJFreBfaBvdTtIrLephxF19INpT7MSte2sPBpj1Y5Nj/tpV5Lu5QkXZJYKVpm4LK11IWfrhWQuowdbqVGNJq+IEfSb4E3wE+me5vm+fWDNSQkvf6MyNHzHnAIHKX+HhJJbyFitlKZmg7KuVSVz9isZ+XuKwJL+2bLHJvhIetchlZF36gyv7TukIrEoeXaNNQdM11lfYbI+QXyO74AXpOj6pD0GhOSXl80spwgaYx9RM4Pga+QCPpuWuYcuZw+RQ7+j8zmPqdmvCa5kRsuRcwXzKYbSvJqbUfpE1NHTgnodibkylHtaw7eVp5OXFmLnixKJ5hSVK3T7ZWOrcjV/dpBfidNT22lfdwAXqVyzun/zoIVJSS9voyZjaCPEUEfp/FNRIinSGT2HHiZxj8yG0nb5mB9Mhgxn/eF/ii0Nb82PnQ53a/SyYTCuKZ77JWBtnqxnYp1w83fNOv5ExRmmgral6kiVjnvINJ+QJa4l71WKgZrRkh6/bAR9DY5gj4mC3oDaS3wBvgBeAr8mMZtFE2hb6mlM0py9Pu4zFGfrzy8LPpbWGF7+avktdPfbYKIeQv5DR8ikt5N+2Slr/s55CQarBgh6fVkk9kc9DGS4riPCOItktZ4DpwA36Vp/sAfytA87pDll4Hr2j/bvO4yaI76CMlJf4P8jvcQadurHO2fF0sKVpaQ9Hqhl87bSMT1ABH0ESLsMVLZ9AL4HyLoE6RpVylFEXxZLpDf5gly5QPyG+oJ19+VqH17JRSsOCHp9cDmUlXQtpJwHzm43yLR85PUvUCkHe1tl5sp8rtpGmsTuULaT/NtE76OfNK184IVJSS9PoyRS+NSBL2BHLg/At+n7iW5gjBYfqbI76cVlJvI73zI7C3ltp11/LZrQEh69d
FKqDtIrvIQiZ4fIJfEKmgfQX8koqxVo0Mqem1Twn3kt56S71bUik8VdfzOK0xIevXRqMq2hfaCfoG04HiCyDoEvZpotHzC7MOwdpCTsz7zw7abjqZ5K05IerXRJlx6u/ch+U7CCXLA/oRUEoag1wMV7wn5BP01cpI+QCoYVdTx9Lw1ICS9umhLji0kzXFAFvQWcoC+QfKYT1I/BL0eTBEZ/0C+a/IbJKJ+kObpU/P8jUjBihGSXl20Da0K+kHq30UOxlNEzI+BZ8hBG9HU+nCBtNZ5ipyU95D/wD2yqPXGJHtrf7BihKRXE71NeYdccXSIHKAggn4GfItcFkczu/WjQ66WTpHWOvfJj6DdJ6c87JP1Iu2xgoSkVw99GI+2h9Y89C4SXb9Hcs//RQT9gQGXua0XYQIzLyVuLeuXa73MWMsZUrYvp7Tu0G3U5g9dtrZPQ8odst9DPxs5P/0K+CdyFfV75ArrgPk3vEyJp+atHCHp1cI+F/o+IueDNKwVha+QFMdTcgTde1AmgWiF4xxd171DHplp0SfpbZMe1tR13Wu33h5yC3qNna7rzpm/ndmWfVbZ/iSV7/mARJh9y+nT/6ZWwE7GO6nvt03XdYdIxWwtOj3suk7nz3zOwsnoCGm7/nNZfSe5xBT53V8Af0e+r1+l/kHannYaTX8iRL0yhKRXB/u84V1yHnovTbtAKgqfIGmOwYI2tGTqeYTI8HnaziTtyyMkglf2e8q9m/pW0o/SuJY9Rk5IW4jIlAnSxNAKdIf8QoMnZjlSeYq+Ouxr8hVHiePU/7Yy/whJLXkOkeNLpVv6nHZfDtO8RX4DRSsSnwF/RQT9FblpnkratvaIW8dXBP/Wi2A5UUFPyBI6SP1t8q3AT5HLXq0kvKmD8DBtw0Z+KtWLtF9XKfuMLGjIt0WPkRNUizNy87TWfnxAhPgaEW2JLXJLidIVxktyHtgySfv5fG6NMntpn/f7FmwwRU7MJ8Cfke9hTBb1g1T+Dvl51Fd9gULwGQhJLz82gr7H7A0ru+SHJj0G/pGGbzpK2mM2orW8Yl5ai7BLPZp8Tl2onp+opG4Ky9X2d4/8MoSDyjLfA79g9lj6JdI8bmgl3U7axgXD9rmERsdnyH/hT8iJ8w655ccRWdR3mH3edbCkRLpjedHbvfUtHRpBW0FrReFjJIJ+y83nG7co5GcN59QF3scO+d19rfKHMFSQWplWYpucyqidHKZpmUdIeuUIEW4tfeLZJX+fp8hvPDQC91hR/wVJsfyRnJ/W5nhj5OQ0ZraZXqQ/lpCQ9Oen9FopHbYvYrVv6dA3emtLjn0k4noP/Af4F5KP1iZXN8nGgG0MFVSJIWVrGqLFHpIC6kOjZc8usyeMszSttOwpIsQj5Ld6PGC7dvt6IngN/IbLSxpmX3n2N+Sk/Qfyc10gX5ndQb6jD+Rnftin59XelhMy/4yEpK+XkoA1ItZh+3aO0rgKWt/MsY0cYPeRA/oe+Zkc/0aEoO8lvA5Bf12Zvoh4hpYJ8h8cIlPLRmOeVmBOmK3A9OiVybiy3C6zVwSvEcmVJA2S5vkt8gKFoegrzmwkP/Qk1EIj6ndpf8bAr5HPq4+t3UL+W2/Tctqu+sKsD+VnVpc6CJHfCCHp4ZSiXv+uOt/51yaN3bQN17cR9AQ5kPQ9d9vk271fIZWEJ4g8rvNuwqvI+DJlDs0x1zhm9rOfI1Gvb43h885bab0nzKPytKJUeY4pf9dHSAR8XNh2DX8igHwyaJ1ghtCRn93yHbLPj8jvTByR02gaTWsLEHuXom8N4l9Q7IdbAm9F50GFkLTgK0/GbnotAvYy9gL2krbTNirrqKTtC0k3kQPuTepekV8a+yVu994aMP+ykeCQsm064BntHLmiLUaUMRLhl6RbS2u8plyxqS1OtEWKCruPQ+ZPYGfkZn9XQeX5ARG1vqxWbx3XF0TojVH2oUxe0rbzt5n7+V1lmu/sdEzf/xa3XuS3QdJDcsA18cJ8JO
yHvZCthP08/xbp0luptYN8268+h+E9cnl6hkQ/nyMH7TlDWjPUGCOSKkWofbynX9KtVMci+Ao/yz7lSPYUEaiV9AT5vCrbl4j8SzffWDTNUfr9tALxMm2mLXrjiqaT9Oad+8gVmgYF+jzyDWbFWZKtitneam6ndZQF7uXdFfpDInH72UrDa8c6SNpHwV68drgVGfvhmjxLUbOXsJ3mRexPBBrtqIA1P6jr6oGmUY7PHX4JWhLZp5637WOKfP5a2dqG+ro4Q1o97JhyJ8h3W2rxcW6W0eFHiOytbE+QZniPqZ9ED6hL+DXzJ4PLoGL7hHy+c+T71eaJ2l7a/lf1Kk7rQzaZF7f2bUTdufG+SLwWcbfEbVugtCTuj4uVlvgqSLoVCWt/SH7Yy3hoVFwTtc0l11Ihdn8+IAfIO0TIdrgzy3rZ2z+ov3y8Nvqe3eF4hrRCgHRbdRreTd1V8to/AL8rlL2HfC+lu/uuwgkS+apQa6kO5ZScS9aThj9x6I09D6nnlneof5ZzRKC1/PeiWPFpZG1vZtErty5tW//bWi+iwt4ynT8OaqmMmowvmJdzLU1yUSi7L/ruk3dN6EvHl5Z0LQq2w32RcUvEtQi5lsIoyRv6BWzzfxoV++EhfwYt56P7Lm78D5We3XGMHIw1HrvhfeQ5EcoZcnOHZUi7Z/sIzSnSpPAhkkawFVpeakMfvVmLjHV7L8k3lGwikWaN07RfYyRlUEvraLM8jbrt55zQf7Xx3OzTdWDFaP/DmGH9/+lwC43E7xS6LeR7tCcBL/NapFyLqn0FZq3VyVXkvZSR+GjoE82uso3GeEnE2h8i5VpTNivXlrDtsqVtljhHJPqRnCvWbunPykHgKAUD14WKXNMn2rfT/bZLQi1JuBatlyL6RXPeLYH74dL4wjQ9fE2SLi24iIztsJ3mBVqKaGstLvx8XFl+3+wXoZGX5n9tFyIOguthRM6Ba7NT2wRV036QxWlTgHZ6S8R9LUxg9gqiJvFW3w+3ps0vdM2SHipkO3yZlIWXtV9Xf7zSuN9m6UNeMP+8XZ0WIg6C5UBFvsFsE1Xb9+JWWhFzTdK1ysmhEXdruDrtuiR9lbSFnVZKL5TkXVvfTy+hZ9yLRhciDoL1wLekskLXXDjMy7MUFfeJvbScL6PVt/w87aqS7pNzbbwk2CHzS9ts0VFvYB8iDoIA2k1o/ZV4K0IeElXXyqgJu2t5uK91xyJpjFLf4z9EbTmbohgxf1lSyiP5bQzZhyAIbgcauLWoNTIoXbmXPOLrt9RjpX5puSKXaYLXl2YojQ+NjGv5ohBrEAQ3zZA26bUUre33rb+Qzy4j6ZL9fVTc2olaHigIgmDZGeKrUkrXRtILsYikbeGljbXGQ8JBEKwyi9STKTUnXmskXUtX9Ak6CIJgnfCOWySFe5X57dYdQRAEwZfF3wASBEEQLBEh6SAIgiUmJB0EQbDEhKSDIAiWmJB0EATBEvN/Y74anLwGy/0AAAAASUVORK5CYII='
back64_Data = base64.b64decode(back64)
backPixmap = QtGui.QPixmap()
backPixmap.loadFromData(back64_Data)
##
self.mw.bttn_debug.setIcon(backPixmap)
#self.mw.bttn_debug.setIcon(QtGui.QPixmap(root_path + "\Assets\echoprNeoV.png"))
self.mw.bttn_debug.setIconSize(QtCore.QSize(361,61 ))
#############
########################################
# PIXMAPS #
########################################
#Header Image (Base64)
header64 = "iVBORw0KGgoAAAANSUhEUgAAAWkAAACXCAYAAADXlKqTAAAACXBIWXMAAAsTAAALEwEAmpwYAAALaWlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDUgNzkuMTYzNDk5LCAyMDE4LzA4LzEzLTE2OjQwOjIyICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0RXZ0PSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VFdmVudCMiIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczpwaG90b3Nob3A9Imh0dHA6Ly9ucy5hZG9iZS5jb20vcGhvdG9zaG9wLzEuMC8iIHhtbG5zOnRpZmY9Imh0dHA6Ly9ucy5hZG9iZS5jb20vdGlmZi8xLjAvIiB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOSAoV2luZG93cykiIHhtcDpDcmVhdGVEYXRlPSIyMDIwLTA5LTAzVDE2OjQ4OjQ5KzAxOjAwIiB4bXA6TWV0YWRhdGFEYXRlPSIyMDIwLTA5LTA2VDAwOjM0OjExKzAxOjAwIiB4bXA6TW9kaWZ5RGF0ZT0iMjAyMC0wOS0wNlQwMDozNDoxMSswMTowMCIgZGM6Zm9ybWF0PSJpbWFnZS9wbmciIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6Yjc5M2QwZjItM2E0Ni0yYTQ3LWE2ZjItM2Q0MTM4MWZlOTgwIiB4bXBNTTpEb2N1bWVudElEPSJhZG9iZTpkb2NpZDpwaG90b3Nob3A6YzNkOTYwYWEtMmUwOC0yMTQ3LWFlZjctNjhlYjVjMzRjZDExIiB4bXBNTTpPcmlnaW5hbERvY3VtZW50SUQ9InhtcC5kaWQ6NDZkY2EwYzItMjhkMC1mZDQ1LWFhYWMtZTJkMTRiODRiNmE5IiBwaG90b3Nob3A6Q29sb3JNb2RlPSIzIiBwaG90b3Nob3A6SUNDUHJvZmlsZT0ic1JHQiBJRUM2MTk2Ni0yLjEiIHRpZmY6T3JpZW50YXRpb249IjEiIHRpZmY6WFJlc29sdXRpb249IjcyMDAwMC8xMDAwMCIgdGlmZjpZUmVzb2x1dGlvbj0iNzIwMDAwLzEwMDAwIiB0aWZmOlJlc29sdXRpb25Vbml0PSIyIiBleGlmOkNvbG9yU3BhY2U9IjEiIGV4aWY6UGl4ZWxYRGltZW5zaW9uPSI3MjIiIGV4aWY6UGl4ZWxZRGltZW5zaW9uPSIzMDIiPiA8eG1wTU06SGlzdG9yeT4gPHJkZjpTZXE+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJjcmVhdGVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOjQ2
ZGNhMGMyLTI4ZDAtZmQ0NS1hYWFjLWUyZDE0Yjg0YjZhOSIgc3RFdnQ6d2hlbj0iMjAyMC0wOS0wM1QxNjo0ODo0OSswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTkgKFdpbmRvd3MpIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDpjMWE3OGVkZi02YWJkLTcwNDktOTA2OS1kNzFkN2QwNGI4YjciIHN0RXZ0OndoZW49IjIwMjAtMDktMDNUMTY6NTM6NDMrMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE5IChXaW5kb3dzKSIgc3RFdnQ6Y2hhbmdlZD0iLyIvPiA8cmRmOmxpIHN0RXZ0OmFjdGlvbj0ic2F2ZWQiIHN0RXZ0Omluc3RhbmNlSUQ9InhtcC5paWQ6ZDE1Yzk3YzctMmM4MC1jZjRlLTk2MjAtNDJmMzA5ZjQxZTdmIiBzdEV2dDp3aGVuPSIyMDIwLTA5LTA2VDAwOjMzOjI2KzAxOjAwIiBzdEV2dDpzb2Z0d2FyZUFnZW50PSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOSAoV2luZG93cykiIHN0RXZ0OmNoYW5nZWQ9Ii8iLz4gPHJkZjpsaSBzdEV2dDphY3Rpb249ImNvbnZlcnRlZCIgc3RFdnQ6cGFyYW1ldGVycz0iZnJvbSBhcHBsaWNhdGlvbi92bmQuYWRvYmUucGhvdG9zaG9wIHRvIGltYWdlL3BuZyIvPiA8cmRmOmxpIHN0RXZ0OmFjdGlvbj0iZGVyaXZlZCIgc3RFdnQ6cGFyYW1ldGVycz0iY29udmVydGVkIGZyb20gYXBwbGljYXRpb24vdm5kLmFkb2JlLnBob3Rvc2hvcCB0byBpbWFnZS9wbmciLz4gPHJkZjpsaSBzdEV2dDphY3Rpb249InNhdmVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOmM0YzI2NjgyLWMyN2UtNGQ0Yi1iNTdkLWY5YmI4MmQzMzZiYyIgc3RFdnQ6d2hlbj0iMjAyMC0wOS0wNlQwMDozMzoyNiswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTkgKFdpbmRvd3MpIiBzdEV2dDpjaGFuZ2VkPSIvIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDpiNzkzZDBmMi0zYTQ2LTJhNDctYTZmMi0zZDQxMzgxZmU5ODAiIHN0RXZ0OndoZW49IjIwMjAtMDktMDZUMDA6MzQ6MTErMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE5IChXaW5kb3dzKSIgc3RFdnQ6Y2hhbmdlZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPHhtcE1NOkRlcml2ZWRGcm9tIHN0UmVmOmluc3RhbmNlSUQ9InhtcC5paWQ6ZDE1Yzk3YzctMmM4MC1jZjRlLTk2MjAtNDJmMzA5ZjQxZTdmIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjQ2ZGNhMGMyLTI4ZDAtZmQ0NS1hYWFjLWUyZDE0Yjg0YjZhOSIgc3RSZWY6b3JpZ2luYWxEb2N1bWVudElEPSJ4bXAuZGlkOjQ2ZGNhMGMyLTI4ZDAtZmQ0NS1hYWFjLWUyZDE0Yjg0YjZhOSIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PuDFixQAAIv9
SURBVHic7J13nGVFmfe/VXXOubFzmJ48MEPOSBBEwIQYMLCsyhrXgJjjKu66+r7qa9ZdhRXXNQdUVBQDKroiKgoqShYGmMTk7ul4w0lV9f5R53bf6bnd0zPMwID9+3xqpu+959RJVb/z1BOFtZZ5zGMe85jHgQn5cJ/APOYxj3nMY2bMk/Q85jGPeRzAmCfpecxjHvM4gDFP0vOYxzzmcQBjnqTnMY95zOMAxjxJz2Me85jHgQxr7X5rewixP65vHg8NPM/jzjvvxFrLq1/96p1+W7x4MevXr2fZsmUt9/32t7/NN7/5zV2+f8tb3oK1lne+85375Zxb4UUvehGXXnopS5cunfxOCIG1lksvvbTlPldeeSVXXnklAN/61rdaXsuSJUvYuHHjTv3O44CEaGozYn/y5vTmPTTXvRNmu/i5EPW8Y/c85jGPvcFc+KWxjW2x/cPCPQ8HSTej1U2b6UY2bpCc9nmm7Wb6PI95zOPRg+l8MVdOEcydGxrb7ck++wwPN0nD1A0ULb6badtm2Gm/2Ra/Tf9sZ/ltHvOYx8OP6XwwEz/MhaRnw2x80fjuYSHnBg4Ekp6O6aQtWnzX+LvVkqQV7LT/W/09fZt5Qp/HPPYtWpFrKwKeTXCbizDXjJmId7p0/LCT8Ux4uEm61TLC0trrpBV5yxm+39PjtyLvxv+mxffN381G7n83SNMU3/cB2L59+z7pc2xsDIDx8fF90t9cUK/X2bFjB56369SIoqjlPkmSUCgUZu1XCIExZsY+HgWYiXRli++nfzfTHN6d6qJZSJuLkGXYWbibLoC1ms/T/37I8XCQ9HTpd/oNEEzdzOkPYCbL6/SHKltsN534ZyPz3T3w2Qi7uc32W6vjHVAIgkC++tWv7giCoPklKgBuuOGGiRtvvDFpbPuZz3ymo7u7O5iYmLDvfe97ASa+//3vR+AISghRAGJAtzhUXgihgcn+PvzhDxfPOOOM4vj4OC9+8YvF8uXLa5dcckm18ftZZ53lPf7xj2+v1+sA9Pb2smrVKvv5z3++8vOf/zwBOPvss70zzjijsY3t7e0Vq1at4mtf+1rlhz/8Ydzqmvv7+4tPe9rT1Ec+8pGJaT+Vu7q6UiCcvk9HR0cxe0HVhBD57BqT5m0GBwdRSnWUSqXKDPfgQMP0Zz5dOGr+vpXAJGbYttXfzf/vDq3It9W8nP59qzlJi8+zzc2/K8PhTEuNVts0k2srMm58bm7N+zYbGqcPnOb9m/tpPrfZzr/V4JipGXYeJGaG32cbcA8lhNZa3HTTTbFSavoLzm7cuBGAIAj4wx/+0NfZ2anOOeecidWrV5s3v/nN/lVXXdX3/ve/f+Q973lPVWtNHMdlYIIWBGWMKaVpmpAR209/+tPOE044ofjmN7954sc//nF67rnnqk9/+tMdJ598sv+kJz1pFODNb35zYdWqVbmLLrpoDJBHHHGEOOSQQ7yf/exnCy688MLhb33rW7U3velNO21z+OGHi1WrVqmrr766733ve9/oe9/73ur0c3nta1/befTRR+fjOK6QjRlrrb3wwgv55je/2fua17xmcxzHjWfIaaed5j/jGc/oOfvsswcBtNY7XUsDxx13XG7RokUDL3/5yzf/+7//+/QXwMOB2QSb6eQqp33f6u9WUvFMgtJMasqZCLh5zrT6rtXcmW1eTj9Wq3NoPs+HV4g6gPykW2H6oGk0lTUP8LMWADkgDxSAYtbKQBvQDnQAXUA30Av0Af3AQmARsBhYCiwDlgMHAQcDK4FVwCFZO3SG1vh9VdZWZvsfBKzI+lwKLMnaImAAWJCdS292bl3ZubZn51/KrimftSC7Zo+dJZh9CZH132h+U2vcf97//ve3p2m6sGn7ABDnnHNOyVq7OJ/Pe77vi1tvvbV3xYoV+RbH4Mtf/nL3Zz/72XaACy+8sGCtXSyl9LJjBIASQnhDQ0OLXvayl5UALrvssvbPfOYz3dn1N+4HL37xi9vDMBwAxAc/
+MH2//qv/5q+jXj6059eGhsbW9jX16eaT+a0007L3X333X0bNmzof/vb397edI4S4Kqrruq99957+5v3ieN40Qc+8IGO7KP88pe/3H355Ze3Tb+ZP/nJT3puvfXWhdddd10v++d5NaMxZxpzpNXcaMyLDqATN+56cONwAW5cNubEEty4XY4bxytoPS8OAQ7DzYXDprVDp23XPDeWMzU3FmfHXcjs86KNqblRZGpu5Jh61o050hivzRwy06p8Tngo/aQP9IjD6ZJmo+mm/9Om/1OcBJPgltZp9n9zi7IWNv1dzz43t8a2cdZf2nTcxhtWMkVgOdwEKOAGTomdJ0JjMjReEs1/NyZIc+ud9n1v9n/zPp1Zv+XsuD77Noq0+Tp3kbIKhYJ4whOeUDjvvPOqTE0EAeSuvfba6PLLLw8XLlyYS9NUhmEohBAtJ0WapiJJEgHwzGc+s/TZz362Zoyx2fUIwLfW2ssuu6z2rGc9qwgQRZGQUsqmY0og97WvfS287777DBBMTEy02ia45pprotWrV6c9PT255vN4+9vfXrrqqqvs0572tPr5558/Xcnsn3/++aNLly71P/rRj7YDXHPNNd2rV6+27373uyfIXk5pmoo4jndStw0MDPiHHXZYcOaZZ47kcjnvggsuKO7tA5kFjZdkDkdajXHXILdudh1rvew67hqtsV9z62RqHHcwRZTFphYwRZKN+948j6fP0Znm4PTPjbk4U2v02+CBZm5o5oxWkvcBjYfbcPhgMJuuqEEEmp1VIK2WZM2qj1Y6tsb/qum36W/l6a3591Z9zfQG350ubbpezTA1OGN2Hsxp0z57i8akatWH7O/v9/r6+uyvf/3rJLuehipDAPK1r31tJQgCOjs7vTiO7bZt26afjwUYHx83xhgBiImJCXn55ZdHuHulaXpRfOMb39CPecxjAkDWajXheV7zyxvAdnV1BR0dHQpAKSWiKNplm/b29mDx4sXe+Ph4Y3xYQBx33HHBxz72sfqdd96ZdHZ2qpUrV+buv//+iKZndfLJJw9fc801nWEYctJJJ+UWLlw4TLaqADDGYMykNgSA17zmNcVt27bZsbGx5C9/+Yt50YteVPjud7+7i6plL9EYjw1pubk1VlozqSVafQ87j83pz6t5HGqYVYiarbXa307rm6bPTPt9pvN71OGRTNKzYS4PbjpJzsXwMRNxN39uRdTNKprpv6sW/bXSkc90fQ2ijoEaUAUqTEkimr0bvBaw559/vn/hhRcW4zi21lpyuZwYHh5OXv3qV9fGx8e9arVqOzs7bb1eb55kjWuxcRwLpZTN5/Pq3e9+d/eOHTuipv4FIE477bTCV7/61QlAnXnmmd773ve+xnU1Jq/AEbMZGBgAULVajYMOOkguXrxYCiGsMcaUy2X/17/+dc+1114bAzoIAtHT0yMXL14sMu8KkySJuvHGG7t//vOfx5s3b268XOwFF1xQSJKEG2+8MQLMXXfdpd/5zncWL7rooogp0vBuv/326B3veEftiiuuWHTKKac8oLVOs+c3+SKYrup7ylOekrv00ktjwPvYxz5Wv+6668qdnZ1qdHT0wRoQJU5ibazemlVjDUm2gem61emfp9tNppNuYxzpWb6biaBti7+nCyDQmpCbz+/vEo9Wkp4Lpj/0VhNmumTRShppRd7TJesGaSl2JnM1rU0n8+nELqb11fgN3ECPcQQ9mrVxHFnPJA3vDpPX2GRnkNZaAQghhJBSNu5N82SDqQksfd+XSZJw8803N4hxJ5x++ulBsVgUAA888IDJjrPLhDXGoJQSgFi3bp1597vfnTvmmGM6crmczefzTExM+FdccUX49re/vQLIzZs3m7e97W25Y489tiMIAtvW1ib7+/uL733ve8c/9KEPNVQ0BuAlL3lJ8frrr28sj+XnP//5+mWXXVZkZzLTgDjrrLP8NE3jxz72scGf/vSnOjOvWOzKlStz3d3d6pvf/OY4INavXx9Wq9XS
K1/5ytLHP/7xB+NbKHHScpkp1VkHTuXgZ9tMVwk2k2UrNcB09YBu+n42Sbkxd5qP0YqMZzLeTf97Hk34eybpuWAmC3AzWkm9rZaSil2JtpWKZCaynv5/s7G0oQ9v6MIbhhQPR9ZV9o6oxVVXXaWvuuqqiaZra0i2Kp/P43meyPyZp084u3LlSlWr1eTIyAhxHNvrr78+HBoaanZ9E4BZv359rq+vTwL21ltvtWeccQbf+c53dpGkenp6xG233SYAs2rVKvXd7343ete73jUhhJBKKVupVOzw8LDO7g9Lly6V3/ve9+JLLrmkIoQQxWLR/vKXvwzuvffeBrFIwHZ2dqqjjz7ae/3rX9/wurA/+9nP6saY4rOf/ezi1VdfXc3ORb/73e/uOP3003OHHHLIlttvv713y5Yt0Xe/+90aMxhwL7nkktKdd97ZcMnzAPuFL3wheulLX5p/ECQtcc+3jSkDeA/u2Uvcs262uTT0ttMl3umEPJtEPJ2Am1UVzd/DzEQ8j73APEk/eOwtkc8kfc9G4M3GyobBMo+bnG1MkXSzTrJxbjX2nKjttPNqnLcFGBwcTEdGRsRTn/pU//vf//4u/sOf+9znOl//+teHW7ZsCYUQtLe3y6GhoeluWDKXy4lGkMdBBx1kV65cGWQk3SzF2re+9a2+1toAtq2tTezYsYPNmzebaf2p7G8TBAE7duyw2TYKMM9//vMn/vCHP3Tm8/kwiqIUMC9/+cvbJyYm2LBhQ9h0ffpXv/pV+rKXvayQkTRHH320//73v7/tuOOOG123bl160UUXVb7zne90BUFQT5KkcfwGBMATnvAE/41vfGOdJp39F7/4xdrrX//67uXLlwfr169v6a89CxoSdDuOoBfiSLqc/V7HuTqO4V7ODSN4w6A2k254OhFPN7DNFsA1j/2IA92749GC5oHekFiajX0hjkQrWRvHTbLRrA0DO5raYNa2A1uBzdn/IzjJKYdb/vbj3Jg6cZL2Ti5nc0RD390slSWATpKEK6+8Mvzud7/b7hw3pnDxxRcXn/jEJ+buvffeUCklswi+BiHs1JRSZEZAcemll9ae85znFM8666yAJhJYvHix97KXvax01VVX1cjUL9Oi/KbfXyOEsMVisfl3deONN9a/853vRNdee21ntj2veMUr8t/61rfiaX3ID37wg/Xjjz/e933fA/j1r3/d8/73v7922223RYD/zW9+c+IHP/hB8oc//KE329dKKclUQOa5z31uKU1Tcc0119SbzoHx8fHk7rvvNm984xtLe/gsBO7FXMYRdMNVrQv3Qq4BQ8AW3JjYghsX23DjpTF+hnFjpdHGcGOugiP2Ko7sd3reTJH59FXOPPYj5iXpAwN7optrlsobKpEcboJGuInUi5vIXdm2zS6KzZ4Os+IZz3iGfNnLXlaK43gnY1gulxNr1qxJ3vnOd8aXXnpp5ZxzzvFWr1696KabbqrEcZx2dnYGz33uc0vPfvazR9M0paurSy5atAjf96frKSVAf3+/DYIAQP7mN78J3/a2t038+te/7rvyyisrlUolLRaL3tOf/vTyBz7wgepPfvKTEJC9vb22s7MTdpb4drp/vb29dHR0NLZpQL7whS8cjeO47/nPf37xl7/8Zbp06VLvc5/73DhTBkALsGHDhrBWq7VdfPHFhYGBARnHMe95z3vGcPNGA+r8888fMcb0vec972l/3/veN97V1SWc1x+8853vLP7ud79r6HlF1rcExBVXXBF+9KMfLbzvfe+TY2Njc3oeOIIu4tzjGr7MHdn5juPIeCuOhCu48TDdy2eeXB9hmCfpRx6aJ5thZ5/ThsQDjgwaRqXGNo0JGzGHl0FGzA3DYfP2DfWHBdR55503+pa3vKV06qmn5oUQVmttH/OYxwz/5S9/SQC/Wq3af/mXf6lu27YtbXEN6rLLLqs15bRQ//mf/zlx2223JRdffHExl8vlpJT2RS960fiPfvSjOpk644tf/GKU
y+UaqoLphjuBM/5FuVyuYahs3BeVJAnPetazRqWUolgsyvPPP39sx44dzTrWxrnJl770pePLli0TExMT4rGPfewYOxO5sNZy1llnjZ522mkK8D796U/XwjC0gPfhD3+49rvf/a6hI29WJXhXXHFFvbOzUwshFHOTTD2caquLnQla4NQbW4FNOEl6gplD8OfxCIOY7i60TzuftgSex35FI6quHaejXIxTdxRwUvZ2YCNO2qrgSHt3evRm98NmNIyHzTr1Zgl9+vc0HWv6dqrp7+ZraZa6m4/VLHU2e13sQtJN/TS2afStmr5vnGPj/2Zfb6/p72Y0dPuNbRokO+ktMu1cmw1tM53bbM9C4p5jFy76bxnu2fo4CXoTU8+2QdBzlc7nsRfYn7w5HfOS9KMHDRe8Cab8ZxthsUWc9b9Z2m42FM2GhttgM5qlycZSvtkdkKbfmgl5pvPeSc3AlCqkWYc+U38tXd9maI3jNa6hsW2DoKf31fwyamw7XSfb8BKZ3sf0YIzpng9zVT80Xr5tOGIewD3LgCkd9Nbs/wrzBP2owzxJP7rQTNTNLnpdONVHHzu7Y9WZfUncbNVvdazm7Vr9Pl3X3iqAghbn0Cx1znQuu9PjN0i91fetCHm6yqGxXfNqYPp2rfqai4g107lNR0NSbxgKB3DPsIhTWQ0zRdAT2XfzBP0owzxJP/qgcZN1jKmcIg01SAdT+umGVBrSemLPRKCttpn+90zfzWWbBmY7p7lgtm3n6p0wl+32xtNhLts3CLqEk5wHstaOuzdjOPXGEE7lMdNznMcjHPMk/eiExk3aMRxB53GE3ZjwDRfABlHPtETeV6S4N/h79kJoqI8anhwDOFe7zuy3hifHdpwL3TxBP4oxT9KPTliccasR+tycvjGPWzo3ZwqbwJH2/EQ/MKBwz6mTqZShPbj5OoEj5204n+cquzcCz+MRjHmSfvSiISFXcUviho66EZ3Wz845HRpEPT/ZH140XqpdTBF0s5fOEFNqjrl46cxjH6PN3/02+xLzJP3oRkPnPM5UruEAJ5W14fxtG9K0ZX7SP9xohHx34J7NYhxRF3Ev3EY04SDumc4bCh8CtAWAgdM6BW87RHLaLiUd9i/mSfrRj4YXR0M/3XDN68xas9dEg6j3NmvePPYejaRJnThiXoLTQ7fhnscwjqC3MaWHng9W2Us0iLcZvoBXL5UE3tRvp3XC6X0CG0N7HsgJiB7aqfGIJ2mZC8ALwLhVu8wX6H78KQz+4kaO7NVccHovv71lkBvXhaAEEgsWNAIsSAG5nGR4dE/z3DyioHFL5WGmEjN5OE+B7mybZneyhp5zHg8NGgTdkKAbZaQ6cM9khKn8LMNMJcv6u0YrooXWZNuMZuJthhJQahc7hxml1s2EAqDBViyImZ3+9wceUSStOtopLF9C6aRT0GEFKRS9Tz4X3dWHqE8giFCmnfzBBcSLSly45Xv8W/vP2fzE49hQiRHHdLO0rAmG69xk+qmO1Dm8B7pNyrduDZlYt4GRuMhYzfLzX61lrGYIo0eFsGJxQ63CVN27RtWOZqJuzrw3r/p4aNAwEnbgJOhlOILuxFHMKI6gN/Mo0UMXFahmImzC7gi2gZmIFmYg22Y0E28zLFC1rW9scxzqQ4wDlqSF5+MVA7qefAY66KO4aiEdJx+LMD5q+UEYL0FqH7aMEecLqFIOQYgMi0RbNqJ6T6a2rcCO1SNUl/awoCuHWFBgUTkBZegzBQoSFvaB3F7hwmcsYvHCApXtvVRixftft5KhoYRv/HwTRilqYzFf/8EaatEjVoBpEHUjIrG5teH01I3PPk5iq7LvSnHNYwqNaMpGPo4OnHFwMVMEDY6gN2VtkEe4cfecfsFZfYJXLxAEHtgW8s9uCbaBmYgWZifbZjxC5K8DiqRlLkD4BXrOPZ2Oc89HFaBtZSc1vRLCQUQ4iKmmJKvvxZYU0vh4kXA15dIQQYiNAM9iqhVMHCM8RZwY6sYgJhLG
bUpQSamYhGo1YbwAfqTZtj1kQb3GPdvH0cJj1UKf9pzPh/7tZFQQoEYqPOfZh/GGP+QYn4gYveZ3JNtHHu5btqdoJFcaZ9fcGw2i9nBDvw1HEo3ses1+1Q20iiKkxeeZvp8eGt2qr1b7ziUoZrrMs7vPzd9Pj3Zste3uvmu+lubQ8kbmwob/euO+D+CIuj3br6HiaOTkeMQaCs/pF7xtpeScAeFGV92pHAlabDxXgm3gEUK0DwYHBEm3PfYEuk48jPbHHc9YdAjdK3ySmoYkJNq8laguUV6I50XYRIKf232newkhwFqo1FJMZNkSOdJuGxtnYHEvh7z3RQwbH/Hq51C/8342fuknDN9w6347n/2AhscH7JxECRxB9OBIuoOpqi51dpWoWxGumOG35mNPx+5Cxxuh2c3bzrTPdFJsRbZi2n7Nv8HcyH02gm/+u3E+sun/RlWdHC64qKFu6sR5cWgcQW8EHsAR9BiPwICVDg++fpzkmSukuxM1i21+mvOYEx42ks53lOk45wxKxx5B59mn4S/uIl23HjFYJt68HiNyCKHdUH6Ys+kJAVYIJiZiqvc+QKTy5LEseOqpdJ5yNOM33c7Gb/6CoV/9+WE9zz1AQ6IezT43CM7gyKIdRxhdzFyBfG+k55mIvfm33W0/U//Nv7Ui3OnHmYmUZ9pvps9z/a7xQmwm6QLuPhey3xrPpCFBb80+P6Ik6A4P3nyQ5M0rBJ1lHDk30nDNE/QeY7+S9NJ/PKfl9+2HrKDj1MNJD15JWrekazeSTIyg0ghbr4FnH3ZinhHCeYWYMKa6djNJLWbBMx7HwHPPYuN3rmP9p69k4m9rH+6znAuac3w0PifZdx1MqTza2DljXisVRTPm+uBm2n+27R/MoJhNlTLb8aZL93Ptp5X6pTlNaXNJNI0zCA4zlbh/O+7ZNAo5PGh43e3YmkslbrPCBCiJmai5P8sFdKU+cwdzwHsPlbx5uaCzI3NVq2U34gCdzo8E7FeSPuytL2n5valF1OsThBu3YSIQ1iKkbLntgQwhJRhLdc0m/M4yvWc9hraBPh741s/Z8sPfoOvR7jt5eNEg6lGclBzhyKILJ1E36iX67FpnsYFmiXQm9cFMKocHs32rbadjJul8NqndTtuuOQve7vqZvu/0c2l8Z5iqlBPiVEqj7FzeqlFZ5UETdPuZJ9D/z+eRP+VI8n+5GxHGjB66DJHGiEIevWEb6XiFjkUL2Hr19YhygdpvbqGyegPEKSbavXvqOf2Cd6ySPGmpgJqFiT3QK89jVuxXkq49sLXl98KCUQaKBRCPPHKeDiElVhuqax9AGcOqt7yQgfPOZOuPfsPmH1w/p0H+MKKRjKkRRl7D6UQbUnQBZ+AKcOOlOb+0mNYa38lpv7f6HnbW1U7vr/H9bMea/nszLLsS6WwNdi22uqe/N7ZpdczGbw2CTti5tuV41pptAA+K59rPPIH+l59Hx5NORihFWKmR6+lE1EP8Bd2INMZKSa67HeNJvC0jLPo/r0TmA8z9m6hgKNy7iaFf3UwMDH/zF5jazpL2TkZBAYxY7LzUvE9xQBgOHy0QSqFrVWrrNhP0dLDqLS9k8T8+mXs/8Q2G/3DbPjuOHwQYvbOAJaQkTfYq/qRBIo16eA3vjwJTBN2oPO4ztVyH1gS9J5/3hpx31+f062r+eyYC3R0J07T/TL/PtP/0z40w/IZqqVGIuEHMDS+avSbo6eQcb9yGSVJMqUBarSPCCD1RQ6QxKEWaaowArxoSr90MxuArhezvIL+gm8X/9hLi3i56XngeY5//HoPfv562pM5Xj5Wcd9C8UXB/Y56k9zWEACFIxyqkYxWCng6O+tDrGL7xdjZ/71eM/OmuvepWSklbdyeLDj6Ickc7STMhS4FnBRObt7Nt21ZGR0f35hCWqSV4hJPwvGmtufrK7lQVM6k0ZtpmNhXH9N9nUrdMv57m/6F1Ka/m7WdThUwn5FaqjZn+b96uod9vLg7cSHT1
oIyDM5GzkHLP1IlSYuMEU6mTVuok920kHp5ALehmwSWv4JAnn8z3rvwEfdSgJuaNgvsZ8yS9v5AZPqOhMaSn6Hnc8XSfdiwjN93Bhq/+hPE77p9TN8pTLFi2lP5lS8gXCijfIw4jglyTG6IUmDilq7ubrt4eRkZG2Lp5M2N7R9YwtSxvvAmaJdvJK5zhu9k+t/p+tn32ZrtmzNVTZC7bzWY4nEtf0w2QzZL5g8I+I+eZkI1lPTJOUgnp62mjryBgBGyOeXLez5gn6f0MIQXWGOobtyM9Re+ZJ9J21ME88NWfzKqvVp7HgmVLWLB8KaX2dpI4Ik1T4ijatcCvFFitqdfrKN+ju7ubrq6ufUHWDTxi3L/+ntD++OPpf+Wz6XjiSfuHnKdDCIS1pNWQamIp7fsjzKMF5kn6IUKDrKtrNuGVCzPqqzv7euno6aZrQR/lzk6SKKJeqUz1M4trohACay31eh0p5f4g63kcIDjqLf9E+S0vIC3kSdZu2r/kPI+HFfMk/RBDKImuhZPGxaM+9DoGf/dXJn76R4o7avQdsgLleUS1+k7kvEfHmCfrRzWOedOFLH7a6WzZsoNUgNRmnpwfxZgn6YcD0/TV/c96PP1Bmfi7vyOuhxhjZpWY536YebJ+NMDrasOEMdJTHPqy81h87mlMrN2MPXIFopADPa+NejTjUUvSFoGyKSVdI29DlyHIE1gpQBtSK0g0GAvWgLbiIbd/NFQg0bZhzNAIOkkQNrdPCHqn48xC1ls2bqQWhqTxAe3L/XcHr6ud/KFLGXjtBbSvXIIeq4C1dNdjKvdvBE9xwEblzmOf4lFG0gaLwdoIbWISI9qHRFv/RJpva9OV8rLRjZ3lkpeLB9rJj6axVfGYBxNSxmOerW8zaVQJY0OCwFowjzKv/F3IuqebkszhH7KACR+2XHcLE6M7XFZB2MUXex5zh9fVvkdBTMJT9Dz3bGQxT+Hw5bQ97jhkLsDr7cSu3Yzf1wWpJr5rDcLah42gnfBjCHQ679XxEOERTtLO9dQKjSZBWHWYZ4Ozc3LJ8SvRJ/31kOeuulg8tyMnpeinxrGD93L6+ps4triaM0/upHrswYgtIaYzNV0yGU10cfPyBenaNJq4B8utnrW/H9fBGoukaB9k+NcBhGayRmuUknSvWIRdOkbPosWYKMVvyzE6NIhOYka2bkMIsbfBMo9ayFKBvuc9GZEPsMlUnvHiMStpe9xxTvqdc2eSwuHLEb6HHq+hxyuYOCFcuwk/zMhem8yR7+FjR4klUh6JVPh7/g73gTfgEng9UpduAXA38K2H6oCPUJK2WJmirQATPE6mhWfldflpvsofkytKCsonJ1PiXJG69AgFDMocf+g6iS9X/oHz7ryWf/3UlymdnFA59zD8RUrKQdGdatG9qDc5WujwvDCsQTxugrD+Z61rP4qsvlJosxr76Al7FUJgrEWnmrRaI45CvHyASMHP5Viw4mCE0fQsX0FQLjCyej1aaKrjY0wMDfP3mp1Blgr0v/Bc+v/pqRSPXonG7qQX1tUQPV7BX9A9Sy/TYCFavxWM2YmEDzSDYCmqc0/vQr517Bm8/Be/aJ10f2bkgU/snzN7SHEX8yTdCo04gBCNyIt618sLquuV+aBwQoDECyQIjcZQNxE1bRDxKEoKpBDksbQJQ5or8JVznsst9xzKZ7/0Dnp++ztG/ulI8if42HGP2oSHyJeR+W4oCLmgvXJKmlRO8Ytr3m8q1W9OGPEfqTV/8h+lthqjNcZodJqSpClCa/xyCaEk7b29FLvaSNGEw+OMDG5jcNMmTPr3oRbZiZyPOphkcJT6Pesxgn0n3R7gemaJpate4wsnPoELb/kdhWodW1Bz3V3jkkftwdvrgMS6h/JgB9ZrekYIECHaxr6xfW8pq5X39Kpl/9Uh2k4o5EDIlNDGREaTaEusLamxxMZSSzWVRDOeGkZTQVKPOGzdbdx28Cpe+NZLqewwtH3sLqpfGsVSR/aF2Oow
JhzERCNUdYDndRN3Hs/VK/75wkN6ev84kEu+nQpzVGoeRTqQWaDTFJNq0jimPjFBGifky0WWH30kh5x2GgsOPujhPsX9ClnMM3DRczn6J//Biv/3Gvz+buqrN5COjE+mAfh7gUXQFVa5s38pV5zweJcOah77FQc8SQuRYqmRxp0vDDjyro7yYZ8sB+3LhIqJiaknhki7VWJqLNpYDBZtLaZhADQWbQ2JNlRSw7BVrHjgb9zRuYw3XfhvePlBvF8PEn5oG3pjBblEY5MIm9YJ4h2IaCOX2sdwiziUxxWLnNmz8HlHLey83c/ZS8JUY439+5mnmT47jWPCWg0pFMuOOJyjnnImnQP9c+pC+T5SKqTKmpTus5Q7t+x35c1twSeEaOrvwTeVC1jwimdx9DX/+XdPzjvD0l8d54snnE29v4Co/32spB4uHNjqDhETx/5xioFPlHO9T8r7YEWVKLXYpjQIU8KsxWYudY1sNlJIpLBTBhdrSbVlVCgOfuBOvnPoE3jyMedx4a3fY+vwYdgPbCX3hi7UcXn0Bkkga/zZO5hf2QEOCdexSSo6vBzHl/pFf1D40G12+5MqmudLzfDeXyegjUuGIXdXgbMFpAAp9pw3suySUkmsP+cl607QaUp1dIzuVYtp7+5mZMtWtq5ew+jW7e4QUlEslih3dZFGMaWuDtoW9BGNVV2YMZDGKdKX6NAlfxOewmpN0F7AJBpPeVQr48SVKrWwSmV4BGum9E1SKrr6FtDZ2481FmMf/PLG1iJUfwe9770Ik6bUV29wP0y7wQIQ+3M1JXDeHADWIrRGphojBcLu+fGFNogH6VdtEXTVp6TpV/z82j3VTc9jD3AAkrRACo0VFerRoreU832fbC+WQNYJU5duy9GzxWaDVyAm6do2+LixDWCziWWtq0bcgDbQNTbIF459Jhfc/CPa2kapmQLhR7eRe1s33glFWFvhz6qbuo6JSYitoWZSitKjzytz6gLvyXdE4zcNjsdnWGO37dUlW8D3HLkk6Z5LaFJAarqMYaEQzN0FQ2TJjeNU2mqUAhMIhgWkQklXjMHs/lyEENQnKpAauhcvpHvxANvuX8foph10LhggLyReMU9ajxFKgpR4fgBSuKWcFUhfIXTKJElLiZ/LoUWKMNDZ04vs6kUXfEytSjUcZfPqDZQLbfQsXES+UCJNE9JEI/eBgGt9g0wM4ZYhZHfZOdSrXReeBgpGsByxn6qpCzwj5RZgFAs6F7gVou9JIznYgCfkXAsDWKEDD2F1AoxhGRXWGmEsyKaXwdz6mpSm/+mvv91T3fQ89gAHGEkLhEiIU9WFv/Brbfllz8jnUxJdwSSZ2DfpU2CxNiNn66TQxlLccZxwk9U672khnNHDWCYnsZaKhePbuHPgUH6/9HjOWnMjlQVLkQlEH9mOem8PHJywfask9WIqNkVJiyckaUbWXV6Oo3Ltq+5sH79h/RBH+9qEKifdG2GuiFLsIYsY/d4foBKBv4ePxV3Pi0B8eo9zIUnFxH3bYfU2IzwZ61hvR7AhqUR34Mm/SuwvPU+ukbuRtBueIvWJCspT9C5fRrHQQxpH6HpIWqlgEoMKPGQgnA929vyM1iAbftnuWVntjJdGa4SFJDbYKEaqMoEXcMQ5j6NrxUHU7t6O9D2i0CWjN2YfWXRzHnpwjNpHv4f/oZdATxt2x8TORG0ByfFpXvx+f0nTFpBtuYsF/DdArXQQzr3IFmze3ojQPXN+K5mE0YP6ATTG1MmpbcKKtUaKOzDmZivlL4Sx25yEM/sFPaTS9O6yhT/KceCQtAXlRRhhjkrjVVe3l3tWKlEhjg1OdW4ntzPWSdENYm58BlDSZdW0mTwthFsOWsAIJ7kZaxHCyd/SGELl89fFR3DWfb9DxBraBDa01D62ndIHJcXuOuMTmlClSGPwpEIJiScMsUnpkDlWlTpWRguib+8Ymng29Qih5i7OiYrB7ykTHDxA/dZ1qC6fPRqBUwuKvYPbX2LJ
W2uWIViW1OMzhHvJGa3t9V49/rpQ8qtSyXQ24brhgx3X60T1mlui26Y344NBQx+epkSVGp7vk8QRnv/gu971WEBnifSaP2OTFP8Tr4CeNmgmajt5WvuVMKxoLl8kGgfVCJtrvcdsfQEIJYQtWyHLFlYixJNBUu0rh1h+4ofp10H8QJjdXdR+kKYbhNzQW9ZxrTnbdqNCZJ6shK+kUXv00UjcB4zhUOUS6iO5J0mW3NTR3rYSM0GcglFFUq9MrMpEskzktZEGHZhCFybXRmosJk2zCEHQxkwStrFOcjYA1j13g8AiXDh4NiJyacTazkWARAEkFtEv0MMGPjvGsuIwY0pS1yl1k1LXMaFOCI0m1IahNETXEg4e6H1WaaDtObpSR8TpnBu1EC/vkz9iCaQ60y2LPW1Jc3b8PWpSODUE2XERSE8ilER4UqbV6An1wYkvxPXkniSMXyWEQOwL0n0QsNai03T3G+71AQAB8qAFmF/cQvK2LyB8D7u4F+Mp1wIP43n+viAGK6bUclNfZkpnJYpIZ3AQxkw2LLtGy+zmYQtjEcZgs7SjTqXlmhUibz3xD3Ep+H69s3CjleLZMtFOh93icU+Xpvfa00OQFWXTMKZhnYZ7DKwFkLB4AaxcBYcfAYcfCasOgSULQXmwgWxbDaPa9dFc4O1RgANCkva9OiPjS15ixxd9paM3IBGQ6BJpVCOuPEBaHURXRtHxBMKCCkp4pQ5yHQvI961A5fIk44MIa7BItDZIKZBSYm1DTy1Q0ilLtBAonIHJvZQNI7my041agxESYotcqtB/tDzmmnvpfc6ZTKzxyPuaxFo8afC0ziRqSWos7WGO/kW9n1ldNz8wnrdHwmNayCHPOBL/13dgxmqIQrDf7veeQvouT4TV5uBwtPY5UY1fWu4uvk766lbClEfVjGhGRtTioAXoX9xC/M6vkLvkfGQhwO5wuTSsEmNpW3BTtsMeuzlYAUZJIVPzgEz1YdpXxzWIU/sK66kdpW2jPxKJBiWJ8iVcXdCWb4ZtGEaYbV67R5UT1nYDpUZgljNAWlcjRlisFKdaJX5QW9D+A2HsxSI12yytpOQHIU0LQFmoGNiCk6aWtyNOOhNOeRwsOAQOPx4WLQbVXF3Agolh21b4222w5W64+Q/YO6+HNcNgNCwEOiSk4hEvXT/sJO0HIZXh8kXarvrvck+Jan2E2tgmkvH1pONbMJVBdFgBnUV1aY1JEmySIGWA3zlA5xFn0Hb449CVEaxJQEi0cYUvpHQihMVibKanNtZZxwFjDcY6tQhGY7WdfBMLaxjtK7Pyu6t5zCnr+d+uVRw2NkgqJYmBVGikNXhCooRG11JKhdzC3gXt/5gMV76jgrkv/aQvSAPJWD3eN6qBfQ1rncQtPUyiH1fdUb05iPQrvbbCl4kfxeHiDYn64AHMb+8guvleSm8+j+CsI7Hrh9CevDUtdz92cts9gMlWL7mJCGHJJ3nvdw1pV/sKnffD3Hh8hleLVwuTEpfasMpDaN3yvSgM77LwJWZ5a1orEMKowli9I83nlyU5eaw09hSj5DOtZBmWKelap4Tdbc8RmJOC8fQCDDftensyabpvKV878Swu+sXPdq+bFrgA8R0aNgJ9AnHuc+DJ/wBPeCaUO3bfgczBwuWucR68EERYhV//BH75fexvvwu3pbAY6FWuxtAjlKwfPnWHtQS5OmPRwCtHxx/z39KGDG2+gR1rf0J987UkI7dh0xFkoUTQOYDfuQC/ewFe9wBBzyJy/UtQ7Z1Eo5vY+NPLGP7jD1ClTic1Z+oObeyk+sNaJzk3cga5zwKLJJaKvuooGENqcU7XWXGjpEPBYMIFP7yJenfgfiezq2SGr8QaYqOpmpTQpHQqcQn1GBumc25mvI4sFwiOWYapRQ/984A5C8QyUFiLqg1NfCkZq71DPNozsjnrNKK/Ax2lTHziR0Q33otY0Y/11F6Qs8AoiR9r2jaPUxipE5f8m3TOe4zQFisFxlN0
rhk6p++O9XfnR6vkxiO0H+zuGUVi6oxbNzdkU2nsDix/Fdp+Raa8ToXpSq8S/6O0/Np40umuhUBFKTI1S+KSulHnxNO9yLkAiuaWpPROjHHVIY+BElMvkVYtEBBpuEUjdBnxhncgvnsPfOoqOO+FcyDoWZAvwbnPg49/E3HVfYg3/SvYDrhVQ6zBf2SO0f1L0g3H3aYmAIkhKKZUt/U/f3j94v+x5hbGhq4mmvgjUo6iSh14HQOochcylwNfuaYUQk79LXM5cr2LKCxayY6//ozalvtQxQ5sg5hppCJ1CmljnVOeNg3jtSPvUOU4cstqwGKEdNtnGyltGF3Yw5Ov+yOPv+9eVnf3oYzThWbdTnpsG6Bej7Bt+RPl4p7j0/YiurM0p5aW8shDF1E8djmmEu77R4F7aaV616ZN9hKL0skX3KywIJTAK+aobR75SFqJXuflPaQnkUpO/v+oQkZzsrcd4UmqH7yK6Hd/Qy3vg0C5wbC7LqQgzfl4iaVtqEb7tgp+mDC+sP2GNPCOVVEK1pAU85QHKy9o2zj0WysdUeogIA18p4ueGXmY27vWCjFJnNY9qlSl5rsy0U/IVeMXI+SIaazojJPuTcH+pLKg7fS4o0za1TbZku52CgXFHUccxV1LD3K65BG9axvXcHcKm0D882vg6r/B2z8CKw6ZwxnvIRYvh7f+P8QP70K89GJYD6xJIWitXz+QsX/VHS0GlBHgSRiZWH7W5i3et5T8OcYbRIg8XtDn1BrGYLVFCPdGd4Ytg9UGpIbUGf8wGrRBFcvo+gRJbYxAKEc0jSYEWoDMvKkzATjTP1uqfp7u6iiPX3cLxstPab2sBe08Q+pFn85thn/5xU857+I3UhkOKFideZa47aVw+m4D+BIKRr+4OjZxixfs6nrg+jZIb+f6rsHYBHbTsOtTijlN/LlCW6vb8t5/C8GwtrbQOK4Aa4wlsfiylOvV9fgYaznGpFoIIRCeau2OZQFhyXUVCUerl43ft+0Wr61wg7GZh4dpvAlndQVp+rvVz2LqDdgCjZfvQwpjEJ1l7GiF6ke+TyHViBecjp0IYbTa0pfaSic5q8RQ3jqGH6XofI56R4F6W/BNrTjdDxOsAB34eLF9k5fYb2tfuSAlCdWeTqySMMf0sXvjcGKly7euIvP1XBLekOTVD3TOO1ammYk9NVQG2q5lNF7hSW/IZHMAwDOGsb5O/t8zXso37v88Ih+5821GZQJx3LHwr5+EE06b20mND8P6tVAfhyh0zzvIQa4ES5ZB36LZ9+9bBO+7HPGMF8D/fR32ljvhSO2MjvqRof/YrySdVHc29yqTEhXKbGbpoWNj1Wu98h0oYbF6AVYZpxP2jCPH1DgDiSArtCoRQjurtNAuYb4WWGEx9Qn8zl787iXo+gQWi8zc7CY9PXBE3XDzMELgGc39/Sv5hzuv5ZAHbmOo7yBEmoDKlu/OHQRPawa7+znx5tt5x5pf8Z7Op3D6+HoMMvMPdhNCZTYKnaR4pdxz7bj/Nu1Ya/IeWAMy5yM7c0TVCNtUhSWNDeLQRahiAKnZp7ppF2hpX9dd9F3W7ez9KQTo1DCmJfn+duqbRxDGriJQz9PaXqzjdKmUsjXXWlxdPc9S3Tp2dU6LxSgR2SyTm/P22nkXIQXSUyDwMGYZcBDQZ41tx6WBFFgSYMRoswHDPULJ4YaH1fS+piEAVrB3HDUXSMBgzBrRWU5tpU79/12F/Msa1PtfgJ3mojdFzpriWI2gGpMbniDqyBOW2onbc5+wJn2BF6aZikPgxeZ9UppPN5I2qTgh7CgTtZVQe6j739ubYJREhvHawMrH1srB762NjhfavXC9MC4lHld5tfBMUyy4AQ0Ypejf+gA3HXcCw9fdQbf2J0PMJpFE0Lab3EpxHa77Gdz8C7j9T9jR1TA47gyaaXZBHs5u1J1DdB8OxzwOnvgP8LgnztzvqWfBNbciLnkt9sufgyNSKHlM6i8PYOxfSbopO5oVoJIU
kxOdY3Lwt6p9OPB1CaMlQmqEkVjj3IKMzFxppEGkAistQhgnVWfSlZXO9QwF6dgYHYc+Cb/cRzK+FaGU84UGZBNRaxxRuyAXS135pAhe+/tvgVDEUuFbjdV6kqittZBabFExvqOLN938U+55Qh/fDo7h1GQjqZXZMVz/SkCSGjylDlLF/DGpsbdLJbFZ2LcxmYudkoRhgk30FNnUh/HPOILCr+4gvPEe1MKufSZNKylkNdRLtbEPdBadH3bj/TGp7ojTTG9j7/PKuQ96ueBT0faxD2ht3yxl6wAdayxCKfBtj0iTjxaU9yabpCRSEkuZeY6JLGxdHJ5G6TNSXTtdR8nJWLvYPRKBidLsZSXcikkI4vEQga1I37tBYH+ilPya9OQoKeg4pbi4m+qa7ZgoQbgAoKW4XL/7c0GbAAdhzCZRzkPgYa66Ceox6pMvxfa0YUcqmMCbJOfceIhKNMZT6LyHTA3aF+9Ep2/14tQJIViMF1zm18L32pxCGI2qhQglqPV07vUV7TFRW4sVgrQQYPLFuh9yRuqJ1VbKRSrRSCPQnnh87MsLVZx+08ipI+SEz5ota/nRohN5aduSXfvOz1JffO3dcMXn4Nffwq7b4rosAmWgkymdNk0XFEbYe26FP90KX/sM4vjHwSveAU991gwHUfDh/0YsW4H9wL/CqhQ6PEgObKLer4pDEXiIwIPAw/MUcXsbQ0Wulrlqf+AVsL5C+ALhK0SQNU8hpYfw3H4EEuFLRCDddp4C30N4CpEL0PEOcv0rKa04k6Syw1lGTKbWsDsbDrEWYwzaWoI05m8LD+Mlf/4+Z9z2c7Z2LUWm6ZSGJk2ncvta5+lRMR7J+hyfGfk+z6jexY3BImRTeLq1UysoIQyyLf+SuvKIlSLK5zF5H9VThKJLEu/cBJuatQgpKZ57gltFpPs2H6qnBGFiGK0l0HhZzQCTGow21aCcf0u+FPyzbhgzW0rUFulJkjB5YxKlS7Q2LvLPWGxqysbYVyf15FepNn9LY/3xpBKeb41dihCyscoQLVQf2S/lNEqemkTpp5M4vScaq71BBR7Cgt+ep7isF51MCgM764/2D6b0V8aCrxArBzA/vxX99q9h4wSZ8ynuqNG+eYzijipSG4znAi68ekzUUXhZUvA+rFLtXtwSUP73col4g/ADJApdLlE7aBHjyxeRBMEeS9HN2JMbYn0PqQ2ljUOU122m/d4N1Y77tj/LZqH6bskoSfLqP9vuXS8771pLxz3r6bhnPV2rH6D91tWMr/7b3A84uBHe/Srsc4/AXv4f2LEtsAo4zIPlCjqVEyUbFyFw+lLfgzYFixUc5cFBYG+/AfvKZ8NFz4Edm2Y+5mvfhfjPz8AaYDwF78BWUu9XkrblArZcgHIBWcgz1lG4qFbwz8wrHxsoR7yBhECCL6eIOlBIz0NIz0lIngRPIfxsOyXBD9DpGCrfRudR/4CxBh3VJ3XR1thJwa8Rkdj4348jVncvY/HwRj75gw9Qb19AjAspNo1gGCEcSTcRtfI0OzZ5EBf56sh3eMHEbfzRH3BJG6zJXgxT3oLlvHxt8YGtHfbP9yLiCInGRFlhxVbeEFKgB8fInbqK/Kmr0INj+9wdz5PTiXqG/p2ymjRKELXoyzLVr9bazjjhhRAYBPVa8tZw3BUP6D64l3J/W0ccpZ/ViX5Cw+AofTW3YJhMenLGSIXRtj8er3+6NjhxtbFWSN+jtLgTrxA410mn1drfmb4jmoXThi/1wf2kV92E+tyvaE+hMFxFaOdKZzzlamsaQ7W/4+yJhR1f8hKNSA1JPsCL0j+Ut1cukCJAKB+JJG0rUz14MbUFPS1tO3uKOY0iIRDG4tVCSht30LZmE8UNW2lbt+3m3Ej1C2nOc88jTtE5r7860PUCXfBJ2gok5QJxOU/Xgn5+sPVuaukcXipXfRV77jHYL33eScsnKuhX7mTHUrhPwz3aGSI3A1txBsB7DdyXwmbtPEWscSvflR4cBfan
V8NTT4I7/zjzsZ/3GsSHPu7WXbFuPR8PEOxXdUfY3+n+EBAZkatVKh/PaQ2ezCKcFAiL0CbzcnI64IYeWhiBtRJUZoSybtmMpzDV7aigRGnZs0Hm0eEIQihsarHK+fRiRabDFpjGkkwnbG3vp5Iv87+ffQldY4OsWXIEuTSejPgymTpYZBMLACkRBQlbQkaGuuka8PnMyI/pJOXy0nGcmA7hW4MVTtqIU0s5r4ody7uv2r5t7Eki1ciCv3tbRaaLL557IuFN9+1z3TTsTNTdJb9l91ZbVEFRWtGHTDUd5fznRh8YfnJt48g/euVcS9WHsAaUfGlxZd87VT5ITKLxc94mP1A/i0Nz7oOScTM/7aAQEI2Hz9r61w0/6ZXq6bIQ4HeWiAbHEeohcvtvNaEF0FFAxikmSkkChTIWKQVxuYAVhritcHiSUz+TSYqMNTrvoxK9Oj8WPUkZ0L5CZm942chbUgv32dpgd6oP7Uvy20cJhkapDbRP7acNpU1DlySl3MuQQqFBakPYU351MBFegXHqRwAhFdU4nvw8I/711djPfw6WAycpp282GjYBo8CKEuLcJ8GxJ0DfCuha6MK/J4ZhxzrYsB5u/R129e1wn4E+YAGOrE8Ee/9WePZjEd+7Do47q/U5vPhtiI3rsJdeBo8xkByYRL1fR7XJDIfSCkJpnpdI3aYy94oslASb5VsUMrNBCOOatCDNFBkI57wHCSYZxi8uJtd3Nqg8SbgDIZRLSSoA7VQdsqn0kEWQTyO2dC5gpGsxV33unzll9Y2sWX40fhJlHh/ZttYdSpjMs6ThqRBI7EiKGIwYX9FJ0fP5YOU6Smg+UTye4/UwgXUka4FaJaKtv/OJHaes/MZIPX1Rmlq725we06Tp8MbV+1Q33UCDqEdqKW05ubO6IXPH8zskxQUdCE/i5X3C4erFFa2fywzjRkiJhe5otP4EaiPXinKBXG8bXnvhyrgWnzvrCTWOv7vEPtYSdBaobRl92vBtG97SeczS/5CenPTk2a9wYyTQUaKYdjhTCfGW9uE94Wh0pb7LblrJgTSvfquiJCcyUrNKbS3smHi8itO6LhaQ9RCvUkUHHmCQOjPw7sMSWtPVuo0vbRaoFPf1EPd0YlVTIJa1SG2HpBXfM+jnCeFI2ip5pvXlQozd0rgZqYB2T1KSM1CL1nDROdgf/QpOBDzPncxYCutBPOYYuOT18NTzobN399dzz11www/h6i9j/3oPLNHQ68FKBRs09h+egPjZLbDq2NYdvOtSxM2/wd5+GxymDsjKi/uVpBuZwZS1pNJegDHuWYrM7muy7I4KQIPBkbayjpSky5TmlHYp1g4DkqD8GPy2Y7HE6HAHUjk1oTEGKTLCse6zU3safJ1y74KVmCDPDz/zQs675ResWXYEUqfT1q5Tjh0uiZNTCQiYJEo7lKCwVGWessrxb/U/ooTkI/mjOcmMIjPCSIFaLaa3v/OfcmF85Mh49V2xsD/LWVCzEcpDIE1DRtSxxqQGWQicx4y1SCUpLunBby8QDk956OT624fLE+G3a1tHX+gVgxbuFmASA8Y8sdBZutaWC9BdxivlfiCHq5+z2nqTLynrXBaFlBhtMGGEkBKZ8zKvkJmv12pDvqdMPFr72MTNa7+IZMymlnQi9EXel80v3Nnhjm8z/bn01G79xI2xws/nkskUixnt6W0j5C44HbWiH7NlZOqWGAuCYlLwfivitLeRtCgt5at+NX68UXJ7XMpjPYGcGMMbrqC7SthCniCOAUmc95D7KLufcJdN4zysFBBrvDhBiBSTGczF9JJoBrzUXBmX/Oc1EuXowCPqLD0hN1y5wnhZ9KTKs75Q5KarPsWpG9dDroli2rrh+h9jr70BThFglXMQuNtCTiLe/UF49Tv37IIOO9K1l1+C+Mql2E9dAqtrcKiCZcqpS17+ZPjlA851rxU++R045zCXN6S458FJ+xv7laQL4zWEBSNFIemQZ8iGMIx1YrNoTt3f
GIROrWhpOJ1roIJFI72lqPyRSNVNGo1lrniKhssXQmAwCIQjZwsqjYiCAmuXH83B2+7je19+Lcevv5N7lx2GNNpZ1YVEZknzp4yAmSRuwAjnzieFQQiDGXH6NiUMNeFTVF1cEt1GXeb5L/9gTrajpBkRh8agahF9Qe743p7gpzuS6PqRSv3T1nLVjDfuIZKmAZQUxKnB18ZlC4wSCHxsFBMPpc7TBffS8nIe0ppvCylf2HLxbLPU1qk5IUZghiswOIYQYkQKrjVSPN1mQULCUwjEiDVmre+rIdHeXrXaYMKk31qO1HHaJYRA5TznDz0dQmDCWMmD+l5ZPmzhJ6p/24yN09FwR+WbwlqJFLt1KBZCoqNESF+tlkX/nLQenzZj4VcBaSUi311+XX6gY6ttSpxvt44gz38s6lknY4Ym3PCWILVFWU2tK/9La80qmTbEb3uXivQpKkyqSEmal4hUu1yPvpdJzk5N52u9z4U7K1xEI4BKDPmRmosSnIWdhLEYT/0uLnZqK4QS1hVkFqk+IZgIr9AZGeeE4I5Sgd/89FJO/e6d2OZiPSlO93y8cgETJoXbQJx8Knz4f+DQYx7chb30DYjHPwVedwH2zjvhKAWHSewfBxEffhW856ut91t2KOJVr8f+52VwvD3g1B77laQrhyxEIDGYktkx1C1Sp6C3xmTEmqk6dMNDIhv4BhARljoCH8FipFyKZAHWxBi7A5DZytg4aagRvJJNMqljjFSs6TsIckVe9puv88mrP0BXUuFvSw7DT11yfSsE1ulZJvN8TL42jHFEnR3HmCxh04TOluXOH7tuFSXZxv+Nb+M+WeQ3qo9j7TixdSE0daORaUy3V+CIYsdZg/n8WesmKh8Xg+m/iNS0TmtqLDLRlJ9+IvEdD2Dj1Hm27AcoT6LDmPrQBG2HLEQpSRoljkyzVYrALWqCrtIfo0qUmlR7LQlNCmxqlsWVEGss+ShCGjA5/6eREE/38v4vhK9+LpW4QUp5R3XLWKXUlyN3yAB6IqR2/zZsPmjLd5eeE43V/09aiw728kELCdciA59oaOI5GPuJwsJO/I7C1uj6u/8JbZEzqpXcasGkBiEtft7HaJO3xj5nRo4SLv+1n/f/3fflZ7TWkGRjtRIiTliJeMkTnUdLlLhiBtqS5j2vsqDt69aa06S2TbplsV7F8ZOEMSFK3ipTu81JtjufsxEC37hEXqmUboX2YJGFyMalHFYpRGpIy/nJPCKz78s2Yew9VoojAVRqiNsLK8eVh22633FfF6WDVsLKO6FP7dSBW24J5z11K4iXvAI++vkHf10NHHw4XH0T4h9Owd59FxzpwVEG+8WvIf7hjXDUSa33e80H4bufg+EY2g8saXq/endUlGLcl9SlGjZRcrdOE1cmyjaShOLctKyZivATdawcAauQ0SGo6LF46YmItB2bDIGZyPbPJF5jMVq7sWctKo2JLGzoXcrGhYdx1obb+enl/8yXvvJOEi/g9gWH4qVJ5gHSlNbUWHT2smjMBZu57DVc+JwHh8XGJvMBd1KhRFMVCkSOy5I7abcx2wmQWSyxtlDXKaNpnZo2FI0kH6i3J/2dC+sLOgn7W7SBLqq5AH3qYchFXRDu7yRGAh2maGNIrMUomaXklJNN+x5aym1Y1s7EF8IFvvTVd1SKE1vHCQ3kFneQ68h/y8Mele8snuOV8p8AcaP0vYqQAmMsyUSIDuOGAXki19f2tbZlPSdLKe7XaYuEQhaEr9C1+DFezm+3iSYdq096jTQHnTY3HafY1BCUc+TaCwjf608TvT4Nk+Nki5egkMJVlNHmk3459wEduyRc1lhsnGI9hffc01xQz7g7vgC81BB1FN4UtuWer3Z1pXya9tTVcVvu50nBW5/m1HVp0X8nsEymZpcKKb7ZtzUEhQXtKVSSEIQhRlmE1bttUidIrddZF17r5q6Ui3TBR+e8qVbwnMthQzifbG61O0nQr3/LviXoBoISfP03iI4ybEqh
7LxS+Ow7Zt6n2IZ4zj/DA9Ay2d/DiP0qSbff75zSpVQmkf5LRtLofxPiNik9pxPzXPi3SQ3Cxgg/wdTakLWDkPEAmCKGOsYM0QhfkzQ8LgxCek5/Zg2RFoy1daM7eiAOedL9N/PiP/+AC275Cb613LlkOVYqgiRyqUjJIgSNyQyXKiPlTLUxKSXaSR11Y6ab1J23xYDVCKuQJqamSnSZKq/Xa3ivdwTdxoX6CiA24AvNtqjmSC9J42q9XhVSzah+FT4wWIF60jLceF9CZOGSE6u37mZDgQrUkFSyZcIFm2qEkuUFZx3eIfN+TViQO8YQW0aHMHZIZ1kGTZRihKBtSTdJJSRZN4hXCAh627DGEm4do9DTNty2pOd54xuHb24l2QgpMNoWdKV+hA3jmyzOFXCXvMyASRzRe6WAXG8bxa4i4UitfeyuzTd57YV+0eTvPtm/J0kmQjD2K0FH+W0CgfCzvCQWbC1EHrsCVc5hx6tTqjycIdwP41uTMJg6n0Z1gEygyATLnAnU2VaKs7UnP2AVX8DwXpmVYnORsQbPmH0nTeOObwEthJM15rYXIMazqejyUktZSnM704gO/F1Dwhu7SwO3gHjdG+Ddn5zj2Vq4+05YfzekFQhTWH40nPTYmXfp6IH3fgpe+wroN86P+vrrEOvuhhWHt97n/NfAFf8NoYb9tGrdG+xXki56LieyRJBI8ac4zR1nrX5jGEZnouxBpu6VpGJ7PmdvNHHhyGTo4CNVfTHYACuqGLN9ytUiszbbzBhoEcgkIREBQx3L6M5pnrTtXs7681X8412/4ZD7/wIGRvt62ZwvIq1F65QUUMKpRQyNYiYWm2bqBCsnVS+NCDjTqAZu3d+STHdtNFYorEmd+58JQQS8ONnGl+UyRoRP2aYo4fTTiXUFCNPqGFs2DX6dUI2rGZblFpADnaS/uAWzcQjR37E/H9XOB57JU2Iqm9TMc9qRiFClvPCKAboeE0mPBInf045ONL7yQGuSuiXfWSStRtjUGZVVPiCNEpLhCkQpsqftL6oQ3GnC+KhW6h4pob6jspAsiT02K0iQ8eEkObflCTqLyGJAUM5jxutybN3Qb1TeW9Ewpu0EIUgmQnJdpV/0HLHoZc5vPNsouzUygOT45SRdRcTg2E4T2wQKL0p+6UXpH9PAO1HqzM9UiMDKpnucGfGEtiCFF3XkX402z85F9pWe5SeNwwU6JZX7Lse4sAbje2h/T8vaWOP015OqwV2ivxt5XVA499kGAgk3G8QzngL//undH+rG6+Gn34Jb/xe75V4YyQ6ZAB6Ixz8dPvEF6Bxovf8zX4744vuw69a7oJf7NPziCnjV+1pvf8hxiCNWYe+5z+Wj3t8e93PEfiVpnbl3WASpTZGKtYVc8S12LIJiXxCk5djGujutt7/MjuVP9GMfo6poO4bR1nmHuETPWVCJR5oohDRIJYk9b0wE+pbXr/n6ukV/+XO+e/Pa5csnthxyc6Gt53+XraAjhb4kZSBOWCYMbVKyTflsty4hTOYy5pIvCbIETpkrmbFYYSeD4rLIZme8y4GRruqyUAprJIgEgSCSgrIe5Ww9xDe8JRxB6nyjrSHUlrg+ykRSu5s0eHtJ+LPyndQBZjzCWjMphO0vWG2RvqJ0cN9UKZvpEAKMpb59rM3qqZwjO20SeKBtdesP/jyu45Sgt438QAcUAnKdJcJtY5g4PQQhTrbGHlwfqixMk7RshZBJLa5rw7C1Zp30vDvjevKXoBpWlbXXaSGOavk6kwKT6NLk6UrhKsRb0GGCKgTk+trwyjm3EEpS4vEa1TVD16S1+Ligs0izEXDyfliwsb6lfaD9aUFPmWSk5lY24MaAEihtSMcqJIHnvGOy0H/3mgIVgzTmLITtx9o0Nx4Kq/wSJlkQtxWOkFqfaDz5DCPFEoGzzcjEYgUD9bL/Y5HoVxRj+8VUKZTReNaQIjM12oODlQo/jPCiRnzA7o1lUhuitnwhyfvI1GCVQGpT
U8nO6iglQGzaATcDi5rOdUgjTuyET39v9gPdegN8/P9ib/qFc4nrxhkcuxonAqRgf3QNYv1p8P1boa29dV/PeAm8//0ur3QZuPkGeNUsxz768fCX+2hpGH+YsJ+9/5sfvBta0ZhH0etDJfk4SvNPSoe9j0Sh/xghQ6yskKY2c4kyWCknyzrpWIEwBEVzJ9K7ulCc+G213P77oBqN/+sDX+OnNw9z+ZLDueeoo4sTiTgDXXs+RflyopDFUcoxccQpaciF8QRHeZINskDNaJQQWfxMw80ucwsUDYPh1IMyhiyUUGBxuklp46wEuVO8JVhyNuJx6Xa+7C11RmwL9SjF6oTUilERdJ+cs+MVq2OEbT05RC7APLCd+JY1iPbSQzBeLCZMKCzoIOhrx8Sp0y83La+lr9D1uFTfNroCbVouCV2OEjEUdJUqFkvb4YuwqSYeqeej4cqr02r4QpOak7O0gYRh4sp0SUESGahXaUQZWiF2mGr0PWvMkWImdY8FoeROJ2JSgzGW8soFlA8ZQG/aQeWBYdIwxqaGJDVfNJanBl1F7HR9sRDOdVPrtcWu0pkm0VpHSZbVj0yAFKhNw+icT3TEcrASL9aknovYa5bMrRAhrsgTMnvxY8xqoe1vZWLw64lvlHhBUsq9x/hqlUxc4V0Vp9Q7cl9Q2tyaG6/dLKQgyLkKPnOtDT47LAZB4vmYnL/Tc54JwoBRcpFoXlFYsUUmZqfgFbFlC+KxT4UPHIUoNkn/28fhBS+GQtvMB/nvD2E//K+OiFfiUkNM1sDLYHBS+ulgf7sO8ZGL4QNXtO7viNNdEYJUQxns5tsQOgE1wwpi5WGZLr0RVffwY/9K0jIPgJUGrXPoHZ2YusDvt+31Uf8LtW35C6SKQVVINNjEZDfHkaTAoEMw1iMoml91dI19It8trtmxYwFSGFcNRSvW5weodVToVZJxm9YmaubaQ+L02mBB/2WbxfjHNwXpEzclRX4WpnwqLPNv9Y28SEXkit1UdEqCnbwRDQOkkOwqKRqwKdhOidUpVgu3vCZCZikdnKokYYUepdum1AzkJZQCDyM8tLZJ4d5tFZJ0Vj2z8iTp2ATjOyawgb/fh4tQCnTCxJ0b8brLmCjBROnO49T5ix9njW2bydPEWosKvAdUewEZRihfEk6EzwyHJz6FsQcLNfXibdgCGpBKTKq1sp97onpykfDk7q5/imGEwIQRuf4O2o9ZigDSRENqkEIQVqKP0Vb4Z1/SkqCt1iDFUL6cf7yNkolJ7mpo3QIPGSXUHrOK+hOPxyztxRuuUhgaI2rLE7cXkUlrFrVCTCbTbwSQYElUrL8mdXhVklNfTAv+86S2Df9q6iX/isKOicOEtfhJigp8jJS7GBf3BDYr3CqswYsSV+0lq3vYeHm2grC2EAp7RFbNFuN55IfH1pa3jGYBOCCFJNi+nuo5L4N/nMGTYib85yXY934EjgM6PYity7EgBJPVpCcvAhegfyTYa7+JeNPHoG/xrn0uPxwGJFQNtAObh+DuW+Cok1ufw4JDXXYWY0AcGHrph1SSFnjgmZXjG3LX6EgcKlSF1BhsarNqKJllWzqpWUcCKVNKC8RrpUgv93MJmACj3QC3VtCsELOQpSi1WAV5K/66IvWfpEgvj317cdFTbPVLvCNYxcahO3l+vJVlHQuIjCbElcECR9Qyiw0XOP33ZHVyCfQIdKIRKRjfBUca7TxGpPUATb+YoNtG1IQiMAlSOKNnEKg+Dln8wc2+914jScRME8JX2OMORtxwD+KXt0LvDMu5fQQhcBnd4tR5LUQp6eC4q8Li9DVIKUmFeJpVM5BmppmS2NuLEjCa2pax19WHKpdJJRF7kchGentoMM3IpnxwP1YbTOJcLYUSJHX9Lkq5t7dM6JdZh+PxsC5z/hmilNuUhonL8ucrVN4ZqetrB0mtZeLjr0JZS+7eTdgsSKI4OIZViqQYIOeaHEuAVRKZ6mo+tc+vthV7rYmfKKxAxRrj
yUPHlvWeJ5X/I4zBhqkLv9/L17YFvCjBBFmWRyVJfYlfixEuCU3LroWxGF+dZFW+XTRyG1iL9eQtccnHeo5KIqtZ3D/AWYWePTuxX3wX+76PwElA3oPIQk7ASOpydiwC2qdlrLNAG64+4s2/g3Ofv2u/HWUoBjAcuqx6FWDLAzOTdNcCt53mACgu6LCfT8NO/ic8C8KsjEb9G0Uc9wovIUkyi7rWbnIBSIlEo2NJHAVjS46ceIrfqf+0Y10Ok5PIGdQD0yGEIE4TiGKWLup5zTYTHVmNwjMXiZTNXsBVrGLJ5r+y2gqe3NmL1RCh8TIpw1i3vENkvtJCYEKD6JSIHoGupkjjAhasNVjpIaxFW+eal7dVAh0z4ZWxNkZb8D0PsEyEw+8SQ/K/lZDrZ/LssKnBP2wRHL2c2g//tF8l6ckVQyPIREps4BEs7KLQVcQiEJ7Lyje6dvCVTtXRyrDoPC5kPvjfpBZhjDi9unn0Mq8Y7KI6ad6n8e3kvZj8zu58fru/ENJaRGGgi8KCDtJa7CIYBaRx+uq4Fn/Q7yjsGhSUGQV1PaZ9afcTZeDdozyJ8j28go+phKS1mImNI9RWb0X1tCNqESqKJ/3yrXIFKgpD49j+TvQc61taKTIjtkF4OfzI/FMi2IAgaBhxjVIvVKn+0eRZN1yG9hBWSmSSkB8epzbQPXnftbAI36MwOEZ+aBTjO8m62Xjs1WNGVw6cp3PteNUIJMhU44/Wfm2tnEz/WI8jDmrv4dSFB839xMI6fOSVLtFsMZOgczj3ubY2xEufh/3992FiFAozjCN/BqPqRAXqscs03lBXmVncWYMgk6Tnfvr7G/s3d0cWlx/4EWkqOsJt6reEYa8JNDrWk7rnSf2PdAk80kiipUjKffoM39d36D2JABJuyZ1W6uT9fvyBLjp6OymE4T/fPbL9/ricp6ueslm2k3T3Mjw8xLVjiqe1d5Fo0MagMlJu5HvCGOcxULPIJQG2U2DHU7ds1G6CS2UR1nPXRExdedRxeYEb+vgwjBkb3cr48Ni7emvFB5QBK1uL0sJTMDxG/fo7oGuWPLwPEjMRoFSStBZhogRvoAtVzjP+t02XpNVwwGsrtJwoJtXIvF9rO2rJz+Nto1TWDX1RFYIsTH9XYjRhivBlIpX8EYgbLWwDq4UQ3RJ7kEWcaiyPZa7+/FojPEXbYQuRhQAlwC/lGRmpPas6XP1s0FXeNaOccMNP1yI6j1z8D8Wu4o02dMY06XsklZCRu7YQjtcxiUblPbyij25lbJQSkUVUGt/Dy3zbZ5MrVJSgwjTbJsYfTLfR3/7jqL1wvopSN+6S5FgTJlnAyZ56Y+x8raqxdGt67sI6o2DSUUIlGpll77OZr53UBl0qEna3v0xmKVONr1C1+OZgvL6uce0AhTimbsaoJjGlmYhzOq77Lnb1GBwrnKTsCdieImwJPvcHOPQoxDeOwL777XAEUzfUEy4h09IiPPZJrfteew9sM85wCJnpaBYGDkNnrNy/C9c9wn4l6fzICNaX1IMyI/fww2Q8XCjyMSYEYTVo7QxNSk4StDGSKJKsWDF0fl9++I7Ia2cw7cBOG+nOVCeoEaBTiENDGFnixELg460cQB6zDKskO8IET7avKfv6t5Xhkcd7BR8jBUnB59hywG9rNRbWPI4rtrFDp06KkBIpLNYKNG4gi9CilklXhCAxWM+63zKXKiO0i9CzIVtlJ4OiQLcJMcLltQ6jmIGOBSzqXzEyZFIzm01dlPOYLaMkGwYR/v7Rje1WQrWuZFl17XbSanRUPBF90Cu1zoBHxsNerL8tx6oxvne6CdPDvPyuQ8wFiCT4pdwvhBCvsWl6PzSpHDMVpBd4eG2FlbXh6mdtqp+8u/SmVlv8tgLECZW7N2NSTRomJ1e3jV/tl/OtJ6d1HoVBzntNobt0lc4Md2mUEG0dJxytkUYJUklUzkPEyaxGXJv5aRupSIvOJiNNJvwaC7Kx8pKINCG3YwJVj11B
W0CFCWnB/03YWTrfReMarJJ9SXdbwSpZV5UEWUt3ivCbCwSutqjNznGX37UhzfmEPW34oabe34mM0+z8LUlOvt74os/p2wXa9ymNjH5BxSlpYYqMtYCSCvZM0L/u+646o1XQSBmxDfjUf8KhR7ltVh6fGW0zq6EnIEphDYivXAqlGVj1gTuhhrP/ZDERzJT8CWBkW7b9nlzA/sX+9ZOujwGSzRvb/m18U3pmoRxhY4vQWfWTzEBotQuzVlJQi33ae8KPdrdVf0wqEFl6WZlJnBKLtpJNtFOnm+VsJZezFNs9ujok99Y17Xh0+JLKLWuxSQoIPA1+T+7Psqv8eFOJ8QV0+BZZkAyguC2qsszLEyhJSqaCmQxqEZjYIgsWcbBCV1OXqE/j9HLS+XELIbCkoCuszrUzRI4Fpuq8R4SgVCxQyOeYSOL3x2H8P1bKGV/pIhQYNCIfYOv7PjXXnFQIQqACDz0eHhqN169TOV8wk9oiq9Did+Q/6imBHqmeb7UG4e1MagLSekKuu3hnzwkrzhm5YyPJSDJ1PmKqPx0lJGFyvzX2N8JTT96dB4IB8oEi3TpCNF4nHKweEmtznV/OIX21Sw4QIQVJrMl58l/z7YXPWuu8U2pbxwjHwylyDvZ8mghrSIs5hDb41QQdSNKCD0YiRB6pNdaXTBzcP430LVbIUS+Ms08CYfH9KM4JSd2Y1sE6s56LAFf7ZTcG2Gwsa5vgD43SuW4rKk6o9XSUB49b8SGVJJN6bK8eV1RivhT1tE+ejxCCTeNVLj7sMRTnKkUDbFybkXSmD99h4KheeNYrp7Y5/jTEwV3Yv47Asdr5y6wF8e43wXNePnPfN/zI9S1c5TMkkJ/Fu2T7PS7HyKR318OP/avuGKlRqXQeUtuiPxAErlSU1QZrnKpDSJkFHjhLc1hT5NqiOxeelntnVS7DOn6lPaexSNJhj7QcMOYVeYb5M6el63msWsfyUzwWHX8Qi+u9PGPU8O0/R4zsSFgmw8mVUT5MWNexINpR7qKqPRalYxwhq4znA7oQbExiNsQhRxaKjBuLzFzzjDEoJWHcEbRYpDBjGpEZE6WUCOUKCkjlkjqRxvwqWI6yxuUFES7TW6pT7h8fjtV9Q89uS4TRs0iG0lMYJRBKujSS++aRuAXs7ia5cH7jUkmi8fBZSTX6igy8TqFaE7SQgjTW+HClUuLukZE68XB1lZpBL2vilHxfx6e8rhJthw4wevM6V7rLdzkTbGrw2vOoTqfmSUaqveFw1RnvZpg4Rhu8vEeho+CeSyG3wHr13walXMmlwZ22o5LEY3WkJz/utec/pFPNxNpB4npMOhEivbmTs8DVIpRRMrUUFzAVjw5eakkLPqIOUipEGuInBl3OuRdcdl+FhVTSaYWLE8j84+uioqvCZMZbT83Zu6NB0HOGtU69AdT62hHGUlnY9R2BKTf0zmnOo2Pd4LvLmwbDpMnFzloojY2xQM6QbW6mQwo9pdASwDiIU6cZ9vJF+I9rEa9/IXbTasTiw+Ff3wIvuGjmjrdtwP7u1zCQnVwdGPDhiBnSlgLct9pJ0WIGYeRhwP5N+p9rK21bn7ssSQ15FTk9nnZLKKSrQm2NBimxRmJTzbLOkeeVt/hExqdhOFHK0DOmGQq6mDAdvE98j3cEvyE2lm2qjPZ9inmI64rnHBZy+NKAi+5eyUYMgeckj8DAhEyXBTZitNjFU8fvZ7lX56+iA09rSnnJUJKSGOvyc+CO7QrNGqgZvCMKGE9gEzO5YtJaI61ESqdOW2TGuNdfwI/zq1icjruXMk7Pbf2Aom77Qi4X/kHk3MphJsjOEtHv70FvH0cUHoQecgoG2LA7gna5Jyw6NU9OqtEbotQ8S0hQLSRRtwOufJVUSakj9xo9OM5EPIrK+W0q7zcqpkzBgl/OUd8y8kC4fdw94kQjcj7SUyhr0KlB5gOCQCECj3D7+ON3914xYULnIQvIt+epbh4tVTeN/E7k/AWtPTlAhzG5
nPdFFah/SbVBhynpWB0VeKjc3KeFNJbU6ldOLOzZ6IXxzyzQqHDSrL5pqDucStgFvWgpII5drFbmeiiMxUpxyuTtkhIZ6wcgTYyXRcnuL4Ju7GctVkkmlvWTlIJPWMu5fi10ahxfAXJ1cfvEpxCgmtwN62nC0lyZp62YhQRbHc+opmcknCR7+NG7bnjMSXD97Yjb7oNjj2C31tOvfBC2A4uzPCKjII46FHpnqTB+1w1QggNGjGY/k/QQPS+aSJJzfD2O0SYrR28z/0MzaWwQJqUe5ehZYj/Z2W3vSgbrKMLJfgTOCt4txjh58H7OVrdRLRa5X5dRVtOdhsTGEKeW+8Z9zujYykcP8fn32ikYnVIQhlRbL0lGnradPpaP7uANyWq2FkoEESRaUEglkTbUnJ8sxppJKd9OGESvQB7lY8bcNTSM31K64Adj3QSU0Qif6n46g7LEsniMVPogM4MkFispja1ahMzUbzNBLumhfv1dmOEJ1PK+ffFWF1KKV1jLGLs+d4PFQ4gFWpvD08GJs40xh2Is0peT3g8tYUHHmo7+4otKPcXh4SjFJnWXTnsGZY6OU3KdxSPaBjp/Vh2pElVCdgpWyRjZE1DbMvr8pBKeoHKzSNFxStBRpPPoJdTv2sjYxpHrrJCrpCdbnrfV1uS7Sh8vdxbfWd1RIanGCCFomSO7FYTAKoWMEnSg3pgEfCrs6Ez8SK+ysKEwNM7uS/CA9QToFJVadJZt0UJeK549qfWREj8K/1wYrqADn3pH+6S74G5Oca8IGpx+WhqDxXxaxukbhHYVh6zn5kPXfVuf5UWx06s37JAIRmsTnHfYSXQWint2wCNPhD/e0jA0OYwPz7BxAMceufs+t27Afv3zLiAmFU5SnwCOPGXmfe6/Hbv6Pujh78e7Y/SOsWe6PM2pi9QTmYEQ60rlYEFKkhBy+WSo/eTef9nu9WCafCGFgFgLtFI89r6/smBskO397ayyI7uExzaecRgrlsoaank3g2tGWTWyna1l8S8b2nu7StWIKyaup6QM60QRX2uML8kFAh1DlFpyWcJ+ZUAqix3R+E8tYLskbNYITzRly8tKJOGxqraZm0qH8vmOkzksHiKyzlghDHi+pDdfpJqkF26NkreJVAzNdu/M0DjBk49F//JWzHgdUXjQeRsCY+zMKccy9ySTusg6IYRjScusxGWsxfflh3NF/8o00eT62sn1dxAOVbZG43VkC5WH9BThSO0dflv+G35bfnu0dYzmfCFCCDxfUp+Injm+ceTrqjzL8tladKxZcPZK/LYiD6zedrWO9clBR6FluDcWpC8nrDb3DG8Yvtham5ONF8TuOS3AElpt/ksaI1Nf/rcN5MuFkHhh5Bupfu/V4qcKbe6ck34qu8/OCGjwaglxR/5T1hMlkTpVibAGm8t9I+71pk5wN27SD0aCFsZgPHVMbUH7p42SZ8vMW8VKQVwq0LZ59MXdqzfdkxZzztvENNwkLRO1Kk9c3kIC3h2e+gL48hczTlDQA/YX30dc8j+zX+hseP2zXf3DjqzaitFOjXFOC1/qBn70RRgEFkknzR8g2L86aas7ZJpOJuUXDXezRoY5bRDGEOY7Wemtv2jx9/5gqrLEpH8suPShnsfKZQW6fMNooR1hZ+Y3N9wFSWLRvmDDQQcR7lArtwf++1aOb+V7237IUjPCXwpdFKOUyBekqcXzJSiLSZhUd2gsdtwguwTy1Dx6xBW9lEZOXoIxECNZpEdJZcArFp1PSYfkdUQqnfRnrIXUULMpBc/zVRieZSzfm7EILGB14pz6fW+/JPtveePIyLnJX3nGba1L7yqMvUz53rvSekKwuIu23jb8wGNEbr2xtmPiJa1IWngKE6cDlY0jf8x1l/6PUOLHQskdCCmsNl0Ie0ZtcOKfklg/T5bzs+YtsQaCtpzxw4it19727rASPyvfOQNBZ+dutW2LJ8IvCCV3KrE2FxhfYWP91c7b14QTxyz/J6skYiqZ/2Kd
U39KyrlXqVh/o5Hobi4QLv/069KcvEhmL0rjK1Rq/pKvhDdYT5IqLzMkztirdpc4x6NasqRUNlPF2IPjcu719e7SW4ySqFg71ZensJ6kbfPIW4KJ+OtJMb+TDlkgGA2rHDewgrMPPmqOV9yEU56COHYF9t51sEJAr4S/DcP/uRj+z3/veX/vfBn2plvgeOkIWglXmuvYpXDqU1vvozX2x192QTNmH1mA9hH2a/5LU68Lk8STrk/WGGyaujj6LDIwDAXdavRXPaXa90ObY7KqipAgJDWtWNQuWJhLqBs5pwEocANvu2yDtoUHjbep6942dqN386ZvcpAd5+58D0VlUUrgeQLfA+UJPF8ghEWbLH+0hGQohdNyiH6FnXAuTMaYydzTMZIuXaOcjHLBwPO50+vl4HCQ2OKMhpm0pLVlpFJnVIcUrFmS1yk5nczY8iYliKJsBXLgDBohBFZbdJTi5/0PqEC9wWqDFBBXIyYGK4xtHsVI+Z3J2oPTYS0y8DCpXl7bOvYlbVmXhsnqtBauTqJ0fRKlP0jC9HnOq2c31+5c0WztL2tItowc6rXnZ1bNTEFKT809SKb5+gtBwni1Lf7N3VEecRhSbrVZmldhLFbJQtRR+HpSCn5opDhDNBkF3bU3NZxqQWjTnhT8/0jK/mVC26lVjacobJ94VW7bCPlNg3ijVcwMxXYtIAQTSkzlap/ewJGyTFKnppD4RorDUfJVSTn4TlQK7k1z/lsAvMz9TmeugfmR+svzI5X/1L7aVbi1hsGwyotOOZf2XGGP7ykAr/mok2KNcX58qyT2S5+D979t7n3s2Aqvew72iq/AMUDaoDfj+r7oIzPv+81PwN2j0CcPKFUH7GdJ2vPt1iQGz3NlqoTwQFisSZEIUpNDeIwtslufp6ylPrAQ0XSHaqmkv6AZaIuotgiztTipuVECYDz1Ca0lkT5jXqF42vg9zz8y/MXHXrL9Tz2rxgfZ6rezWeYphimRl3nZpQKlBNJzpaSUEWhlXYWRIY1c4uGfkSfdmmS1DsWkmqNuJEuo0GmrvLT3Aq4OVnHkxHoqno+HdiHq0s0GkXl41OoJwsrtCDHrWBBWulDbfViE9EEh00ub1CBga7Gj8Fpy3vfjCY2UAisltQd2oBPT0NUPeVJ8IEn0u/1CsKtka11NwQwlm+pVgEu2JATCcylHdZS6/N7BDIbLjNBkuYCM03FCvV/qQTbdByPygan/5m8Ezz55gz/QebpOwmt1oFZ5cQoGhNEkef88Yex5UsjfCsOPpLU3A/cjGAY0ki4sx6SF4GwreKnxxIAwUzk70mJAfrj2L/kdE3/RSiKNQAcBs40aK3iXgfMtonX0k7Vo5Zlaf4dvMF0276/QOX+JlQLj+QhjENk8Mw2vIssthe0TF3mx/lOa29WALaxlNA45YsWhPP+oU/f+vp7zj4jzn4C9+jo4RUEi4TCD/eInETf/Al70JnjKc6Gje9d9tz4A11wBX/4EdtOgC4qxyq1AC8BfDOKZp8G5F7Y+dr2Cvey9LuDFZOrYAwj7laS7lhe+tH1dckGapHhKg7Qu77KSxFpiknC8MJB/vJDeDm0TpJ4K16ylgv58ylGlCGPETnXeBJCiSFEUiJZJqw9XgmWntA8t6fT0MhPHqw632464YsvXeoO4QmQ7uK80gE4MRWuIlcDFzwiUcol9hCfIG7CpxXggQoupG4KXlrEKbM2iPetyeUhFgOVoO8yYVjyr8HR+JA9hxcR6Es8NDqsUUnkopVDSrRqkAG2lTnOFn+4292gxj61mwT4zJfh4iNDwH5ZCoArB54JS8O/Kmu1Jtp43qUZ4ks6TVzqfWWMRSiCU+vftf7z/KdFo9dSgrTCrhCu8XfXCaZTiF4OfCCnipJY8d+aSWA8hLFDOY9duJ/3FbXhveNrawgM7TonLua/EbbnzZGpcytHUNKThxwOPN+5epRSZAJuC34EgaCRLkqkTN4ynXNmsSvR/vSj9uPE9VJISlwtE
bSWkbq0szUbTSQYxe1YjJTH5gpOmYdITpXHjrXLGQZnqMRHzMant/5PauEorrYKBhGCwo4+33v5X8j87B4JZUvRtqsFrLoJz/7n17x+5CnHH0djbNsExyhHtURq7/na45JVw+ZsQK0+Cgw+DfADjVbjvNuy62+CBBBbg6hrG2YMqAHdr6OmAj/5g5vP60KthYwgnKheSfoBh/waz/OmWa7pynW/Y2rXiUhNHzhqe8yEWlIL4j0rUX5KY3D1m2vqplgr68oajOiMMgtjsvMKq2oAS9WetYOIdBS89FYtnBRzfWUUngnps6QbCXDvDXgc6MeQSQyLdC9rZLyVSGqS0SGERHhRjicY4J5TNmtxzS8jDPZIHEvAkEkGXrrPYRKQ25Sd2CW/MncSatJ2B0dWEwiMpFfH9HJ7nYbXGKB/jKaSwSM9D4L3CT9LRXbJ6NcNa5MIuop/8EbN2K3LRnJLV7DsGE2T+7DQqtgx5Sn7L7yl/Tglxu4m189aZlsVPFXOuIod2D8wr5mhf2PHEkfH6VWmknyozv+8ZPRMadrHUqXiUtd9vW95zfn2s/ploRxXZlp9p38by5qGIEwsQIDoL6GtvheeeSiHvj9hK/Ky0mH+JSvT/TQr+CqGdv7HY+cXkIbKsyBkvNsjcSrKwcj2Yn4jeJBDfdBW43YZxe5nJ/ab+2mX+7nYQWIuKkoZ1EbCT2fkEIFN7H5ivBlH6PzY1W9NCriFR7wKBZTRf4oiwwvOu+TpsADubtmMMWHsz4pSnQ/eCXX9v74Tv/B7xgrOxf1wLx7tbxiILVsN4Ffv76+HX109dbA6XZ/rI7CY2dNCk8FcQ3b3wneugp3/X4wH88jvYL1wBR0lXTOAAxP5NVVqLKSYjly0t+Jsq+Y5/jnO5VYWkdk9HPP6Dgk2/sj3fj2kK95ZYKlqRyykWt8eMmNxkvU+AxEoMil41dnFJDF1ekAVi6YwpWKiaHFZYrA+RNi7trDVY6QaiEHbSwUQ0mgACyFcgXxMk0mIeSPHPzKOeVCDdosl5GmxCkMCE8viqWG6v9A+uXiOXjKKTaGEybo1AicAUdDXstF6ST4MAz/NQysP4AcoP1nk5///6hq9YZR2RNUEI5zFS0bEzFm7ehvjDPYicc+Gbwwqs48E+L5dmVSCwE9JT90gpbhSe+o0Q/FSluiJ8hYnSqYyA0/dPtZOWs/BbXU+QUtTy3eVzwzC5SGLfbrQ+JHM1QzaRvImyyjgCvJx3m1Dyo8J430AoTD05bTch4W1WGzC29BDo7wUWj7YC9v6tcNVN2Bc8DmOqeGH6VRWlV4rUvNIq8VIdeCcJaydTizofaOnygqRZ7he3vEIYO4zly7nR+se8erI16i456To1RG0l4mIe0ZCi3SX6QO/eXYGYirkxdljAbcryR1WPr/Wi9FdxW95aKeYQei4YzLXx1r/8nEIe7KmKWV8TAXBLAhc/E678U+ttBpbBj/+IePsrsT+6GtpSWALkJHQIl/x/J8OendLxi4ycNwLbQDzhifAfn4eBg1ofa93f4PUXuv4DCemBJ0XDfiZp63nEKqAjHv9+rux/f6y9jY7NWyhToWJKOxG0AOpGkZOW5/VsJqcMNb1zGGtsFUWZLD0tt/lyTYFY5RDSTpWW0w2pwJWbt5nlWQqBkVM5ooWwk9VWBGDzgvxGCXVIRzT+KXly/1AgN1QhjTwmCO7uUfaaO/yFf33TwNPXbKiUthf6u0dW5baMV/+6WieiaMu5RHQedmwwdufWTpGP26Vo7zA27NT5Qi6H3Ka83C1WegmNoqIZmUghSIxhazhObDRHlPtQ5QLJ5h3cef1f6O/tc7S5e5b+Hm541tlzpZqwxhov700IIbcl1WiD1+6PBjmPNHZFfo1JkXvqZZLpsXWq8Qv+59Dmf0SsnyFy3hNMnB5hjR0AfKwNg7bCFh2ld3ql4Jf59sL/xhN1kkpMfesoOkzeLANvKdbWdzmCMTmU+r2/coBo69h/2E2jvyTv
Vffw+ucKD0gEDIELOOIXt2BPPxTR3+HGnifD3Hh4mVHisrijeBxCPElFybHGk4dZKfpEakoSK6wQNanNJlnXt1tP/V5q+0OkGHf64amQZGEtOgjASqTeSZUQCstzLBSxzD1vgEAIhFZhOibjdEua9x6QQlVUqpGJsx1ZKXZrPBNYRoISR4xs5nl33+TuTEPNMBNi4AiJveHPiH95KXzsK623K/fCZ3+AeM4P4Gv/gb3jNzBunNRcwqUnbayZLBDifKCrgA/isGPg394Oz3vJzOcyuAmefxbW0zCgXGrUAxT7PWOqAFLpOSk4NSTCI8HbxWPBWBg2OS5ZuJrX9P6NiSS/y+O2CHyRnK6EICSH17wGEyCkmCwmKkSWa1rgJNGMkMXkPziNQxHUiMCug/qOlLazJeULPMIhtqyd6P1ue1ldWRtPfifzOSrFIkN+G4iEg6ljpWAcA9JgsVYpP5RCbRXK26pkgJAp1vOy9Kp2p6V6g5zX1kbxhOQ5A4dxWtdSLlh0JL6QJIcnvPXC1Xzxs5/l4COPnpS0Z8HqrO01hBSTj8VVYc+qqe+D8WuNBW2t8NSPvVLux1YJ4kqMxSKtJd9bprajAg3X7OyFYBINlut354ck8j4o+VeM/euDP9s5wOIq02gDyc7FEYwns4T+9laluVVVEtJSQNSRlz23rwlUlLDt5MPj/FjFeJWQpK1IIypx1wvDGfSme4g4V/6r9/b0hclypkMm0IhMrTHXlYhgsNDGW2/5OYWRENsxR01TKuE4g/3aV50jwUe/MPO25z4Hzn0O4q474IYfwC03Y7fdBtvXQS2rlKOAzjbEymPh8BPg8efBmefMfg4b7oMLn4gdG4TDlCsecADjAElrDQmSXj9mgAp/GO6hZnY9tcRI2rzk7sd1DWKtmSoku5MNbmel3eQf07nGWGTeJfWPfx8RbI8pn1+gdlbPDYOjPf/Tk4bfXqc7w0NFSE6GeCLFs5qyiahZg7Zml5y7xjhJBOsqiU/+bafmsBSC2NpJcn56/ypetfwxPKFnxc4Xq3y+cPmlRFHEN770P/jtfaxYvgxjzO7Ieu+xF90KIdDWUqlWKWRh9LN2Y22m7955K5No96a27PQyay4qOyuyoIt9qJnfPax1RJ1FC7Y6thXOGGedsGBkokOZOA8UmyUrekjPuYGMmPdyV0aC4pQUvSded9aC8ODEFPuNLyJ2bIb//Aa0tfDaaODIo10DBBoeWA9jw64vPweLl8y+fzNu/i288jnYcPgRQdBwAJF0KjyW61Hu3mT5i2lrKVUkRmBs8dYFh43/elVX7exaUkCpKZ2UNTYTR+wUkTUJsDbbxqYgShKBJv5uFVEV2Ff1/tKeUv6PLWs6rql63QwUNlIQERK9TxxyGuS8pjqC9IKZyXkavv7Fz3HSicfzh5v+zJVf/xJ+ex89Pd0U8nmklPuPsHcDKSWxMWx4YCNprc5JZ57EaGWczo6ujKgP3OXj/kDjPTKexhRMSkG1iBC1uOIAsxlPmyCFINIpE0mEx9zeVXOFFILQpFS1pV0WZn1PuDWpoZJEWKWRccimfIm3/u2GPZOi3SV0u4oaCk4w2F/9DPHME+FfPw1PfdYculCw9GDX9hSXfwT7yUtcQdpDHxRBP6TZpg8Yko5QLLQVFlJjh8q3HDRCwkio+M1Q7z8t61h/W1GGvTWdc4M3W5a7cG07lYDMTkmz1oK2AtUJpfGQtdfEjCb5X7a9buDjoyvLP7eDFfw0IRfE+ywBlgCUkKypDiMLeZ7efwivOujk3ZJzM978+tfy5tfDY09+DH+6+a/85rc3sHbdehb099HR0UGaPnQxrFJKYp2wfsN6UB4XnP9snvi403n2s5/JJ6/6Kpd/7xscuWIVu8v9/GDPIUkStm3fTjg2AYHCLsrTpnJ7FaCyt/CkohZHbK3soF6zoHxOyHUwUhvhtvo2pCjTLnN0awhTS6wVuThExQnjOqagY0hCtoUhvV4epTzacYXmlJCs
qQyy0hoWFMrcXBtjQAV0eDnS2ZLW7wZSCFJr2VAbYSEBC4MSd9SG6SGgw1jyWQk5KQShNUzomPUTg/hCcWR7L0J4+J19nF6f4Hl/+I0rNTX3t4cF1gK9WBJSCceCfWA9vOrZiGc+G974r3D4LPk19gb33gLvfTP2uuvhEFz9xL3XQfu4gl4PGQ4YkpZYIhRp5nLR6hZaIFAWX8ot6+Pe03qZ+EaXrZ+SIolFkEUDgtEWq13YsrFgrCDVlpzS5IqW6uaUG68TvxsPyx/Nndn5o2IhQI3GKGv22dJTZG1HXGdNdZR/PPgxXLzysTyxZwZL8xzwlje+DoBtW7fx7at+wPs//DE2b9rMosWL9jtRO4OrYOv2bUT1kKc/5Rxe/fKX8+Qnnj25zQcveisTlQpf/9nVLFu4CBF41CsVxicmCAp5Cn4OM70ySgtIKYmiiOp4laIq4ysfwxQ5r1m7Fl95/NOFz+OEY49FKMGaiS186iOfoqyLLFji3Lv25yrD8zw2bd5K54JePnjsuaSdRQpejhf1Hc49I1v56eg6Ooplbh3bxm+23s/CXInYz9NTbEd5CfhF8DT9pQJvXHIkPx5ew5AJWVvdRFILSU3APwwcxb+dcDKHdS/ki5vv5MNrbmJtfYxluVnyIc+Ahg1kTX0MH8kz+lbxrqWncHhbH1/afidf3ngrm8eGuLc6zqLSIjbWJihbxUGFfi446sl05IpcsORoAuFWb34UEqy9GfvNn8Fh1qn9dn+7a8CROKJzVtAIWOZDIcF+7mrEY0/Y9yR986+wV14PZwP5B+0LrXiIlSQHDEnPFRYIRIovuO93I4tOPait/trFcuy1gYmP8o2rJBIbRZIKF4ChDTJMkAgmjKeHN+qf3Hdz4X9uCNt+fMiCOh1hjIh8KO4bF1tnlxQMRzUGk0EOX7aK9xx2Fm899PH7pH+ABQMLeONrX80Jxx3DOc94Lls2b2Fg4QBj4+PoVLNjcJBGwqKevj6UpygVizupR4Rw2fuqtZrbZ9t2RMFHSIEZDykN9LBk6QBSukg/ay0PbNrEQGc3n/6v/+KpT2pdrujSt74HJSW//utNLOjsod6XUNI5tuwYYsPfVkOhwMKDFiNTQ4A7XmPZIqVEG8P9a9fSHhRZuHABgxOjrLvrHvoXLmJoeBglJc95xtN53cWv4slPesJOx17WtoD/ufxL3H3XnXT29ZHL5UjTdE73Q0pJvV4n1YaxsVHCinMQyZdLdHR0orOXYC4X0JYvsGnzZrSAr11+GWcd/bidzuPE3qWc2LvUfVgEO1bVyUtnLBcHm0m1hzaGglT4UvHmZScznsZctfkORqOQXC7HG5dP5VR+/bLjOb6tj5fc/lM2hBMsLbRNCrAC54xR1Qkaw2BSn7yukvLp9AtsDSt4QnBe18G8buB4nty5bKrvhcfzqgXH8MfRjfxqcB1XjN3Hib0H8faFJ3Fy12LyrULRvQA++FPQz4QrfwKHybkSdZi1KaTWGfeXAu17WMB2LmjrynJycMCFfM8FjziShoaXh8Gzhi2i6zMVv+MzweiOM3q9+pPV/2/vzKOkqO49/qme7p4eZmEdlIcGDBJcQDaDshhcILK58VCJGgVREOOCLM/gjkHfw+PRuGsiogT1jWsURFRAFre4IQICTyJGJTg4DLNPz9rvj2+V09PT69DDNJz7Occj03VvVXV11ff+6nd/v991pR2XHvD3CATqD68LuAL19dauksycrRbWZy9v9C7tsKfs26zMerKq6vDU1hEIJCftWjVHLPb6yynaV0jvw/pwc49hXN5nGNmupNSDbsIpQ4ewasVSho8YzdebPufo3v3Jyc5m7pwb8Hg81NTU8FzeS5SUlrLjn9/Qrl1bOnXUQ1Cwdy9FRcUc3eOX5GRn88fZM/Bl+BTfXF3Hxu1f8eKrr1JWVEKnzp0p2LmDK2bN5MH5d5Hh80U9rz/PuIWishK8Hq+yLwMBvti0iVXvriUrK4sX3lhKyd59bP1y
G26vmyO7dMHr9bL1m6140j2cc+5Yrrv8Cvoe35svNm9mxVsrWfJcHmcO6MfM6/7QRJwdZk6fzrRLL+OJhU+zaPESysrL6dC+fdTrkdup08/WeW6nTmSkp3PhhPEM6NcXgM+/2Mi7766lba6u248/5rPt8w/xdezCqjdeY0hocfowdPQEzawF325B//ZYLjp6fFzZLXLC4LD2Xflq2CSu2bKahT9spFebXFxAQW0lRbVVHJ3RjhyXlxu7/hqPJaNjQ/keVu/9mrM6NBXnYNJdaZzSoRundOjG9TUn0c4T/Td2sBYsA8YRSEyoDQlgteQr4bGjxlHtTqdNjhd/2/aUZXYk66fdZNWWUeX2UeDLpSYnm+5pe6lNdzGodheD676n2IpcmrKi2qJn5zqGHVHGlsIssnI8uL1puAqLsLwu6r1tOKyqgPS6am9dPZRXu6p3dTiCjPJK3v64kiOKS8gqruezgnSOzKykUzcP7XpnUtE+jba1fgqK21CZ0Z5jvXv4qiiTbm1rcZWW0Dbdy/asTlx/2Gj2FNdzTG4Olq+Ywo078Ac8lPr3kD6gH93zvUzt05srB4wgM0IxnGTz3ocfsXrNOv4w9QoyM9vgCxJRv99PeXkFz+a9yONPLmLrBkWoHdu/P1ddMZmLLzy/SR+HVWvWsnLtOlatXcd5o0cxd04CxW6i4Pf7Ka+oJO+lV1j//ge8mPcSdZafsydcyDWTJjMyjAjvKyqmfbv483WqqqqoqKgkI8MX43p8jCujLWePHc2sG67luF696NCxcaRA4d5C0u19bNi4kXfefotxZ5/Drwf0b+YV2H8mbVrBMz99Cunp9G5zGFMPP4GLco8h0+WW1R7E3uoyOnqzWvaEbtwPoXZbUKwkFOv+h+Dca5J7bm8sIjD9cjgauTuSYE1bKw7cPNBBaUkHE0D+bJcVwEMdgUANaSq7VG0FII16vLXVuAO1eFyBpA30FhZul4v8wh/Bncn5fYcyovdQfjeqP9lpB3YVy2GDT2bY4JPDbvP5JFLXXT2Nyy+9hCcWLgJg2pTJZGVFX4X8jFOHc8apw6mprcXjTt6t4pzT1VOncPXUKVxy4QV8uuVLbp8zJ2KfRAQaID09nfT0poN96PW476FHGNivL2NHRyhhCY1Ee9iQwQwbMjihc2kJnu4ziuN3dcJtubjysD5kpUV+W2txgQZYsAzLWNQtwkEv0q1Fmstie+GPjOrZl1m/OZ8RPfrE0y0T6IK97Kb9WQBNqHxHC3vMsrIymXV94lZKMgU6HGPHnMnYMZFFMgZ90drSCc+4Z2Vlctvc/2rucSORi9b2CB6pLZQPV4lSuYPlqx5VjdiR6IHmdI1eSykO5gNlwP/s744AI9QthBHp5mC52Lzv35x1XH9eH3BRIj0XAx3QymtuNMPttT8rAQqB2fZ2Q2wsYAOwAJjbyuficBowCQmyQzUqhNkZ+AJ+DnsGRTrsBK4/YGfYQC9U9ih5GKFOOkakEyTN5YLyIjxt4IEB4xPpaiFLehbwOQ3RpS5UHOkXwK3IohqOxMcQnQC6Vt+39okE8QLwdxrXVAsA5wMTgIn2Zz8nodJ6MQclqOpFcjFCnVRSpKL8wYHb5eLH8hLwuVhz2u85ivhmwIMop8FycW7bemAfsBE9xHcBeRyYspuHAuuBb1v7JEKopqE2m/M7lyFRdHC21XEoStiCZVgXjIXtrZCuf4hhRDpO0lxp7C4rwl1VxYvjpzHkqB7N3VUsZV8A7AairJipU4qxvTkr1zanTyKDSYxalk1wJbj/aCT63ZI9SHr3Y5/NieFMVtxn81dANkKdFIy7Iw4sy0VZRRH1Ge14a8xUTgtXsDy5vA8MBJ6z/x6BhHsLcAdwAirceD+wMqhfNrLEOyMXShlyBdyKrPhI3AYci6r1Or7xeUSejBuI3DZeJAY1wMPAmgjtrwB+Y++/DiUzvIl89OE4F7kHMtGjXYbcP/eGaXsOsB3YZv9tAb8DnkcW6kygP5qw
K0UTtLMjHBdgsL3dKb7pBxYBq4Hx6LfJj9I/WfRD5+5D16EU+Af6zaNxKw2/ZSn6Pe8Bvk7g2IcDt6O5krbo7a8YuBOVw40fx/Xx6hvQM83JMzQkgLGkY2FZ1NX6KagsY9pFD3Nal24H4qhlNH4FHgVcBTyDklufBW6i8YM3DPmzuwJLgBuQUB2N3AFDwhynIxK/U4DX7T6PowfzSyRYoVwDLEUCfqd9Ht8CryLhDmUxcCXwCXAjEpFNwBxgWZj281G0wXbgFjQh+AES44/RQBTM1UBoRskk4FeolOdY4A37uy0BTiLyYDLBbvudfdw7gH/Z5zMH+D26vi3NDWhQKAPuQ4PGejR4bQF7dZfG9AC2AkOBt9Fv8QQatD5DA2U8DAE+RUbAX4AZ6L7LRvfXyIS/zYJlWMMHwtd1zsLBhgQwlnRUlLJclr+DLiNvwtc1fCxyC3ACigJw2IysuQeAy8K0744s6qk0tk63Aa8gYV2DHuTgSbb/tY8zOaTPu0jo8lCthTJ72zgkWCfSYLmCROQvSCS2ACvszycBvYEBIee7GVnF+Ui4nWWcf4Mm1k4EioLafwU8gsTjceDioG27aOzrBdXVfg1Yh8Q9+Lu9jizSGcCfg7b9ElgI/Nbe7nCLfX7rUHLxv2hZLkLC3BcNlA5b0TV4CVhF42vqRb/vKzSOEtmKxP4ZdP7fA29FObbbPvZ/28dy2AYsR0L/dzRQFSXwneCmJ+HLk6C0BjJdh6IXvsUwlnQMKop20Wv4ZAaOvytZu4yVqjQIOAOJskN39FDMiNDnIfQgRnIfPIxWbnko6LPRKKZ3ctge8DR6OINdA/OQiGwL0/7/7LY3BX12OrL6w1ENXIveABzGoIGhKEKfK9FiR9EI2PvxoUErHE+EHBckxi/TWKAdSoFL0BtGy+T4Czd6kxhLY4EOZoJ9HtcGfXYTGqwihfGtR9fiwRjHz0WT2I9E2P4kEvtwb2XROaof1uzb4KfAQVk/ozUxIh0By3JRXZaPq00uvSY+lawLFSByyJMXuBT4CPmIg/2eWUg0w5GLwvdiZWXMRJZ0O/vv8ciqjMbLNIjSOOCbGH3uR4+g4xLYiYQ6EnnIjeOwIUb7DWiQiEUpclVEYgtNJ/G6IxGK1mc5chG1FBPQNV4eo93ddluHM9E9E42/IgEeGqVNMVpzO1pK5cXoHk2cs26AnjlQYRzTiWDcHeGwLOpr/PirKul7wZJk7TWA1rB4Hk2+pNNQwKwMCW0Get1/IaRvGURcx+509GDHSkrYjazdvsBaVAk4Viy249METb5lICs/XJ6x8+QdiQaCXchyW4bcH3OQLzqYGrudQx4SgQ/t9u+FOc6uMJ8FY6Hv+WGUNvuC2gbQQAfh3xBCz7clnaqnolDMWCxFE6sqoq/zWhtHv8/QG9T7EbZXICv6dTR3kEfDtXIojOM44UlLg+IS3cnZGIs6ToxIh8GyXJQW7KTfkGvp2j12lbMEqEOv0zv5eeVFXMBe5KeN9IBaSNTD0R1NdMVDOfKrgiymeBIZHO9hORLgMYSfuAog63Q58E/7s73IcnsQuVq+t4+5nPCThgBno1f+29Hg9G/ki36e0BKXzcdDY4nogAataAu6Wuh5aUlvai0aTGNRju6Htijyo4D4ahzno+8ZjYUo4/Uy5Ar7BxLqZ9nPNTSxLKxTzyaQ9zqU10G35BQ7OtQxIh2C5UqjJH8bAwacQ5+JySlp4OwaicDdJDrpEpt43x+DH4lEy3h1Qg/qPQn2q0cTlz4UWTEOWcu3ISF+M0yfW9D1OhE4C1nvs9BEXzSXhIOLxO5tJ7GktUMPnOSWeEijwV0Zr9TFu4bKUvu/o9Cb2kDk+tqOLPjmDVRuL8x7DWv0O7D4HgJLV+oI7VvSzX/wY0Q6GMuixl+Kx9eOnqPnt8gRaM7MeHS+o2n0RCQyaPB1F5DYWm1u9s8f60ev5M5r+SXIoj4dRZOEEkBhe5/Yf/8WvYZbyL+aTPah36S1szxd
NLheouFDtUGK0PVoT0O8ejQ60hCpEw87kWW9EL1trEC/2egE9tGUk0fCySOxBs4jsOgO2FMT+T3RYCYOG7CwAgGKd2+m94i5+No2Y6HL1uE97Eq5Mdp1QOF0TtRAJQr1i8ZQNMEICvFKpEZnrHtrCfAnGsdWR+vzNvCfKKwv2RSgQei4KG2caoUtKeQfoTmDWIxAA249cillEl/ERR8ix4hD9MiVQhR5lEv0ycf4mXI71gNv6p1uD60/RKYoRqSDKN63m0EjpzNoSEKFk1qb75FFdWeMdnejpJMC++9lyDqNxhzgPPvfy1Em4yVxnlcGmgCNVgj6nZC/n0aJNZH4AAllsgmgydeLo7RxIwGNlrm5v+ShCdrhMdrNRfHKDqvRgBeNiWg+IvSaBzMzjv1sQk6K5DBwFNZTK8Djko1/ABcSPlgwIo0mCmvK80lrk8uJEx/F6z7oLssMJKiRRpcL0SRQcGzta8gF8ViEPkNRJt8tQZ9dD/yNyFbbbBriqiuQa2dhlPO+F/k5HapRwk4kHqZxCdBksgANSJFcRy8jS7QgwvZkUI189auJLISPoOf2vqDP5qHwynBp8yCf8vPAH4nuT/4I3UuRCtMMRXME66LsI3H6nIl13Vz4Ca0kbWiE8UkD9bV+AumZDDo/njmp/eIXNM/71jnG9k0oKmIJcgk8hsTkcGAaSuUdQdNqcRNR9tpy4FGU9puFBH08EvXgDLu3gMtR6nQe8g2XA91QvHN/YIrdNoAGh5XIal9MQwjeILt9ALg5aP/TkYW4xv4uH9htetnbfkXTOOquNPWtx7rO6XabYL5B2Y/voGgUJwxyBLoW+fZ3yKV5NZhziC+l/En73D5CE7XPIPE+Hl2z/6DpNahC4XsrUYbnX1GWZhv0W16MBs9XQ/p1Ra4Sh7UoMeZ9VDdmCRoUM5GbaQpKWY83mih+zpsN995FoKK41WdvUw0j0kBdbSXe7J507JZ4IlWC3EuiBWpEaNx0OJYCx6AkjmuRm6EU2SfH2v8PZQ+yDuejkKtslGJdjYR9c5g+i1AM8s1I1HKQaH2LChsFh8n9YJ/TPJSe7bhK/Ei4Q9M4a1C23UxUPH8c8pP67XMZS9OolEdpbI1D7Ov8A+Gtzr/Z32Mmsj5BL+FPIsF8iqa1Q+LlE5qmr0fCsaanowgYn913PbqW4fgaDZbz0eCbid5m6pCoh4uJf4ymoXsPocH0KuRW8dn7qEUDVqQY6/3D41E6l7Gkm2BEGrk7AnVVVPsr8fgyYndoHgFkfTaHcNEP4diNLGfQLR8t7jcYx6URb59tqNhQvH1ut//vTA3FCjNzXuWdMLNoUQuvhfwdz3XeE6XNevs/59kIHhS6E7/QhrKN2MkywayhYZIvnsgNB+e3jKdP6LULd2w3iYdrJk51le4K45NughHpQ5d4BfpA9kk0H7iuGX2aS+hgEypMpyL3wc4DdD7BxCvQ+9snHAduWWxDWA66GTKDoYU4ClnQPSNsX4zcIeZ93HBAMZa0wSB2ID/we2jS7ClkRY5BE2+riFwdzmBoMYxIGwyiDvnOl6MwtNtomLB7kMhVCA2GFsUKBEz1bYPBYEhVjE/aYDAYUhgj0gaDwZDCGJE2GAyGFMaItMFgMKQwRqQNBoMhhTEibTAYDCmMEWmDwWBIYYxIGwwGQwpjRNpgMBhSGCPSBoPBkMIYkTYYDIYUxoi0wWAwpDBGpA0GgyGFMSJtMBgMKYwRaYPBYEhhjEgbDAZDCmNE2mAwGFIYI9IGg8GQwhiRNhgMhhTm/wFx3fxxIG8xeAAAAABJRU5ErkJggg=="
header_data = base64.b64decode(header64)
header_pixmap = QtGui.QPixmap()
header_pixmap.loadFromData(header_data)
#Assign Pixmap to label
self.mw.lbl_header.setPixmap(header_pixmap)
########################################
# Buttons #
########################################
#QtoolButton Assignment (Resize window)
self.mw.tbttn_more.clicked.connect(self.expandMe)
##Push Buttons
#MENU BUTTONS
icoSize = 32
#Geometry Button
geo64 = "iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAACXBIWXMAAC4jAAAuIwF4pT92AAAF+mlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDUgNzkuMTYzNDk5LCAyMDE4LzA4LzEzLTE2OjQwOjIyICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1sbnM6cGhvdG9zaG9wPSJodHRwOi8vbnMuYWRvYmUuY29tL3Bob3Rvc2hvcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RFdnQ9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZUV2ZW50IyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOSAoV2luZG93cykiIHhtcDpDcmVhdGVEYXRlPSIyMDIwLTA5LTA2VDAzOjMyOjQwKzAxOjAwIiB4bXA6TW9kaWZ5RGF0ZT0iMjAyMC0wOS0wNlQwMzozNDoxMyswMTowMCIgeG1wOk1ldGFkYXRhRGF0ZT0iMjAyMC0wOS0wNlQwMzozNDoxMyswMTowMCIgZGM6Zm9ybWF0PSJpbWFnZS9wbmciIHBob3Rvc2hvcDpDb2xvck1vZGU9IjMiIHBob3Rvc2hvcDpJQ0NQcm9maWxlPSJzUkdCIElFQzYxOTY2LTIuMSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDowMGRmMTBhMC0wZmNkLTYzNDUtYmU4OC05ZjY1ZWY4NWI4Y2QiIHhtcE1NOkRvY3VtZW50SUQ9ImFkb2JlOmRvY2lkOnBob3Rvc2hvcDowYzA5NjQ3OC02MTg4LTI1NDAtYWQ0ZS05MWVkNDgzMzQ5ODkiIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDpiYWQ1Y2ZjMC02MDFmLTdlNGMtOTliOC1mMTI3ZTVkYzU4OTkiPiA8eG1wTU06SGlzdG9yeT4gPHJkZjpTZXE+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJjcmVhdGVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOmJhZDVjZmMwLTYwMWYtN2U0Yy05OWI4LWYxMjdlNWRjNTg5OSIgc3RFdnQ6d2hlbj0iMjAyMC0wOS0wNlQwMzozMjo0MCswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTkgKFdpbmRvd3MpIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDowMGRmMTBhMC0wZmNkLTYzNDUtYmU4OC05ZjY1ZWY4NWI4Y2QiIHN0RXZ0OndoZW49IjIwMjAtMDktMDZUMDM6MzQ6MTMrMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE5IChXaW5kb3dzKSIgc3RFdnQ6Y2hhbmd
lZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz4Mar9rAAAHSElEQVR4nNWbedBWUxzHP5WEt8UyRShLTbZCxUQx2cZatkT2NTMS2cJImZSmsozSVEKyZCmRwUimJExpEKUSlRoVWojeadL29cfv3nnvc597nvXe+759Z8489znr93zvWX/nXCRRTa6/DCslnVZdPGpJImWcB0wADgr5fwDcAmxIk0ztFMtqBswCppFdeYCuwHpgQIqcSKOZ1Zb0tKLxu6RNEf5/SroojS6QdAF3SvrHUfl7vTj7SJruiDNPUvvdUYDOklY5KjVF0t5evO6STvKez5D0myPNJEmNdgcBDpI0zVGJpYHKIulkz39+KI8HJO1y5NG3Jgsw2EF6q6SeEfFdAiB722878lst6ayaJMC1ktY5yI6Q9fGodL4A3+fIu51sHIjCTElHVacAHSQtc5D7SFKTPOkbS3pE0uUFlHWBbGaIwquSKtIUYH9JUx1kflWyq7r+isZ2Sb3TEOBRB4Edku4uMq9mkiZK6lNkugMkve/gsUJSpyQE6CJpraPQCZIaFFkJVNgYkMt1lLTEwekDSYfEIUBbSYschcwotJA8AswvIw9kY8hfDo7jJNUrRYCGkt5yZLpG0tllko5TAN8NcfDdKum2YgRoL+vTYWxTvAuR+p6QbWLM82DZDBSFz7wy8wrwTETiTZKax0g0aXePQ4Qh4bhR2+F6EX6NgHeBLjFuRI8G1gEvxZhnXWAQ8LAjfL+wR5QALgtJe8xoMRNoWgq7EBoAjYF2MeQFcBkm6KPAgY44WXXLZxD5A/gl5HcmsBYYWCTBpNAc+BJrofuGwiYDP+VKnE+AdUArrFmFMQAT6JxCWEbAfxvbS0wPMBRYBXQK+W8AOgNXAjtys8geQEYFBo3lAf9mco+wsyS1KnKgqiWprqQ6JQxy10naGMFju6QHQ3GXBsJHFzIIuvAbcJGn7JpQWGdgKfAchdsZhb39nUVwOBZYALwG7B8KmwIcAAwvIr+SjKKzgUOBfhFhvYG/gSsKyKctJsKsAuLWBcYBi4A2obCVQEevzH8LyCsD5ViFhwBNgOkh/4bY4DMHOCJH+j2830Z5yrkG2AT0jAjr65UxJ08eTpRrFl+P2fnPAZaHwk4BVmDdoqKEvNsB3wITgX1CYW9jU+hTJeSbgbjOBWYALYFeZPdpv1vcHvKvBP4Cfgj5+y3oW7LXCD8CrYEexHWAUsQsUMwa37WRWqTca//eknZGpPtP0i0lcIl1FigUldgb6kB2t/BH8eexN90c6x4n4J5FXsXGifEJcE30aGwe1i36ANtCYbdjq8mVWFOeiy24gvgRW37fCGxNimQaZ4Mjsbc8JuRfAdQC9vKcj03A5dh0913S5NI6HN2BDZBHkrtSw7Ed23tpkIJ0T4cBfsWadXhOX4rN5w+lzCdVAdoCi7H1wYves4/x2HgwEtty10qLVBoCNAU+xJr+McBtnn9wvbDR++2JGV02A/enwC0VAQ7BNlFgb/pu77lOII5vqTkX279XYANh4khKgLOBz7FbId9gfbsHcCuwxYsTbOZ7er9fYK3kCcyyAzZ4TiPaVFc+Yl4JNpX0ZiD94BxxFwfiPZIjnn/++LekO2r6SvA07E2DGTtfiCHPXsB8zNzVj6pdZCyIQ4DuwFve82RgBHABNtitiiH/6dimaCjQH1tTnIvNFllW3qJRRhdoJmlyIO5NRTbNQrtAlPvUS1cpO5StXR1d4GaqLD+TyDaMJImnMGt1BdbiWpaaUbEC3EDV6DwG+Bg4H7gK29ykhU+wzdNzwDDgZ2wlOQrbYRaOPF1gmefXRJmXIuK4BFFOF4hyY728tivzDLOsLrDL+x0IXOI9v07mMram4A1gCTZLDMdmJKiqQyTyTSm+QKO
B07G7vPNK55goZmMGl2HAcdhpEWRutbMQJUBwhdYCm3ufwGxxuwOCO8obgcMD/7M2WVFdYGPo/2DMqNmtXGYpojbwLHYrPYis7hAlwFDsBDiIfYF3MAtu+GCipuF64B/MFBdGuF6RAmzBNjNdsMPRII7HjJoTsePtmoQTgYWYEbV+KOxLbJqckpWqgOnFdTdvp6T7asA02FCZG7Ag/pBdsnSmL7SQ+qqaZ8PYIKlHNQhQR9KTDk6VynExqhQBfNdC0hxHod9Lap2SANdI2uzgMUp27F5QXqU2u66S1jsIvKaI21gxCXC8pAWOcr+QdESxdSlVAN895iCzQ3ZTKy4BGkh6w1HWn5IuLLUO5QqA7O7uyw5yqyV1K0OAPSQNUvS9xUpJd5XLPw4BfNdS0lyHEN9JOq5IAa6W+3uj0ZL2jIN3nAL47mK5x4dXZHeDUOYd5KAAbST94Ej/laQj4+SbhAC+G+ioxL+SblbmlyC9vDSvO9Ksk91Yj51nkgIg6UC5FylBzJb0U4T/FkkPJckxaQF810rS1wUIEcRYxdTPc7m0vx2+FDOXh6+4BTEX29AsS4NQ2qfDU7G7fIMjwjZgVqdTSanyQGpdIModJmmh7CPJx6uLx/+gYIYlfCKQYAAAAABJRU5ErkJggg=="
geoData = base64.b64decode(geo64)
geoPix = QtGui.QPixmap()
geoPix.loadFromData(geoData)
self.mw.bttn_1.setIcon(geoPix)
#self.mw.bttn_1.setIcon(QtGui.QPixmap(root_path + "\Assets\geo_ico_64.png"))
self.mw.bttn_1.setIconSize(QtCore.QSize(icoSize,icoSize ))
self.mw.bttn_1.clicked.connect(lambda: self.loadWin('geoWindow'))
#Shader Button
shop64 = "iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAACXBIWXMAAC4jAAAuIwF4pT92AAAF+mlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDUgNzkuMTYzNDk5LCAyMDE4LzA4LzEzLTE2OjQwOjIyICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1sbnM6cGhvdG9zaG9wPSJodHRwOi8vbnMuYWRvYmUuY29tL3Bob3Rvc2hvcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RFdnQ9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZUV2ZW50IyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOSAoV2luZG93cykiIHhtcDpDcmVhdGVEYXRlPSIyMDIwLTA5LTA2VDAzOjMyOjQwKzAxOjAwIiB4bXA6TW9kaWZ5RGF0ZT0iMjAyMC0wOS0wNlQwMzo0OTo1OCswMTowMCIgeG1wOk1ldGFkYXRhRGF0ZT0iMjAyMC0wOS0wNlQwMzo0OTo1OCswMTowMCIgZGM6Zm9ybWF0PSJpbWFnZS9wbmciIHBob3Rvc2hvcDpDb2xvck1vZGU9IjMiIHBob3Rvc2hvcDpJQ0NQcm9maWxlPSJzUkdCIElFQzYxOTY2LTIuMSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpiY2ZjMmU3ZS0xYTBlLTY0NDQtOTM5ZS1hMmMyZGNkM2RjOTQiIHhtcE1NOkRvY3VtZW50SUQ9ImFkb2JlOmRvY2lkOnBob3Rvc2hvcDpmYjE0M2MzYi1jMGJkLTliNDEtODhlZi0wYTNmMzdmZGE1NGQiIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDowYjgwOWYxZi01ZjIzLTcxNDMtYWU0OS1jNTNjYTZjNGMzOTciPiA8eG1wTU06SGlzdG9yeT4gPHJkZjpTZXE+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJjcmVhdGVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOjBiODA5ZjFmLTVmMjMtNzE0My1hZTQ5LWM1M2NhNmM0YzM5NyIgc3RFdnQ6d2hlbj0iMjAyMC0wOS0wNlQwMzozMjo0MCswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTkgKFdpbmRvd3MpIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDpiY2ZjMmU3ZS0xYTBlLTY0NDQtOTM5ZS1hMmMyZGNkM2RjOTQiIHN0RXZ0OndoZW49IjIwMjAtMDktMDZUMDM6NDk6NTgrMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE5IChXaW5kb3dzKSIgc3RFdnQ6Y2hhbm
dlZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz6zH51jAAAPNUlEQVR4nOVbbYwcxZl+u3tmdnfG9jLLrtdrWGxjVr6sjW3sQwbxsTpYshgRErg7cZzC3eFLAgkJZ8dSgiKUmPyIdH98B9JFJ4ROQlic0OUHIudjifEJZNkCzgR86z0D5sM23ll/7Mfs7kxP18dblR9dVV3dM+ud/bJPyiuVu7qmPdvPU8/7UdU9jpQS/pjNvdI3cKUtdZn+zmrO+aZSqbRmamqqvVwu5yqVSp4Q0sw5RwAYTafTI9ls1m9ubh5rbW09m81mBwDg88W+MWcRXaB3YmKiZ2Rk5Gujo6PNo6Oj54vF4tjU1FTR9/2y7/vlSqXiB0EQEGWMMea6biqbzS5pa2vrWL169dLu7m7/pptuGnRdt38xbnKhCWillH57dHT06xcvXrxqfHz8YrlcnqSUEimlQETBGGeUEkIpJYREx0qlUgmCoOL7fnlycnJqZGTk4sTExLjneZnOzs7rb7311jV9fX2kq6vrdwBwdqFueKEIaKWUPlUsFh8aGxvzy+VyCRERAIBzzgghQRAEFUqpBhyoRgihAaWEBkFQUWqgWhGUUub7vn/u3LlzhUJhqLm5uaGnp+f2Rx55ZMW2bdv2A8DQfG983gQIIXZPTU09OTk5yQghAQAAInINVrVLEBAdbXdQXaqup4wxHB8vTp08+emnjuOw+++//4Fnnnlm9Zo1a56fz/3PJwt0M8bemZqaejoIglQ6nW5oamrKeV4q5UTmuq7r6qPqO47juE7CAPQRAPRJeOoAgCOEEE1NjZnu7u6vdXR0XP/SSy+9vm3bLS8+99xzPwaAmy8rAVLKhzjnv6eUbnYcx21oaGjwPM8LgTquxmuxEKFxYl2IwIdA9efhNY4DEPbVOSAiZjINqc2bN3e5ruPs3Llz74MPPphnjP7F5SLgacdxfiOldFKpVMp1XQ/ULMXxAUQTGn1m9fU4KApqXA+gibG/znEAOOe8ra2t9frr16547bXXfrdp0+b+jz/++MlFJUBK+aKU8ldSSnCsu10Ek9P0Y4YohOe5zpo1a1acOHGidOONN/7LwYMHfz6bP1Q3AUKIf6OU/jUiMkTkiIhCCCGlFFIZAMhwKLxn6yjVQSpA1pD6IPoOA9wain0cXRYOIiJ2dna2cc6ht7f3l7MhoV4Cfur7/rc0cETkQghNgm5CCIGKEKHIMaCTpnBIAIPQRg26xQFXm/6IMYbt7e15AIDe3t5ffvHFF3W5Qz0EfKNYLO6ilFIhBDLGCGOMKCXYalBNajKElFFfkSOljIixxKPPhU0WKLWAUU0VkWow5IJzjvl8fikAwB133PGvvu//1XwJuG58fPzFcrlcllJKxhjlnDPOOWOMMdXnigTOOedKGCiESB6FIsjiREqLLH2uaRJSDWq3ShIWEhNyo29YCCGy2Wy2UCjg3Xff/SkAbJ4zAaVS6dXR0dHzjuM4nHPKGKOKBGqToVtIAhpCNHjdt9VgUyClFGjrRkqRBFvDpWxJmL4QoUelUqn0u++++/tnn332z+ZEgBDiqUKh0AChtHiN2af6qMAzRDREYGS6L6wxTQIiSkOEdawCXUP6NhlVpyo9w549e/7p888/3z1bApqGhoa+XyqVJpX0iWoBY4xQSgPrnHLOCVPGQ6tShtUst0AUAoUmR9reofiwdWGN2SSAyiQAYAIFSCkFqDrjscceewsArqqbgCAInh4aGvrS87wUpVSDp7quVyQQ5RKEUqrdwyhCKYVGrhFaSALniPrfmAkdLiKXMGow8UHHiGjMhAuVUQBUXwIAHDp06Nj777//d/USkPvqq6/6fN8PEJEr4Bp80mzwJjZosnSQZIyZjKFo0GTYbiIQha0ORUREiq2K6gwSxQ
GAMGDaoHbt2vWfANAxIwGU0h1nzpw5lUql0tbsa6D2Co/QhDos8JRZxnlMAAnsOnsIo4BIBSZW6HE7LoCUEuJhIqYSYeM6cuTIZydOnPjmjAScPn26b2xsbEwIwRNAA+0CqhGbEMYYIYSYcStTGPcIz5GFLWRDqQMVKYkYIVDLXKdKHSRVBNHuAULEMoKRv2179+49l8ScJOD2U6dO+QDgEEIqhBC9SVGx1vCBAmqPVdSYJkmrwBx13aAP2jW0QDR45R+2GmLxATEErwpORY5JmzFVJAnYt2/fm5TSB+yx2Kbo6OjovcPDw2dyudwyQgj1PM91XZfba3lQkdXWnSVTI2krG3BmGaXUHqM2aF4jKoYzHQuOUgg0qtDCSGTMmPy1BUFQ6e/vX/vAAxEHMQLOnj27dmJiYqCxsTErpZSKAM9e2+tVoEwwEOE3JDDGmJa5iQmIaLiIwgMm44I161xwbgiQ8XrKVI1g0sAlCAAAeOONN0amI2Dl0NCQ4JwjISTwPM9zXc/zPNfTWzsQreNV8anlJ03AmkYFjDHGGeOM8yhGIiKGY4xjWERxixEtex0P7Gwg7SYl2vKPlqM17NChQ+8DwEYA+N8YAYi4bXh4+CvHcRxCCPFCM1tajuO6Zu+mSgEy4QXVJGjfJ4TSEDTnoTvYsSHKEJxHX6CKpUQssMkIJ2Cm2QcAGBwcPHHhwoW/Wb58eZyA8fHxVSMjI+czmUyGUkoMekNAzAPsOtwUatZkGSQ2A5bs7bCAaszEgVAc+hyFio8SMe771rmZfSHEJQlQJCxdvnx53AWKxfGWiYmJL/L5fCuAQ7QD2OBd140FQQCzuDVOmXRiG3/Y0bNNmYqRXIE2l9iBERF1DLBmPwp6kRtIKYTAmcADAJw6dcrsZhkCJiYmG8vlcimXyy0TQgg3YbHpB1N7y1iVHnEgFHZDgB3tIhJiqVBfK9Q1OgiaTBDFgbgLWETMOPsAAIVCwcQIQ0CpVGoul8slQghBRK6jfwTe1Tu34DiOIxMWJ4CjXhZrCSsSbJfgkSqYahx1X8dERK5dQGDc/2XoFtod6pt9AIDx8XFzrSEgCIIlvu/7hBDCObdDwHQxQEdeIaWU1oxE0Uqg0NmNJ8yukDV4zplWBIbnXITBMATNOYqwBoiAS2kKo7oJCIKgmgDOOQ+CgFBKiavyn05/+qBnX+F3agsgxoJdHJlyV/m88f+wMZUV4vVAeM619E0JrIohIYQAIbBu8GryTN8Q4LruhHoORROz7yRToPVFNgN2OrRjga0AAyoOnhvwVhOc85jsbfnr6C+E4DOlvqQ1NjZWB8GGhoYxxhgnJCCel1LVn+uET3qiJziJNYYBb+XDWMCqns2IAEQuGDMrA0TkWvpJItAiw3Y3gTi72QcAyOfzXhUBS5cupYgogoAEqRSmbAIAAFQlCOGjKnsHRqg0LKKCQAi1tq9e0CRIUH5vpF4DNE/Ovl4WS0TkswUPANDR0VFNQGtr61QqlcqUy2W/sbGpMZ78dPRz9KMqveTUG5GqJhBVwcC6+RgJCZfQxIhI+tGYmnXTFHi8VMl7KVu1apXpGwKuueaaQnNzc/7ChQvnVQp0kgYQL4X10Y4DVkCUYSYIj6h2fCJgusozrmEDNgsgLXmrFpBCCC7qLHpq2YYNG6aqCMhkMv9z7bXX9g4ODh7PZrNLpiMgaTUISKpAhiARLTnbAdIEOx7mOftchOcow511Q+acwa9bt+6GFStWDFQRAABn1q1b1/Hqq6+Or1y5shMAZATddZJZQAsAABLbUpEr6Jmzn43UiuoWCRjNeKQAm8i5+r222267bRsA/HstAuDmm2/GdDrdVC6X/XQ6naqSQPREO/ksbyYFxPrxoBbJW8+4RYL+v1IJZl7gAQC2b9++AgBM2ky+InNXX1+f/8knn/xfR0fHSpX+3Ai9A8mttloE6MrQVoMVvZPlrBnnHFHv9ljjqvjhbL7g0+l0ulgsfiubzf6HHku+J/jffX
19P3jrrbfebWtraxdCiEQGANASMPBjj6k0aNBlgUWALWULvBBhhYfJz2zi5j3zAAAPP/zw17PZ7Gv2WNVLUkNDQ9/bsmXLC7nckvYlS3KNAInXPiIC1P57bF0gpbUro/6NPdGxlWADxWhtr0iQUogw8i0EeACADz744Adbtmz59SUJAID8448/vu2FF17o7+7u7lQ3YAiokQZBn9eIBXZQTKgBpRBSqrpeJK9BRD7XPF/LNm3adO1HH32UAoBTMxEAg4ODOzds2PDPHR0r801NTQ12AkwSYPeV9GtlBFsNUhMQruLMTo7Z0Vkoydv29ttv/0NPT89zyfHp3hNc9uijj96+b9++/1q1atVyIfRjJhMHQEoAx9EvJ4QUaMAAEnR5XFsJ4aMrC7jZWZazXNjUY1u3bu06evRoEQAu1ksADA8P//i6667bm06nU/l8vlkIKfVWAEBVOWxOozgQzjiE7w3FagVVxeonOvppz5yLm5ns2LFjP9u4ceOvan027fsBHR0de59//vlfVCoV7vt+wDlDtYEp1K4Ncs6F3rxQn1kLGaY3OE19rxc4QqBUhQ9HRLaY4Hft2vXExo0b/3G6z2d6VXb99u3bW/r7+w8tW9a8xC6EonpAgnrZycoI+hi91KHVYRLEIoI2N79+/drjx4+vBIBD010z47vCnPM/v+GGG357+vRpmsvlcmBVQhp42I/krz4FPaRB67g4b2R1WC6XgzNnzvxtS0vLS5e80Jqdadtnn332Q319Q0NDo90ymUxjJpNpTKczDel0OpNKpdKe56XsR2qLjraGHT169Kl6sNVFgJQS3nzzzV/oL/c8LxWCDJ8exMFeEbwxe+WVV35WL666CZBSwv79+5++QhNat7388ss/mQ2mWREgpYSBgYHHOzs7L9dvjeq2lpYWOHz48JOzxTNrAqSUUCwW//LOO++840qD1rZ169bNhULh23PBMicCVLttz549P5z59hbXdu/e/V0pZd9cccyHAJBSOh9++OGP7rvvvtsvN/C77rpr6+HDh3dKKbPzwTBfAnTrPHDgwPfuvffeP11s4D09PRtef/3170gp1y7EvS8UAbp1vffeezueeOKJh66++uqWhQLd3Ny8dMeOHd985513/l5KeeNC3vNi/XAyUyqVvnHgwIFrDh48WDxy5MixgYGB4zz8leiM5rourF+/fv0tt9yyqbe3t/Wee+45l8/n9wNAeaFvdDF/OWr+BgDcVCgU/uTkyZNXffnll9758+fl2NgYL5fLAgAgm806+Xw+1d7e7q1evRq7uromOjs7PwGAjwBg3nuBl7y5y0DA/2v7o//1+B8AAWeTU9WOLOsAAAAASUVORK5CYII="
shopData = base64.b64decode(shop64)
shopPix = QtGui.QPixmap()
shopPix.loadFromData(shopData)
self.mw.bttn_2.setIcon(shopPix)
#self.mw.bttn_2.setIcon(QtGui.QPixmap(root_path + "\Assets\shader_ico_64.png"))
self.mw.bttn_2.setIconSize(QtCore.QSize(icoSize, icoSize))
self.mw.bttn_2.clicked.connect(lambda: self.loadWin('shopWindow'))
#Render Button
rop64 = "iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAACXBIWXMAAC4jAAAuIwF4pT92AAAF+mlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS42LWMxNDUgNzkuMTYzNDk5LCAyMDE4LzA4LzEzLTE2OjQwOjIyICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOmRjPSJodHRwOi8vcHVybC5vcmcvZGMvZWxlbWVudHMvMS4xLyIgeG1sbnM6cGhvdG9zaG9wPSJodHRwOi8vbnMuYWRvYmUuY29tL3Bob3Rvc2hvcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RFdnQ9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZUV2ZW50IyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ0MgMjAxOSAoV2luZG93cykiIHhtcDpDcmVhdGVEYXRlPSIyMDIwLTA5LTA2VDAzOjMyOjQwKzAxOjAwIiB4bXA6TW9kaWZ5RGF0ZT0iMjAyMC0wOS0wNlQwMzo1MzoyMSswMTowMCIgeG1wOk1ldGFkYXRhRGF0ZT0iMjAyMC0wOS0wNlQwMzo1MzoyMSswMTowMCIgZGM6Zm9ybWF0PSJpbWFnZS9wbmciIHBob3Rvc2hvcDpDb2xvck1vZGU9IjMiIHBob3Rvc2hvcDpJQ0NQcm9maWxlPSJzUkdCIElFQzYxOTY2LTIuMSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDoyOGYzYzE4OS1lMjYzLTk1NDAtOWE4ZC04NzM0N2ExYTY2M2MiIHhtcE1NOkRvY3VtZW50SUQ9ImFkb2JlOmRvY2lkOnBob3Rvc2hvcDo4MTBlNGFlZS1mMjdlLTk5NDAtOWE0Zi01NTI2NDljMDY0MjgiIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDphMTdjMjgyOS0wODUyLTJhNDctYWViYy1jZTM3MGVlZDk5MGQiPiA8eG1wTU06SGlzdG9yeT4gPHJkZjpTZXE+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJjcmVhdGVkIiBzdEV2dDppbnN0YW5jZUlEPSJ4bXAuaWlkOmExN2MyODI5LTA4NTItMmE0Ny1hZWJjLWNlMzcwZWVkOTkwZCIgc3RFdnQ6d2hlbj0iMjAyMC0wOS0wNlQwMzozMjo0MCswMTowMCIgc3RFdnQ6c29mdHdhcmVBZ2VudD0iQWRvYmUgUGhvdG9zaG9wIENDIDIwMTkgKFdpbmRvd3MpIi8+IDxyZGY6bGkgc3RFdnQ6YWN0aW9uPSJzYXZlZCIgc3RFdnQ6aW5zdGFuY2VJRD0ieG1wLmlpZDoyOGYzYzE4OS1lMjYzLTk1NDAtOWE4ZC04NzM0N2ExYTY2M2MiIHN0RXZ0OndoZW49IjIwMjAtMDktMDZUMDM6NTM6MjErMDE6MDAiIHN0RXZ0OnNvZnR3YXJlQWdlbnQ9IkFkb2JlIFBob3Rvc2hvcCBDQyAyMDE5IChXaW5kb3dzKSIgc3RFdnQ6Y2hhbmd
lZD0iLyIvPiA8L3JkZjpTZXE+IDwveG1wTU06SGlzdG9yeT4gPC9yZGY6RGVzY3JpcHRpb24+IDwvcmRmOlJERj4gPC94OnhtcG1ldGE+IDw/eHBhY2tldCBlbmQ9InIiPz5I2ObQAAAErElEQVR4nO2bX4gWVRjGf+MMZltaukhLRosIkrHF0j/xQqKoi0iE9KpuBBVCKdCEkOjfRYGRJBRUlKBQXRSkqIGyoBd1YRFUtpiaCWplIIgSlsr7nvd0MbMwOzu7O9/uzHdW+h6Yiznv85153mfOnznn8EXee/7PmBZaQGh0DAgtIDQ6BoQWUAXOa5fz+przOuC8rqyz7miqzwLO63LgPSABLgELgd44Ss7VUf+UbQHOa6/zuhfYAxwAFgEXSY1YU9uDvPdT6lKT6Wrykpr8qyYn1eThXOyCmng1OaMm0+p4XvCEC8k/qibH1ETVZIuazMjFurPkh64n63jmlOgCzmuP8/oZcBC4C1gXR8nmOEqu5miLCj9bXcezkzoqmSic1xhYB7wJzMqFviuhLyzc31mHhmAGOK+LgfeB+wohBY4VuE+RmjSEf4A36tDR9i7gvM52Xj8EDgMzgE8KlONxlEjG7XFevwR2AT1ZfB9wdxwle+rQ0zYDnNfIeV0FnABWAS8D/cDVAnUw464GjgMrsvI/gZVxlCyPo+RsbcLaNLr3qcnX2eg9oCYLcrHvC6P7x2pyMHfv1ORdNZnVhLamE79JTd5WE1GT82ryTCGeqMmVggH56wc1ebBJjU0mv0JNfs8ls2mUllGW+GU1eUFNkqZbZ+2zgPO6gPTb/YlC6McS+kMlZfuA52rt52Ohxjd+g5q8MkaTnpPjTleTV9Xkai7+h5qsaPqNN9IF1ORxNfk1S2SHmgwWkj+b4y5Vk19yMVWTbWoys93JT9oANbldTT7PEjmlJo9l5ZcKBuxRkzlqsr1Q/q2a9IdIfFIGZKP3BjX5O3uDW9WkK4v1ljT/w9ksMHR/UU2erWtF11YD1GSJmhzJEvlJTR4oxJeNMa15NflUTW4LnfjQVXkWcF67gS3A2lzx03GUHCtQ7x2lihPA+jhKDrU2TDeLSgY4r0uB3UB3rvgacLKEXlzcXCFdyGyNo+TaREQ2iorNfn/ZV1qB06MmXxQ4u9SkN3Qzr6MLnC4p+xnSRQ7p4uYdYHYW+w14Po6SA5N9QU2j6mpwE+nyNY9B53U+MADsIE3+Cukqr+96SB5a2BZ3Xm8FvgH6sqJdpJ+7N2b3u4GNcZScqVljo2jpXMB5nUdqwvxc8V/AmjhK9tesrS1o+WDEed1I2t+HcAG4XKeoSeIM6XR7tAp5IqvBomPdDJ8eQ6MXeAtYVoVc93LYgA3AYElsLrCd4bu/Y/G7gI+AeYXyzZTvGufrv6Oq4LoNWBtHyY5iofM6FzjE8OQhHTt2lvC7gK8Ymfz6OEo+aKH+cVHnpuj6cZLvK+HvLOHPJE3+kRJ+WfK3ZPxi/ZVQVwsY782UJT9aMgOM3CkajX/zKPzKqKMFhEq+C9hbwm8Jk20B54DF2SlPEUsYeZw1Fv9+Rpo1Fv8eRi68WsZEvgM2ANsm++CGcSSOkv4qxClxOhwSHQNCCwiNjgGhBYRGx4DQAkKjY0BoAaHRMSC0gNDoGBBaQGh0DAgtIDQ6BoQWEBoTMeB6OPs7VZU4kS2xacDrwFIgaunH7cF54MU4Sk5XIU/5P001jc4YEFpAaHQMCC0gNP4DiOO4AR/HDksAAAAASUVORK5CYII="
ropData = base64.b64decode(rop64)
ropPix = QtGui.QPixmap()
ropPix.loadFromData(ropData)
self.mw.bttn_3.setIcon(ropPix)
#self.mw.bttn_3.setIcon(QtGui.QPixmap(root_path + "\Assets\\rop_ico_64.png"))
self.mw.bttn_3.setIconSize(QtCore.QSize(icoSize, icoSize))
self.mw.bttn_3.clicked.connect(lambda: self.loadWin('ropWindow'))
#Debug Button
#self.mw.bttn_debug.setEnabled(False)
self.mw.bttn_debug.clicked.connect(lambda: self.loadWin('debugWindow'))
## Print empty Line after Init for console clearity.
print("")
########################################
# CLOSE EVENT #
########################################
def closeEvent(self, event):
    """Qt close hook: best-effort close of every child window.

    A child window attribute only exists if that window was ever opened,
    so each close is wrapped to swallow AttributeError (and any Qt error).
    """
    for attr in ("geoWin", "debugWin", "shopWin", "ropWin"):
        try:
            getattr(self, attr).close()
        except:
            pass
    print('Daz to Houdini NEO: closing Windows (mainWindow) ' + sessID + '.')
########################################
# Load Window from Menu #
########################################
def loadWin(self, targetWindow):
    """Open (or re-open) one of the tool's child windows.

    targetWindow: one of 'geoWindow', 'debugWindow', 'shopWindow' or
    'ropWindow'. Any previous instance of that window is closed first,
    then a fresh one is created; finally the main window is collapsed.

    The four original copy-pasted branches are replaced by one
    table-driven path: (attribute name, window class, initial size).
    """
    specs = {
        'geoWindow':   ('geoWin',   geoWindow,   (380, 490)),
        'debugWindow': ('debugWin', debugWindow, (361, 480)),
        'shopWindow':  ('shopWin',  shopWindow,  (380, 490)),
        'ropWindow':   ('ropWin',   ropWindow,   (380, 490)),
    }
    spec = specs.get(targetWindow)
    if spec is not None:
        attr, win_cls, size = spec
        try:
            # Close a previous instance if one was ever opened
            # (attribute may not exist yet — deliberately best-effort).
            getattr(self, attr).close()
        except:
            pass
        win = win_cls(root_path, sessID, parent=mainWindow)
        setattr(self, attr, win)
        win.rootPath = root_path
        win.resize(*size)
    # Original behavior: collapse even when targetWindow matched nothing.
    self.collapse()
def collapse(self):
    """Shrink the main window to its collapsed (menu-only) size."""
    self.resize(361, 171)
def expandMe(self):
    """Restore the main window to its full size."""
    self.resize(361, 520)
def sessID_gen(self):
    """Generate a random 8-letter tag identifying this tool instance.

    Stores the result in the module-level `sessID` global, which the
    window constructors and log messages read.
    """
    global sessID
    sessID = "".join(random.choice(string.ascii_letters) for _ in range(8))
###############################################################################
#                                Main Control                                 #
###############################################################################
# Shelf-tool entry point only: when imported from Houdini as a module this
# startup code would briefly show two instances.
# Best effort: close a previously opened instance before creating a new one.
try:
    mainWindow.close()
except:
    pass
mainWindow = H2Dz()
mainWindow.resize(361, 520)
mainWindow.show()
| 404.5 | 51,822 | 0.931333 | 3,057 | 93,844 | 28.570167 | 0.810599 | 0.001511 | 0.002061 | 0.00103 | 0.011198 | 0.0079 | 0.00158 | 0.00158 | 0 | 0 | 0 | 0.150828 | 0.027855 | 93,844 | 231 | 51,823 | 406.251082 | 0.806511 | 0.015536 | 0 | 0.280303 | 0 | 0.045455 | 0.942823 | 0.940482 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0.068182 | 0.045455 | null | null | 0.022727 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
a8071669d3a09acab909992f8c9c1c712b673dea | 103 | py | Python | src/root/views.py | bayuyuhartono-katadata/flask-project | 000bafa4f593474eec2171b1b1ce392d64c5c47d | [
"MIT"
] | 1 | 2019-10-28T06:53:36.000Z | 2019-10-28T06:53:36.000Z | src/root/views.py | bayuyuhartono-katadata/flask-project | 000bafa4f593474eec2171b1b1ce392d64c5c47d | [
"MIT"
] | 1 | 2019-12-26T22:21:29.000Z | 2019-12-29T12:47:33.000Z | src/root/views.py | bayuyuhartono-katadata/flask-project | 000bafa4f593474eec2171b1b1ce392d64c5c47d | [
"MIT"
] | 1 | 2019-11-08T02:03:13.000Z | 2019-11-08T02:03:13.000Z | from flask import render_template, request
def index():
    """Render the root landing page."""
    page = render_template('root/index.html')
    return page
| 20.6 | 45 | 0.76699 | 14 | 103 | 5.5 | 0.785714 | 0.363636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.135922 | 103 | 4 | 46 | 25.75 | 0.865169 | 0 | 0 | 0 | 0 | 0 | 0.145631 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
b53dbb11e4d5ad581c2fa69527ba8815de3cce2f | 4,076 | py | Python | models/flow_fn.py | xuanqing94/NeuralSDE | f3511799cfc9c3d6b95ff9bcb07563df88715e0c | [
"MIT"
] | 5 | 2020-06-28T07:15:35.000Z | 2022-01-20T01:52:31.000Z | models/flow_fn.py | xuanqing94/NeuralSDE | f3511799cfc9c3d6b95ff9bcb07563df88715e0c | [
"MIT"
] | null | null | null | models/flow_fn.py | xuanqing94/NeuralSDE | f3511799cfc9c3d6b95ff9bcb07563df88715e0c | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
from .layers.conv2d import RandConv2d
from .layers.concat_conv2d import ConcatConv2d, RandConcatConv2d
from .layers.concat_conv2d import ConcatConv2d_v2, RandConcatConv2d_v2
from .layers.groupnorm2d import RandGroupNorm
def norm(dim):
    """Build a GroupNorm over `dim` channels with at most 32 groups."""
    groups = min(32, dim)
    return nn.GroupNorm(groups, dim)
def rand_norm(dim, **rand_args):
    """Build a RandGroupNorm over `dim` channels (at most 32 groups),
    forwarding the randomization keyword arguments unchanged."""
    groups = min(32, dim)
    return RandGroupNorm(groups, dim, **rand_args)
class FlowFn(nn.Module):
    """Three (GroupNorm -> ReLU -> time-conditioned conv) stages followed by
    a final GroupNorm; channel count `dim` is preserved throughout."""

    def __init__(self, dim):
        super().__init__()
        # Attribute names and creation order are kept so state_dict keys
        # and parameter initialization match the original module.
        self.norm1 = norm(dim)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv1 = ConcatConv2d(dim, dim, 3, 1, 1)
        self.norm2 = norm(dim)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv2 = ConcatConv2d(dim, dim, 3, 1, 1)
        self.norm3 = norm(dim)
        self.relu3 = nn.ReLU(inplace=True)
        self.conv3 = ConcatConv2d(dim, dim, 3, 1, 1)
        self.norm4 = norm(dim)

    def forward(self, t, x):
        """Run the three conditioned stages, then the final norm."""
        out = x
        for norm_l, relu_l, conv_l in (
            (self.norm1, self.relu1, self.conv1),
            (self.norm2, self.relu2, self.conv2),
            (self.norm3, self.relu3, self.conv3),
        ):
            out = conv_l(t, relu_l(norm_l(out)))
        return self.norm4(out)
class FlowFn_v2(nn.Module):
    """Variant of FlowFn: only the first conv is conditioned on `t`
    (ConcatConv2d_v2); the remaining two are plain Conv2d layers."""

    def __init__(self, dim):
        super().__init__()
        # Attribute names and creation order kept for checkpoint/init parity.
        self.norm1 = norm(dim)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv1 = ConcatConv2d_v2(dim, dim, 3, 1, 1)
        self.norm2 = norm(dim)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(dim, dim, 3, 1, 1)
        self.norm3 = norm(dim)
        self.relu3 = nn.ReLU(inplace=True)
        self.conv3 = nn.Conv2d(dim, dim, 3, 1, 1)
        self.norm4 = norm(dim)

    def forward(self, t, x):
        """First stage sees `t`; later stages are unconditioned."""
        out = self.conv1(t, self.relu1(self.norm1(x)))
        out = self.conv2(self.relu2(self.norm2(out)))
        out = self.conv3(self.relu3(self.norm3(out)))
        return self.norm4(out)
class RandFlowFn(nn.Module):
    """Randomized counterpart of FlowFn: every norm/conv is the Rand*
    variant and receives the `rand_args` keyword arguments."""

    def __init__(self, dim, **rand_args):
        super().__init__()
        # Attribute names and creation order kept for checkpoint/init parity.
        self.norm1 = rand_norm(dim, **rand_args)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv1 = RandConcatConv2d(dim, dim, 3, 1, 1, **rand_args)
        self.norm2 = rand_norm(dim, **rand_args)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv2 = RandConcatConv2d(dim, dim, 3, 1, 1, **rand_args)
        self.norm3 = rand_norm(dim, **rand_args)
        self.relu3 = nn.ReLU(inplace=True)
        self.conv3 = RandConcatConv2d(dim, dim, 3, 1, 1, **rand_args)
        self.norm4 = rand_norm(dim, **rand_args)

    def forward(self, t, x):
        """Run the three time-conditioned stages, then the final norm."""
        out = x
        for norm_l, relu_l, conv_l in (
            (self.norm1, self.relu1, self.conv1),
            (self.norm2, self.relu2, self.conv2),
            (self.norm3, self.relu3, self.conv3),
        ):
            out = conv_l(t, relu_l(norm_l(out)))
        return self.norm4(out)
class RandFlowFn_v2(nn.Module):
    """Randomized counterpart of FlowFn_v2: only the first conv is
    conditioned on `t`; all layers take the `rand_args` keywords."""

    def __init__(self, dim, **rand_args):
        super().__init__()
        # Attribute names and creation order kept for checkpoint/init parity.
        self.norm1 = rand_norm(dim, **rand_args)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv1 = RandConcatConv2d_v2(dim, dim, 3, 1, 1, **rand_args)
        self.norm2 = rand_norm(dim, **rand_args)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv2 = RandConv2d(dim, dim, 3, 1, 1, **rand_args)
        self.norm3 = rand_norm(dim, **rand_args)
        self.relu3 = nn.ReLU(inplace=True)
        self.conv3 = RandConv2d(dim, dim, 3, 1, 1, **rand_args)
        self.norm4 = rand_norm(dim, **rand_args)

    def forward(self, t, x):
        """First stage sees `t`; later stages are unconditioned."""
        out = self.conv1(t, self.relu1(self.norm1(x)))
        out = self.conv2(self.relu2(self.norm2(out)))
        out = self.conv3(self.relu3(self.norm3(out)))
        return self.norm4(out)
| 33.409836 | 72 | 0.578263 | 571 | 4,076 | 4.010508 | 0.085814 | 0.122271 | 0.139738 | 0.089083 | 0.89869 | 0.890393 | 0.853712 | 0.841485 | 0.834498 | 0.800873 | 0 | 0.050429 | 0.284838 | 4,076 | 121 | 73 | 33.68595 | 0.735163 | 0 | 0 | 0.763636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.054545 | 0.018182 | 0.236364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b56a8dccb1d20d3362da5cc9efadaa560142dc83 | 8,340 | py | Python | ematmap/engine.py | ihgazni2/ematmap | e5aca1db403b2765f8d1ceefc29b721f14f72339 | [
"MIT"
] | null | null | null | ematmap/engine.py | ihgazni2/ematmap | e5aca1db403b2765f8d1ceefc29b721f14f72339 | [
"MIT"
] | null | null | null | ematmap/engine.py | ihgazni2/ematmap | e5aca1db403b2765f8d1ceefc29b721f14f72339 | [
"MIT"
] | null | null | null | import efuntool.efuntool as eftl
import elist.elist as elel
import copy
#MAP
def _get_fo(x, y, **kwargs):
    """Resolve the map function and extra args for cell (x, y).

    Scalar defaults come from kwargs 'map_func' (identity) and
    'other_args' ([]); if a per-cell matrix 'map_func_mat' /
    'other_args_mat' is supplied, its [x][y] entry wins.
    Returns (map_func, other_args).
    """
    map_func = eftl.dflt_kwargs("map_func", lambda ele: ele, **kwargs)
    other_args = eftl.dflt_kwargs("other_args", [], **kwargs)
    map_func_mat = eftl.dflt_kwargs("map_func_mat", None, **kwargs)
    other_args_mat = eftl.dflt_kwargs("other_args_mat", None, **kwargs)
    if map_func_mat is not None:
        map_func = map_func_mat[x][y]
    if other_args_mat is not None:
        other_args = other_args_mat[x][y]
    return (map_func, other_args)
def _get_rtrn(x, y, m, **kwargs):
    """Return either the element m[x][y] (rtrn mode 'ele', the default)
    or the coordinate pair [x, y] for any other mode."""
    mode = eftl.dflt_kwargs("rtrn", "ele", **kwargs)
    return m[x][y] if mode == "ele" else [x, y]
def _scan_wrap(func, rev_x, rev_y):
    """Shared factory behind the four traversal wrappers.

    Visits every cell of the layered matrix `m`, resolves the per-cell
    map function / extra args via _get_fo, and overwrites the cell with
    func({"f": map_func, "x": x, "y": y, "o": other_args, "m": m}).

    rev_x / rev_y select descending order for the layer index / in-layer
    index, producing the ud/du x lr/rl traversal variants.  The wrapper
    is deep-copy protected exactly as before (eftl.deepcopy_wrapper), so
    the caller's matrix is not mutated.
    """
    @eftl.deepcopy_wrapper
    def wrapper(m, **kwargs):
        xs = range(len(m) - 1, -1, -1) if rev_x else range(len(m))
        for x in xs:
            llen = len(m[x])
            ys = range(llen - 1, -1, -1) if rev_y else range(llen)
            for y in ys:
                map_func, other_args = _get_fo(x, y, **kwargs)
                m[x][y] = func({
                    "f": map_func,
                    "x": x,
                    "y": y,
                    "o": other_args,
                    "m": m,
                })
        return m
    return wrapper

def udlr_wrap(func):
    """Traverse layers top-down, each layer left-to-right."""
    return _scan_wrap(func, rev_x=False, rev_y=False)

def udrl_wrap(func):
    """Traverse layers top-down, each layer right-to-left."""
    return _scan_wrap(func, rev_x=False, rev_y=True)

def dulr_wrap(func):
    """Traverse layers bottom-up, each layer left-to-right."""
    return _scan_wrap(func, rev_x=True, rev_y=False)

def durl_wrap(func):
    """Traverse layers bottom-up, each layer right-to-left."""
    return _scan_wrap(func, rev_x=True, rev_y=True)
def _unpack(d):
    """Split a traversal dict into (map_func, other_args, x, y, value).

    `d` is built by the *_wrap traversers: 'm' is the layered matrix,
    'f' the per-cell map function, 'o' its extra positional args, and
    'x'/'y' the cell coordinates; value is m[x][y].
    """
    x = d['x']
    y = d['y']
    return (d['f'], d['o'], x, y, d['m'][x][y])

# Each map* variant forwards a different subset of (x, y, v) to the map
# function, always followed by *other_args.  Eight of the originals
# (mapfo, mapfxo, mapfyo, mapfvo, mapfxyo, mapfxvo, mapfyvo, mapfxyvo)
# referenced an undefined name `o` and raised NameError; they are fixed
# here to match their non-'f' twins (the trailing 'o' in the name means
# "other_args appended", which every variant already does).

def mapf(d):
    """Call f(*other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(*o)

def mapx(d):
    """Call f(x, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, *o)

def mapy(d):
    """Call f(y, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(y, *o)

def mapv(d):
    """Call f(value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(v, *o)

def mapo(d):
    """Call f(*other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(*o)

def mapfx(d):
    """Call f(x, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, *o)

def mapfy(d):
    """Call f(y, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(y, *o)

def mapfv(d):
    """Call f(value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(v, *o)

def mapfo(d):
    """Call f(*other_args).  Fix: original passed undefined name `o`."""
    f, o, x, y, v = _unpack(d)
    return f(*o)

def mapxy(d):
    """Call f(x, y, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, y, *o)

def mapxv(d):
    """Call f(x, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, v, *o)

def mapxo(d):
    """Call f(x, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, *o)

def mapyv(d):
    """Call f(y, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(y, v, *o)

def mapyo(d):
    """Call f(y, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(y, *o)

def mapvo(d):
    """Call f(value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(v, *o)

def mapfxy(d):
    """Call f(x, y, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, y, *o)

def mapfxv(d):
    """Call f(x, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, v, *o)

def mapfxo(d):
    """Call f(x, *other_args).  Fix: original passed undefined name `o`."""
    f, o, x, y, v = _unpack(d)
    return f(x, *o)

def mapfyv(d):
    """Call f(y, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(y, v, *o)

def mapfyo(d):
    """Call f(y, *other_args).  Fix: original passed undefined name `o`."""
    f, o, x, y, v = _unpack(d)
    return f(y, *o)

def mapfvo(d):
    """Call f(value, *other_args).  Fix: original passed undefined name `o`."""
    f, o, x, y, v = _unpack(d)
    return f(v, *o)

def mapxyv(d):
    """Call f(x, y, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, y, v, *o)

def mapxyo(d):
    """Call f(x, y, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, y, *o)

def mapxvo(d):
    """Call f(x, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, v, *o)

def mapyvo(d):
    """Call f(y, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(y, v, *o)

def mapfxyv(d):
    """Call f(x, y, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, y, v, *o)

def mapfxyo(d):
    """Call f(x, y, *other_args).  Fix: original passed undefined name `o`."""
    f, o, x, y, v = _unpack(d)
    return f(x, y, *o)

def mapfxvo(d):
    """Call f(x, value, *other_args).  Fix: original passed undefined name `o`."""
    f, o, x, y, v = _unpack(d)
    return f(x, v, *o)

def mapfyvo(d):
    """Call f(y, value, *other_args).  Fix: original passed undefined name `o`."""
    f, o, x, y, v = _unpack(d)
    return f(y, v, *o)

def mapxyvo(d):
    """Call f(x, y, value, *other_args)."""
    f, o, x, y, v = _unpack(d)
    return f(x, y, v, *o)

def mapfxyvo(d):
    """Call f(x, y, value, *other_args).  Fix: original passed undefined name `o`."""
    f, o, x, y, v = _unpack(d)
    return f(x, y, v, *o)
################
| 17.820513 | 69 | 0.438129 | 1,383 | 8,340 | 2.505423 | 0.056399 | 0.050794 | 0.031169 | 0.035786 | 0.844156 | 0.815007 | 0.802309 | 0.765079 | 0.765079 | 0.765079 | 0 | 0.002248 | 0.359952 | 8,340 | 467 | 70 | 17.858672 | 0.646871 | 0.00036 | 0 | 0.836898 | 0 | 0 | 0.027527 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.109626 | false | 0.005348 | 0.008021 | 0 | 0.117647 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b5812a4b100c43ca293cc1146f5003425a9c671d | 97 | py | Python | tests/conftest.py | SOFIE-project/interledger-asset-transfer | d1c2e5ff7af564b1ac1775772132b48a46c71280 | [
"Apache-2.0"
] | 4 | 2020-12-11T18:46:35.000Z | 2022-01-07T16:58:43.000Z | tests/conftest.py | SOFIE-project/interledger-asset-transfer | d1c2e5ff7af564b1ac1775772132b48a46c71280 | [
"Apache-2.0"
] | 3 | 2021-05-11T23:45:18.000Z | 2021-09-02T18:45:25.000Z | tests/conftest.py | SOFIE-project/Interledger | d1c2e5ff7af564b1ac1775772132b48a46c71280 | [
"Apache-2.0"
] | 4 | 2020-11-04T15:36:35.000Z | 2021-08-05T12:38:08.000Z | import sys, os
sys.path.append(os.path.realpath('.'))
sys.path.append(os.path.realpath('./src')) | 24.25 | 42 | 0.701031 | 16 | 97 | 4.25 | 0.4375 | 0.205882 | 0.382353 | 0.441176 | 0.794118 | 0.794118 | 0 | 0 | 0 | 0 | 0 | 0 | 0.051546 | 97 | 4 | 42 | 24.25 | 0.73913 | 0 | 0 | 0 | 0 | 0 | 0.061224 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 8 |
b5a8efe718beb373bd22f7455c0c8b355b57b542 | 1,903 | py | Python | test/replay_buffers/advantages.py | emasquil/ppo | 83b54926ea69244d382bfb958271718932894eb0 | [
"MIT"
] | null | null | null | test/replay_buffers/advantages.py | emasquil/ppo | 83b54926ea69244d382bfb958271718932894eb0 | [
"MIT"
] | 35 | 2022-03-01T10:05:50.000Z | 2022-03-30T20:37:22.000Z | test/replay_buffers/advantages.py | emasquil/ppo | 83b54926ea69244d382bfb958271718932894eb0 | [
"MIT"
] | null | null | null | import jax
import jax.numpy as jnp
from ppo.agents.advantage import general_advantage_estimation
class Case1:
    """GAE fixture: 3-step trajectory whose final transition is terminal."""

    def __init__(self):
        self.values_t = jnp.array([0.0, 1.0, 2.0, 3.0], dtype=jnp.float32)
        self.rewards_tp1 = jnp.array([1.0, 0.0, 1.0], dtype=jnp.float32)
        self.dones_tp1 = jnp.array([False, False, True], dtype=bool)

    def __len__(self):
        # Number of transitions stored.
        return 3

    def correct_gae(self):
        """Hand-computed advantages for discount=1, gae_lambda=0.5."""
        return jnp.array([2.25, 0.5, -1.0])
class Case2:
    """GAE fixture: 3-step trajectory with no terminal transition."""

    def __init__(self):
        self.values_t = jnp.array([0.0, 1.0, 2.0, 3.0], dtype=jnp.float32)
        self.rewards_tp1 = jnp.array([1.0, 0.0, 1.0], dtype=jnp.float32)
        self.dones_tp1 = jnp.array([False, False, False], dtype=bool)

    def __len__(self):
        # Number of transitions stored.
        return 3

    def correct_gae(self):
        """Hand-computed advantages for discount=1, gae_lambda=0.5."""
        return jnp.array([3.0, 2.0, 2.0])
class Case3:
    """GAE fixture: 3-step trajectory terminating mid-sequence (t=1)."""

    def __init__(self):
        self.values_t = jnp.array([0.0, 1.0, 2.0, 3.0], dtype=jnp.float32)
        self.rewards_tp1 = jnp.array([1.0, 0.0, 1.0], dtype=jnp.float32)
        self.dones_tp1 = jnp.array([False, True, False], dtype=bool)

    def __len__(self):
        # Number of transitions stored.
        return 3

    def correct_gae(self):
        """Hand-computed advantages for discount=1, gae_lambda=0.5."""
        return jnp.array([1.5, -1.0, 2.0])
if __name__ == "__main__":
replay_buffer = Case1()
gae = general_advantage_estimation(
replay_buffer.values_t, replay_buffer.dones_tp1, replay_buffer.rewards_tp1, discount=1, gae_lambda=0.5
)
assert jnp.allclose(gae, replay_buffer.correct_gae())
replay_buffer = Case2()
gae = general_advantage_estimation(
replay_buffer.values_t, replay_buffer.dones_tp1, replay_buffer.rewards_tp1, discount=1, gae_lambda=0.5
)
assert jnp.allclose(gae, replay_buffer.correct_gae())
replay_buffer = Case3()
gae = general_advantage_estimation(
replay_buffer.values_t, replay_buffer.dones_tp1, replay_buffer.rewards_tp1, discount=1, gae_lambda=0.5
)
assert jnp.allclose(gae, replay_buffer.correct_gae())
print("Test completed")
| 29.276923 | 110 | 0.656332 | 282 | 1,903 | 4.138298 | 0.173759 | 0.154242 | 0.046272 | 0.097686 | 0.844045 | 0.844045 | 0.844045 | 0.844045 | 0.844045 | 0.844045 | 0 | 0.056894 | 0.214924 | 1,903 | 64 | 111 | 29.734375 | 0.72423 | 0 | 0 | 0.574468 | 0 | 0 | 0.011561 | 0 | 0 | 0 | 0 | 0 | 0.06383 | 1 | 0.191489 | false | 0 | 0.06383 | 0.12766 | 0.446809 | 0.021277 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.