hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ddcbfbd04d424120fba564125c159a2a41070598 | 2,589 | py | Python | scripts/hsv_shit.py | skyelong/Insight_Project_clean | 19ea003e5c5f4013a66ec244e886036b2b78212b | [
"MIT"
] | 1 | 2020-06-14T02:59:27.000Z | 2020-06-14T02:59:27.000Z | scripts/hsv_shit.py | skyelong/Insight_Project_clean | 19ea003e5c5f4013a66ec244e886036b2b78212b | [
"MIT"
] | null | null | null | scripts/hsv_shit.py | skyelong/Insight_Project_clean | 19ea003e5c5f4013a66ec244e886036b2b78212b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 13 20:54:43 2020
@author: macbook
"""
import cv2
import pandas as pd
def shift_h(data, v_thresh, s_thresh):
    """Produces shifted H values for color segmentation.

    Chromatic H is returned in 0-179; neutral H is returned in 200-255.
    Inputs: data - array of pixel H, S, V values, one entry per pixel
    Outputs: H, H120, H240
    """
    shifted_colors = []
    for i in range(0, len(data)):
        H = data[i][0]
        s = data[i][1]
        v = data[i][2]
        V_thres = 255 * v_thresh
        S_thres = 255 * s_thresh
        if v > V_thres and s > S_thres:
            # Chromatic pixel: emit the hue plus two copies rotated by 60
            # and 120 hue steps (OpenCV hue spans 0-179), so every hue is
            # far from the wrap-around point in at least one channel.
            if H >= 120:
                H120 = H - 120
            else:
                H120 = H + 60
            if H >= 60:
                H240 = H - 60
            else:
                H240 = H + 120
        else:
            # Dark or unsaturated pixel: map onto the neutral band
            # 200-255, scaled by brightness.
            H = 200 + ((v / 255) * 55)
            H120 = H
            H240 = H
        shifted_colors.append([H, H120, H240])
    return shifted_colors

def shift_h_df(data, v_thresh, s_thresh):
    """Produces shifted H values for color segmentation.

    Inputs: data - dataframe of pixel H, S, V values, one entry per pixel
    Outputs: H, H120, H240
    """
    shifted_colors = []
    for i in range(0, len(data)):
        H = data["h"][i]
        s = data["s"][i]
        v = data["v"][i]
        V_thres = 255 * v_thresh
        S_thres = 255 * s_thresh
        if v > V_thres and s > S_thres:
            if H >= 120:
                H120 = H - 120
            else:
                H120 = H + 60
            if H >= 60:
                H240 = H - 60
            else:
                H240 = H + 120
        else:
            H = 200 + ((v / 255) * 55)
            H120 = H
            H240 = H
        shifted_colors.append([H, H120, H240])
    return shifted_colors

def shift_h_df_remove(data, v_thresh, s_thresh):
    """Produces shifted H values for color segmentation, dropping neutrals.

    Inputs: data - dataframe of pixel H, S, V values, one entry per pixel
    Outputs: H, H120, H240 for chromatic pixels only; pixels below the
    thresholds are skipped rather than mapped to the neutral band.
    """
    shifted_colors = []
    for i in range(0, len(data)):
        H = data["h"][i]
        s = data["s"][i]
        v = data["v"][i]
        V_thres = 255 * v_thresh
        S_thres = 255 * s_thresh
        if v > V_thres and s > S_thres:
            if H >= 120:
                H120 = H - 120
            else:
                H120 = H + 60
            if H >= 60:
                H240 = H - 60
            else:
                H240 = H + 120
            shifted_colors.append([H, H120, H240])
    return shifted_colors | 27.542553 | 72 | 0.481267 | 362 | 2,589 | 3.334254 | 0.212707 | 0.096935 | 0.039768 | 0.034797 | 0.807788 | 0.807788 | 0.78459 | 0.78459 | 0.78459 | 0.78459 | 0 | 0.126797 | 0.409038 | 2,589 | 94 | 73 | 27.542553 | 0.662092 | 0.222866 | 0 | 0.869565 | 0 | 0 | 0.003083 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043478 | false | 0.014493 | 0.028986 | 0 | 0.115942 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
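A minimal usage sketch for the file above (not part of the original source): it assumes OpenCV's HSV convention (H in 0-179, S and V in 0-255); the image path and the reshape into an (N, 3) pixel array are illustrative.

import cv2

img = cv2.imread("leaf.png")  # hypothetical input image
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
pixels = hsv.reshape(-1, 3)  # one (H, S, V) row per pixel
shifted = shift_h(pixels, v_thresh=0.2, s_thresh=0.2)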
9b398f18d9054d135af6f6456e8316b116bc2730 | 2,156 | py | Python | testsim15.py | shurkova/currentVers | 25027f3f4faa9033b69041459f0785c1436c3f31 | [
"CECILL-B"
] | 1 | 2020-09-09T15:30:38.000Z | 2020-09-09T15:30:38.000Z | testsim15.py | shurkova/currentVers | 25027f3f4faa9033b69041459f0785c1436c3f31 | [
"CECILL-B"
] | null | null | null | testsim15.py | shurkova/currentVers | 25027f3f4faa9033b69041459f0785c1436c3f31 | [
"CECILL-B"
] | 11 | 2020-05-01T09:03:14.000Z | 2022-02-09T14:17:41.000Z | simType='sym_file'
symProps = [{'name': 'lovesMaryTom', 'RBs': [{'pred_name': 'lover', 'pred_sem': ['lover1', 'lover2', 'lover3'], 'higher_order': False, 'object_name': 'Mary', 'object_sem': ['mary1', 'mary2', 'mary3'], 'P': 'non_exist'}, {'pred_name': 'beloved', 'pred_sem': ['beloved1', 'beloved2', 'beloved3'], 'higher_order': False, 'object_name': 'Tom', 'object_sem': ['tom1', 'tom2', 'tome3'], 'P': 'non_exist'}], 'set': 'driver', 'analog': 0},
{'name': 'lovesTomJane', 'RBs': [{'pred_name': 'lover', 'pred_sem': ['lover1', 'lover2', 'lover3'], 'higher_order': False, 'object_name': 'Tom', 'object_sem': ['tom1', 'tom2', 'tome3'], 'P': 'non_exist'}, {'pred_name': 'beloved', 'pred_sem': ['beloved1', 'beloved2', 'beloved3'], 'higher_order': False, 'object_name': 'Jane', 'object_sem': ['jane1', 'jane2', 'mary2'], 'P': 'non_exist'}], 'set': 'driver', 'analog': 0},
{'name': 'jealousMaryJane', 'RBs': [{'pred_name': 'jealous_act', 'pred_sem': ['jel1', 'jel2', 'jel3'], 'higher_order': False, 'object_name': 'Mary', 'object_sem': ['mary1', 'mary2', 'mary3'], 'P': 'non_exist'}, {'pred_name': 'jealous_pat', 'pred_sem': ['jel4', 'jel5', 'jel6'], 'higher_order': False, 'object_name': 'Jane', 'object_sem': ['jane1', 'jane2', 'mary2'], 'P': 'non_exist'}], 'set': 'driver', 'analog': 0},
{'name': 'lovesJohnKathy', 'RBs': [{'pred_name': 'lover', 'pred_sem': ['lover1', 'lover2', 'lover3'], 'higher_order': False, 'object_name': 'John', 'object_sem': ['john1', 'john2', 'john3'], 'P': 'non_exist'}, {'pred_name': 'beloved', 'pred_sem': ['beloved1', 'beloved2', 'beloved3'], 'higher_order': False, 'object_name': 'Kathy', 'object_sem': ['kathy1', 'kathy2', 'kathy3'], 'P': 'non_exist'}], 'set': 'recipient', 'analog': 0},
{'name': 'lovesKathySam', 'RBs': [{'pred_name': 'lover', 'pred_sem': ['lover1', 'lover2', 'lover3'], 'higher_order': False, 'object_name': 'Kathy', 'object_sem': ['kathy1', 'kathy2', 'kathy3'], 'P': 'non_exist'}, {'pred_name': 'beloved', 'pred_sem': ['beloved1', 'beloved2', 'beloved3'], 'higher_order': False, 'object_name': 'Sam', 'object_sem': ['sam1', 'sam2', 'sam3'], 'P': 'non_exist'}], 'set': 'recipient', 'analog': 0}]
| 165.846154 | 432 | 0.607607 | 261 | 2,156 | 4.777778 | 0.249042 | 0.064154 | 0.128308 | 0.176423 | 0.825982 | 0.825982 | 0.825982 | 0.788292 | 0.772253 | 0.772253 | 0 | 0.033333 | 0.095547 | 2,156 | 12 | 433 | 179.666667 | 0.606154 | 0 | 0 | 0 | 0 | 0 | 0.575673 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9b3f76fa1d35675a07fbb0103b68bfc50da25ffc | 201 | py | Python | assets/code/src/test_integer_division.py | drtrome/rsd | 5fdc0edd2733319d1c8c2e7dbbd8e21af1b0606c | [
"MIT"
] | null | null | null | assets/code/src/test_integer_division.py | drtrome/rsd | 5fdc0edd2733319d1c8c2e7dbbd8e21af1b0606c | [
"MIT"
] | 92 | 2017-10-01T14:00:05.000Z | 2020-04-20T13:06:40.000Z | assets/code/src/test_integer_division.py | drtrome/rsd | 5fdc0edd2733319d1c8c2e7dbbd8e21af1b0606c | [
"MIT"
] | 24 | 2017-12-11T16:46:31.000Z | 2020-01-08T11:54:10.000Z | import integer_division
assert integer_division.get_exponent_of_factor(45, 5) == 1
assert integer_division.get_exponent_of_factor(45, 7) == 0
assert integer_division.get_exponent_of_factor(9, 9) == 1
| 33.5 | 58 | 0.820896 | 33 | 201 | 4.606061 | 0.424242 | 0.394737 | 0.414474 | 0.473684 | 0.815789 | 0.815789 | 0.815789 | 0.552632 | 0 | 0 | 0 | 0.060109 | 0.089552 | 201 | 5 | 59 | 40.2 | 0.770492 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.75 | 1 | 0 | true | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
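A sketch of an implementation consistent with the assertions above (the test only imports `integer_division`, so this body is an assumption, not the repository's code):

def get_exponent_of_factor(n, factor):
    # Largest e such that factor**e divides n;
    # e.g. 45 = 3**2 * 5**1 gives 1 for factor 5 and 0 for factor 7.
    exponent = 0
    while n % factor == 0:
        n //= factor
        exponent += 1
    return exponent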
9b9193eb4c9f1666c2b28aaa8298556db3418728 | 201 | py | Python | Codewars/7kyu/password-hashes/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codewars/7kyu/password-hashes/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codewars/7kyu/password-hashes/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python - 3.6.0
Test.describe('Basic tests')
Test.assert_equals(pass_hash('password'), '5f4dcc3b5aa765d61d8327deb882cf99')
Test.assert_equals(pass_hash('abc123'), 'e99a18c428cb38d5f260853678922e03')
| 33.5 | 77 | 0.800995 | 22 | 201 | 7.136364 | 0.727273 | 0.127389 | 0.203822 | 0.254777 | 0.305732 | 0 | 0 | 0 | 0 | 0 | 0 | 0.252632 | 0.054726 | 201 | 5 | 78 | 40.2 | 0.573684 | 0.069652 | 0 | 0 | 0 | 0 | 0.481081 | 0.345946 | 0 | 0 | 0 | 0 | 0.666667 | 1 | 0 | true | 0.666667 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
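The expected strings are the MD5 hex digests of the inputs ('5f4dcc3b…' is the well-known MD5 of 'password'), so a `pass_hash` consistent with these tests would be:

import hashlib

def pass_hash(password):
    # MD5 hex digest of the UTF-8 encoded password.
    return hashlib.md5(password.encode('utf-8')).hexdigest()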
32d44e2cda15c81583b684ffc016731ccac03212 | 13,068 | py | Python | tests/rst/test_opt_list.py | LudditeLabs/autodoc-tool | b4ae7e3b61907e7e9c3a1b534fce055e5860ffab | [
"Apache-2.0"
] | null | null | null | tests/rst/test_opt_list.py | LudditeLabs/autodoc-tool | b4ae7e3b61907e7e9c3a1b534fce055e5860ffab | [
"Apache-2.0"
] | null | null | null | tests/rst/test_opt_list.py | LudditeLabs/autodoc-tool | b4ae7e3b61907e7e9c3a1b534fce055e5860ffab | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Luddite Labs Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Test: options list.
class TestOptList:
# Test: if we don't pass source lines info and width.
# * Left side option width must be 15
# * Options with long descriptions have 1 blank line margin
# * Long options have 1 blank line margin and description on next line.
# * Other options have no margins.
def test_no_src(self, assert_py_doc):
assert_py_doc(
settings=dict(line_width=None),
text="""
Top text.
-a, --ax Output all.
-b Output both (this description is
quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description.
This is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option A VMS-style option. Note the adjustment for
the required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE These two options are synonyms; both have
arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
expected="""
Top text.
-a, --ax Output all.
-b Output both (this description is
quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description.
This is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option
A VMS-style option. Note the adjustment for
the required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE
These two options are synonyms; both have
arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
pass_lines=False
)
# Test: if we pass no source lines info but set width.
# * Left side option width must be width / 4
# * Options with long descriptions have 1 blank line margin
# * Long options have 1 blank line margin and description on next line.
# * Other options have no margins.
def test_no_src_width(self, assert_py_doc):
assert_py_doc(
settings=dict(line_width=66),
text="""
Top text.
-a, --ax Output all.
-b Output both (this description is
quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description.
This is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option A VMS-style option. Note the adjustment for
the required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE These two options are synonyms; both have
arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
expected="""
Top text.
-a, --ax Output all.
-b Output both (this description is quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description.
This is the first.
This is the second. Blank lines may be omitted
between options (as above) or left in (as here and
below).
--very-long-option
A VMS-style option. Note the adjustment for the
required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE
These two options are synonyms; both have
arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
pass_lines=False
)
# Test: if we pass source lines info but no width.
# * Left side option width must be the same as in orig text.
# * Options with long descriptions have 1 blank line margin
# * Long options have 1 blank line margin and description on next line.
# * Other options have no margins.
def test_src(self, assert_py_doc):
assert_py_doc(
text="""
Top text.
-a, --ax Output all.
-b Output both (this description is
quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description.
This is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option A VMS-style option. Note the adjustment for
the required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE These two options are synonyms; both have
arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
expected="""
Top text.
-a, --ax Output all.
-b Output both (this description is quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description. This
is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option
A VMS-style option. Note the adjustment for the required
two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE
These two options are synonyms; both have arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
pass_lines=True
)
# Test: if we pass source lines info and width.
# * Left side option width must be the same as in orig text.
# * Options with long descriptions have 1 blank line margin
# * Long options have 1 blank line margin and description on next line.
# * Other options have no margins.
def test_src_width(self, assert_py_doc):
assert_py_doc(
settings=dict(line_width=66),
text="""
Top text.
-a, --ax Output all.
-b Output both (this description is
quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description.
This is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option A VMS-style option. Note the adjustment for
the required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE These two options are synonyms; both have
arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
expected="""
Top text.
-a, --ax Output all.
-b Output both (this description is quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description. This
is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option
A VMS-style option. Note the adjustment for the
required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE
These two options are synonyms; both have arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
pass_lines=True
)
# Test: if we pass all info + force margin between options.
def test_margin(self, assert_py_doc):
assert_py_doc(
settings=dict(opt_margin=True, line_width=66),
text="""
Top text.
-a, --ax Output all.
-b Output both (this description is
quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description.
This is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option A VMS-style option. Note the adjustment for
the required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE These two options are synonyms; both have
arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
expected="""
Top text.
-a, --ax Output all.
-b Output both (this description is quite long).
-c arg Output just arg.
--long Output all day long.
-p This option has two paragraphs in the description. This
is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here and below).
--very-long-option
A VMS-style option. Note the adjustment for the
required two spaces.
--an-even-longer-option
The description can also start on the next line.
-2, --two This option has two variants.
-f FILE, --file=FILE
These two options are synonyms; both have arguments.
/V A VMS/DOS-style option.
Bottom text.
""",
pass_lines=True
)
| 35.034853 | 82 | 0.496021 | 1,512 | 13,068 | 4.261243 | 0.105159 | 0.027937 | 0.040354 | 0.049666 | 0.909514 | 0.909514 | 0.909514 | 0.90253 | 0.898494 | 0.892597 | 0 | 0.00482 | 0.444368 | 13,068 | 372 | 83 | 35.129032 | 0.882523 | 0.127717 | 0 | 0.920502 | 0 | 0 | 0.901258 | 0.020241 | 0 | 0 | 0 | 0 | 0.041841 | 1 | 0.020921 | false | 0.020921 | 0 | 0 | 0.025105 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fd17c9d24f779e07c3a9537db132f4ed960d54d4 | 101 | py | Python | rkd/util.py | iro-upgto/rkd | 7823781ddc81a9dac18fed55080205e8ed68b57b | [
"MIT"
] | null | null | null | rkd/util.py | iro-upgto/rkd | 7823781ddc81a9dac18fed55080205e8ed68b57b | [
"MIT"
] | null | null | null | rkd/util.py | iro-upgto/rkd | 7823781ddc81a9dac18fed55080205e8ed68b57b | [
"MIT"
] | null | null | null | from numpy import *
def deg2rad(x):
    # Degree/radian conversions; note these shadow numpy's own
    # deg2rad/rad2deg brought in by the star import above.
    return (x * pi) / 180
def rad2deg(x):
    return (x * 180) / pi | 14.428571 | 23 | 0.60396 | 17 | 101 | 3.588235 | 0.588235 | 0.229508 | 0.262295 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 0.207921 | 101 | 7 | 24 | 14.428571 | 0.6625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0.2 | 0.4 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
fd1e1c6ec4ff50c811ca0ba7f6cd5449de3d665f | 235 | py | Python | backend/models/__init__.py | riftadi/trashexplorer | fed8ed83c277d56a548ff6036af57d776db3bb9e | [
"Apache-2.0"
] | null | null | null | backend/models/__init__.py | riftadi/trashexplorer | fed8ed83c277d56a548ff6036af57d776db3bb9e | [
"Apache-2.0"
] | 3 | 2018-06-14T11:08:52.000Z | 2022-03-02T08:08:34.000Z | backend/models/__init__.py | riftadi/trashure | fed8ed83c277d56a548ff6036af57d776db3bb9e | [
"Apache-2.0"
] | null | null | null | from models.areamodel import *
from models.basedb import *
from models.gamemodel import *
from models.revokedtokenmodel import *
from models.trashbinmodel import *
from models.trashbinimagemodel import *
from models.usermodel import *
| 29.375 | 39 | 0.821277 | 28 | 235 | 6.892857 | 0.357143 | 0.362694 | 0.497409 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.119149 | 235 | 7 | 40 | 33.571429 | 0.932367 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
fd3e4cc167ea1ff675c1e1a64a0442080413812e | 169 | py | Python | baedin/chat/views.py | braedinmgregoire/DndWebsite | 19b55117cba39b524ed2cd74b22f33587733fb58 | [
"MIT"
] | null | null | null | baedin/chat/views.py | braedinmgregoire/DndWebsite | 19b55117cba39b524ed2cd74b22f33587733fb58 | [
"MIT"
] | null | null | null | baedin/chat/views.py | braedinmgregoire/DndWebsite | 19b55117cba39b524ed2cd74b22f33587733fb58 | [
"MIT"
] | null | null | null | from django.shortcuts import render
# Create your views here.
from django.shortcuts import render
def tavern(request):
return render(request, 'chat/tavern.html')
| 18.777778 | 46 | 0.769231 | 23 | 169 | 5.652174 | 0.652174 | 0.153846 | 0.292308 | 0.384615 | 0.476923 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147929 | 169 | 8 | 47 | 21.125 | 0.902778 | 0.136095 | 0 | 0.5 | 0 | 0 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
fd4b935d395704d55d505fcc6d63bd3199b0d576 | 92 | py | Python | parameters_443.py | droptables/Battle-Dex | 279b43bb2027e4a54050c9cccddc399c0b96da13 | [
"BSD-3-Clause"
] | null | null | null | parameters_443.py | droptables/Battle-Dex | 279b43bb2027e4a54050c9cccddc399c0b96da13 | [
"BSD-3-Clause"
] | null | null | null | parameters_443.py | droptables/Battle-Dex | 279b43bb2027e4a54050c9cccddc399c0b96da13 | [
"BSD-3-Clause"
] | null | null | null | password="pbkdf2(1000,20,sha512)$a85a47dcd18825d8$3656c81d1b8497c50ecdb08566babf1e68e8c17b"
| 46 | 91 | 0.891304 | 7 | 92 | 11.714286 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.483516 | 0.01087 | 92 | 1 | 92 | 92 | 0.417582 | 0 | 0 | 0 | 0 | 0 | 0.869565 | 0.869565 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
b5d3037577f7e53e0de912c8b594fc1d59a629be | 13,861 | py | Python | mmtfPyspark/tests/utils/test_MmtfSubstructure.py | sbliven/mmtf-pyspark | 3d444178bdc0d5128aafdb1326fec12b5d7634b5 | [
"Apache-2.0"
] | 59 | 2018-01-28T06:50:56.000Z | 2022-02-10T06:07:12.000Z | mmtfPyspark/tests/utils/test_MmtfSubstructure.py | sbliven/mmtf-pyspark | 3d444178bdc0d5128aafdb1326fec12b5d7634b5 | [
"Apache-2.0"
] | 101 | 2018-02-01T20:51:10.000Z | 2022-01-24T00:50:29.000Z | mmtfPyspark/tests/utils/test_MmtfSubstructure.py | sbliven/mmtf-pyspark | 3d444178bdc0d5128aafdb1326fec12b5d7634b5 | [
"Apache-2.0"
] | 29 | 2018-01-29T10:09:51.000Z | 2022-01-23T18:53:28.000Z | #!/usr/bin/env python
'''
Authorship information:
__author__ = "Peter Rose"
__maintainer__ = "Peter Rose"
__status__ = "Warning"
'''
import unittest
import numpy as np
from pyspark.sql import SparkSession
from mmtfPyspark.io import mmtfReader
from mmtfPyspark.utils import MmtfSubstructure
class TestMmtfSubstructure(unittest.TestCase):

    def setUp(self):
        self.spark = SparkSession.builder.master("local[1]") \
            .appName("TestMmtfSubstructure") \
            .getOrCreate()

    def test_4HHB_polychain(self):
        print('test_4HHB_polychain')
        path = '../../../resources/files/'
        pdb = mmtfReader.read_mmtf_files(path)
        pdb = pdb.filter(lambda t: t[0] == '4HHB')
        structure = pdb.values().first()

        chain = MmtfSubstructure(structure, 'A', chain_names=['A'], entity_types=['polymer'])
        self.assertEqual(1069, chain.num_atoms)
        self.assertEqual(141, chain.num_groups)
        self.assertEqual(1, chain.num_chains)
        self.assertEqual(1, chain.num_models)
        self.assertEqual(np.testing.assert_allclose([6.204, 6.913, 8.504],
                                                    chain.x_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([16.869, 17.759, 17.378],
                                                    chain.y_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([4.854, 4.607, 4.797],
                                                    chain.z_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([49.05, 43.14, 24.80],
                                                    chain.b_factor_list[0:3], atol=0.01), None)
        self.assertEqual(np.testing.assert_allclose([1.0, 1.0, 1.0],
                                                    chain.occupancy_list[0:3], atol=0.01), None)
        self.assertListEqual([1, 2, 3], chain.atom_id_list[0:3].tolist())
        self.assertListEqual(['', '', ''], chain.alt_loc_list[0:3].tolist())
        self.assertListEqual(['A', 'A', 'A'], chain.chain_names[0:3].tolist())
        self.assertListEqual(['A', 'A', 'A'], chain.chain_ids[0:3].tolist())
        self.assertListEqual(['1', '1', '1'], chain.group_numbers[0:3].tolist())
        self.assertListEqual(['VAL', 'VAL', 'VAL'], chain.group_names[0:3].tolist())
        self.assertListEqual(['N', 'CA', 'C'], chain.atom_names[0:3].tolist())
        self.assertListEqual(['N', 'C', 'C'], chain.elements[0:3].tolist())
        self.assertListEqual(['L-PEPTIDE LINKING', 'L-PEPTIDE LINKING'], chain.chem_comp_types[0:2].tolist())
        self.assertListEqual([True, True, True], chain.polymer[0:3].tolist())
        self.assertListEqual([0, 0, 0], chain.entity_indices[0:3].tolist())
        self.assertListEqual([0, 0, 0], chain.sequence_positions[0:3].tolist())

        chain = MmtfSubstructure(structure, 'B', chain_names=['B'], entity_types=['polymer'])
        self.assertEqual(1123, chain.num_atoms)
        self.assertEqual(146, chain.num_groups)
        self.assertEqual(1, chain.num_chains)
        self.assertEqual(1, chain.num_models)
        self.assertEqual(np.testing.assert_allclose([9.223, 8.694, 9.668],
                                                    chain.x_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([-20.614, -20.026, -21.068],
                                                    chain.y_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([1.365, -0.123, -1.645],
                                                    chain.z_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([46.08, 70.96, 69.74],
                                                    chain.b_factor_list[0:3], atol=0.01), None)
        self.assertEqual(np.testing.assert_allclose([1.0, 1.0, 1.0],
                                                    chain.occupancy_list[0:3], atol=0.01), None)
        self.assertListEqual([1070, 1071, 1072], chain.atom_id_list[0:3].tolist())
        self.assertListEqual(['', '', ''], chain.alt_loc_list[0:3].tolist())
        self.assertListEqual(['B', 'B', 'B'], chain.chain_names[0:3].tolist())
        self.assertListEqual(['B', 'B', 'B'], chain.chain_ids[0:3].tolist())
        self.assertListEqual(['1', '1', '1'], chain.group_numbers[0:3].tolist())
        self.assertListEqual(['VAL', 'VAL', 'VAL'], chain.group_names[0:3].tolist())
        self.assertListEqual(['N', 'CA', 'C'], chain.atom_names[0:3].tolist())
        self.assertListEqual(['N', 'C', 'C'], chain.elements[0:3].tolist())
        self.assertListEqual(['L-PEPTIDE LINKING', 'L-PEPTIDE LINKING'], chain.chem_comp_types[0:2].tolist())
        self.assertListEqual([True, True, True], chain.polymer[0:3].tolist())
        self.assertListEqual([1, 1, 1], chain.entity_indices[0:3].tolist())
        self.assertListEqual([0, 0, 0], chain.sequence_positions[0:3].tolist())

    def test_4HHB_polychains(self):
        print('test_4HHB_polychains')
        path = '../../../resources/files/'
        pdb = mmtfReader.read_mmtf_files(path)
        pdb = pdb.filter(lambda t: t[0] == '4HHB')
        structure = pdb.values().first()

        chain = MmtfSubstructure(structure, 'A+B', chain_names=['A', 'B'], entity_types=['polymer'])
        self.assertEqual(1069+1123, chain.num_atoms)
        self.assertEqual(141+146, chain.num_groups)
        self.assertEqual(1+1, chain.num_chains)
        self.assertEqual(1, chain.num_models)

    def test_4HHB_chain_ids(self):
        print('test_4HHB_chain_ids')
        path = '../../../resources/files/'
        pdb = mmtfReader.read_mmtf_files(path)
        pdb = pdb.filter(lambda t: t[0] == '4HHB')
        structure = pdb.values().first()

        chain = MmtfSubstructure(structure, 'A', chain_ids=['A'])
        self.assertEqual(1069, chain.num_atoms)
        np.set_printoptions(threshold=np.inf)
        print(chain.group_serial)
        self.assertEqual(141, chain.num_groups)
        self.assertEqual(1, chain.num_chains)
        self.assertEqual(1, chain.num_models)
        self.assertEqual(np.testing.assert_allclose([6.204, 6.913, 8.504],
                                                    chain.x_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([16.869, 17.759, 17.378],
                                                    chain.y_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([4.854, 4.607, 4.797],
                                                    chain.z_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([49.05, 43.14, 24.80],
                                                    chain.b_factor_list[0:3], atol=0.01), None)
        self.assertEqual(np.testing.assert_allclose([1.0, 1.0, 1.0],
                                                    chain.occupancy_list[0:3], atol=0.01), None)
        self.assertListEqual([1, 2, 3], chain.atom_id_list[0:3].tolist())
        self.assertListEqual(['', '', ''], chain.alt_loc_list[0:3].tolist())
        self.assertListEqual(['A', 'A', 'A'], chain.chain_names[0:3].tolist())
        self.assertListEqual(['A', 'A', 'A'], chain.chain_ids[0:3].tolist())
        self.assertListEqual(['1', '1', '1'], chain.group_numbers[0:3].tolist())
        self.assertListEqual(['VAL', 'VAL', 'VAL'], chain.group_names[0:3].tolist())
        self.assertListEqual(['N', 'CA', 'C'], chain.atom_names[0:3].tolist())
        self.assertListEqual(['N', 'C', 'C'], chain.elements[0:3].tolist())
        self.assertListEqual(['L-PEPTIDE LINKING', 'L-PEPTIDE LINKING'], chain.chem_comp_types[0:2].tolist())
        self.assertListEqual([True, True, True], chain.polymer[0:3].tolist())
        self.assertListEqual([0, 0, 0], chain.entity_indices[0:3].tolist())
        self.assertListEqual([0, 0, 0], chain.sequence_positions[0:3].tolist())

        chain = MmtfSubstructure(structure, 'B', chain_names=['B'], entity_types=['polymer'])
        self.assertEqual(1123, chain.num_atoms)
        self.assertEqual(146, chain.num_groups)
        self.assertEqual(1, chain.num_chains)
        self.assertEqual(1, chain.num_models)
        self.assertEqual(np.testing.assert_allclose([9.223, 8.694, 9.668],
                                                    chain.x_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([-20.614, -20.026, -21.068],
                                                    chain.y_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([1.365, -0.123, -1.645],
                                                    chain.z_coord_list[0:3], atol=0.001), None)
        self.assertEqual(np.testing.assert_allclose([46.08, 70.96, 69.74],
                                                    chain.b_factor_list[0:3], atol=0.01), None)
        self.assertEqual(np.testing.assert_allclose([1.0, 1.0, 1.0],
                                                    chain.occupancy_list[0:3], atol=0.01), None)
        self.assertListEqual([1070, 1071, 1072], chain.atom_id_list[0:3].tolist())
        self.assertListEqual(['', '', ''], chain.alt_loc_list[0:3].tolist())
        self.assertListEqual(['B', 'B', 'B'], chain.chain_names[0:3].tolist())
        self.assertListEqual(['B', 'B', 'B'], chain.chain_ids[0:3].tolist())
        self.assertListEqual(['1', '1', '1'], chain.group_numbers[0:3].tolist())
        self.assertListEqual(['VAL', 'VAL', 'VAL'], chain.group_names[0:3].tolist())
        self.assertListEqual(['N', 'CA', 'C'], chain.atom_names[0:3].tolist())
        self.assertListEqual(['N', 'C', 'C'], chain.elements[0:3].tolist())
        self.assertListEqual(['L-PEPTIDE LINKING', 'L-PEPTIDE LINKING'], chain.chem_comp_types[0:2].tolist())
        self.assertListEqual([True, True, True], chain.polymer[0:3].tolist())
        self.assertListEqual([1, 1, 1], chain.entity_indices[0:3].tolist())
        self.assertListEqual([0, 0, 0], chain.sequence_positions[0:3].tolist())

    def test_4HHB_group_names(self):
        print('test_4HHB_group_names')
        path = '../../../resources/files/'
        pdb = mmtfReader.read_mmtf_files(path)
        pdb = pdb.filter(lambda t: t[0] == '4HHB')
        structure = pdb.values().first()

        chain = MmtfSubstructure(structure, 'HEM', chain_names=['A'], group_names=['HEM'])
        self.assertEqual(43, chain.num_atoms)
        self.assertEqual(1, chain.num_groups)
        self.assertEqual(1, chain.num_chains)
        self.assertEqual(1, chain.num_models)

        chain = MmtfSubstructure(structure, 'HEM', group_names=['HEM'])
        self.assertEqual(43*4, chain.num_atoms)
        self.assertEqual(1*4, chain.num_groups)
        self.assertEqual(1*4, chain.num_chains)
        self.assertEqual(1, chain.num_models)

    def test_4HHB_group_numbers(self):
        print('test_4HHB_group_numbers')
        path = '../../../resources/files/'
        pdb = mmtfReader.read_mmtf_files(path)
        pdb = pdb.filter(lambda t: t[0] == '4HHB')
        structure = pdb.values().first()

        chain = MmtfSubstructure(structure, 'HEM', chain_names=['A'], group_numbers=['1', '10'])
        self.assertEqual(43, chain.num_atoms)
        self.assertEqual(1, chain.num_groups)
        self.assertEqual(1, chain.num_chains)
        self.assertEqual(1, chain.num_models)

        chain = MmtfSubstructure(structure, 'HEM', group_names=['HEM'])
        self.assertEqual(43*4, chain.num_atoms)
        self.assertEqual(1*4, chain.num_groups)
        self.assertEqual(1*4, chain.num_chains)
        self.assertEqual(1, chain.num_models)

    # def test_4HHB_chains(self):
    #     print('test_4HHB_chains')
    #     path = '../../../resources/files/'
    #     pdb = mmtfReader.read_mmtf_files(path)
    #     pdb = pdb.filter(lambda t: t[0] == '4HHB')
    #     structure = pdb.values().first()
    #     chains = structure.get_chains()
    #     self.assertEqual(4, len(chains))

    # def test_4HHB_chains_first_model(self):
    #     print('test_4HHB_chains_first_model')
    #     path = '../../../resources/files/'
    #     pdb = mmtfReader.read_mmtf_files(path, first_model=True)
    #     pdb = pdb.filter(lambda t: t[0] == '4HHB')
    #     structure = pdb.values().first()
    #     chains = structure.get_chains()
    #     self.assertEqual(4, len(chains))
    #
    # def test_1J6T_chains_first_model(self):
    #     print('test_1J6T_chains_first_model')
    #     path = '../../../resources/files/'
    #     pdb = mmtfReader.read_mmtf_files(path, first_model=True)
    #     pdb = pdb.filter(lambda t: t[0] == '1J6T')
    #     structure = pdb.values().first()
    #     chains = structure.get_chains()
    #     self.assertEqual(2, len(chains))

    # def test_4HHB_multiple_chains(self):
    #     print('test_4HHB_multiple_chains')
    #     path = '../../../resources/files/'
    #     pdb = mmtfReader.read_mmtf_files(path)
    #     pdb = pdb.filter(lambda t: t[0] == '4HHB')
    #     structure = pdb.values().first()
    #     chain_list = ['A']
    #     chains = structure.get_multiple_chains(chain_list)
    #     self.assertEqual(1168, chains.x_coord_list.size)
    #     chain_list = ['A', 'B']
    #     chains = structure.get_multiple_chains(chain_list)
    #     self.assertEqual(2392, chains.x_coord_list.shape[0])
    #     chain_list = ['A', 'B', 'C']
    #     chains = structure.get_multiple_chains(chain_list)
    #     self.assertEqual(3563, chains.x_coord_list.shape[0])
    #     chain_list = ['A', 'B', 'C', 'D']
    #     chains = structure.get_multiple_chains(chain_list)
    #     self.assertEqual(4779, chains.x_coord_list.shape[0])

    def tearDown(self):
        self.spark.stop()


if __name__ == '__main__':
    unittest.main()
| 55.444 | 109 | 0.587043 | 1,750 | 13,861 | 4.489714 | 0.101714 | 0.016291 | 0.044801 | 0.061092 | 0.914598 | 0.904162 | 0.873234 | 0.869034 | 0.869034 | 0.840524 | 0 | 0.061659 | 0.244138 | 13,861 | 249 | 110 | 55.666667 | 0.68827 | 0.137508 | 0 | 0.826816 | 0 | 0 | 0.048727 | 0.010502 | 0 | 0 | 0 | 0 | 0.581006 | 1 | 0.039106 | false | 0 | 0.027933 | 0 | 0.072626 | 0.039106 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
b5d640dc16a26f13ad722f56edb3ce2634e75ee6 | 4,689 | py | Python | mannord/spam_detection_mixins.py | mshavlovsky/mannord | a7bd066e1d8c57bcf220f7234fdf071e19013b49 | [
"BSD-2-Clause"
] | null | null | null | mannord/spam_detection_mixins.py | mshavlovsky/mannord | a7bd066e1d8c57bcf220f7234fdf071e19013b49 | [
"BSD-2-Clause"
] | null | null | null | mannord/spam_detection_mixins.py | mshavlovsky/mannord | a7bd066e1d8c57bcf220f7234fdf071e19013b49 | [
"BSD-2-Clause"
] | null | null | null | from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy import (Column, Integer, Float, String, Boolean,
ForeignKey, DateTime, Sequence, and_)
class UserDirichletMixin(object):
    """ Fields of this class contain information necessary for spam detection
    according to the Dirichlet method."""

    @declared_attr
    def sd_base_u_n(cls):
        return Column(Float, default=0)

    @declared_attr
    def sd_base_u_p(cls):
        return Column(Float, default=0)

    @declared_attr
    def sd_reliab(cls):
        """ Spam detection reliability is computed based on u_n and u_p."""
        return Column(Float, default=0)

    @declared_attr
    def sd_u_n(cls):
        return Column(Float, default=0)

    @declared_attr
    def sd_u_p(cls):
        return Column(Float, default=0)

    @declared_attr
    def sd_karma_user_base_u_n(cls):
        return Column(Float, default=0)

    @declared_attr
    def sd_karma_user_base_u_p(cls):
        return Column(Float, default=0)

    @declared_attr
    def sd_karma_user_reliab(cls):
        """ Spam detection reliability"""
        return Column(Float, default=0)

    @declared_attr
    def sd_karma_user_u_n(cls):
        return Column(Float, default=0)

    @declared_attr
    def sd_karma_user_u_p(cls):
        return Column(Float, default=0)


class ItemDirichletMixin(object):
    """ Item fields which contain information necessary for spam detection
    according to the Dirichlet algorithm."""

    @declared_attr
    def sd_c_n(cls):
        """ 'Number' of negative votes for the item"""
        return Column(Float, default=0)

    @declared_attr
    def sd_c_p(cls):
        """ 'Number' of positive votes for the item"""
        return Column(Float, default=0)

    @declared_attr
    def sd_weight(cls):
        """ sd_weight is the weight of an item which is computed in Karger's
        algorithm. Negative weight indicates spam.
        """
        return Column(Float)

    @declared_attr
    def sd_frozen(cls):
        return Column(Boolean, default=False)

    @classmethod
    def sd_get_items_offline_spam_detect(cls, session):
        items = session.query(cls).filter(
            cls.sd_frozen == False).all()
        return items


class ActionDirichletMixin(object):

    @declared_attr
    def sd_frozen(cls):
        """ If the field is true, then the action does not participate in
        offline spam detection."""
        return Column(Boolean, default=False)

    @classmethod
    def sd_get_actions_offline_spam_detect(cls, session):
        actions = session.query(cls).filter(
            cls.sd_frozen == False).all()
        return actions


class UserKargerMixin(object):
    """ Fields of this class contain information necessary for spam detection
    according to the algorithm by Karger."""

    @declared_attr
    def sk_base_reliab(cls):
        """ This field is a base reliability of a user for the spam detection
        task.
        """
        return Column(Float, default=0)

    @declared_attr
    def sk_reliab(cls):
        """ Spam detection reliability"""
        return Column(Float, default=0)

    @declared_attr
    def sk_reliab_raw(cls):
        """ Raw reliability is the user's reliability before applying the
        asymptotic function or normalization. We need it to perform online
        updates.
        """
        return Column(Float, default=0)

    @declared_attr
    def sk_karma_user_base_reliab(cls):
        """ This field is a base reliability for a karma user ("null" user) who
        always votes positively for the user's annotations."""
        return Column(Float, default=0)

    @declared_attr
    def sk_karma_user_reliab(cls):
        return Column(Float, default=0)


class ItemKargerMixin(object):

    @declared_attr
    def sk_weight(cls):
        """ sk_weight is the weight of an item which is computed in Karger's
        algorithm. Negative weight indicates spam.
        """
        return Column(Float, default=0)

    @declared_attr
    def sk_frozen(cls):
        return Column(Boolean, default=False)

    @classmethod
    def sk_get_items_offline_spam_detect(cls, session):
        items = session.query(cls).filter(
            cls.sk_frozen == False).all()
        return items


class ActionKargerMixin(object):

    @declared_attr
    def sk_frozen(cls):
        """ If the field is true, then the action does not participate in
        offline spam detection."""
        return Column(Boolean, default=False)

    @classmethod
    def sk_get_actions_offline_spam_detect(cls, session):
        actions = session.query(cls).filter(
            cls.sk_frozen == False).all()
        return actions
b5ed9e092a526c71953066be3205defd79154b2b | 8,086 | py | Python | src/VDrone/nanopb/ahrs_pb2.py | brandonbraun653/VirtualDrone | b779167ef3328340015aec46f1e2623c39ee4c0c | [
"MIT"
] | null | null | null | src/VDrone/nanopb/ahrs_pb2.py | brandonbraun653/VirtualDrone | b779167ef3328340015aec46f1e2623c39ee4c0c | [
"MIT"
] | null | null | null | src/VDrone/nanopb/ahrs_pb2.py | brandonbraun653/VirtualDrone | b779167ef3328340015aec46f1e2623c39ee4c0c | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: ahrs.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='ahrs.proto',
package='',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\nahrs.proto\"A\n\x0b\x41\x63\x63\x65lSample\x12\t\n\x01x\x18\x01 \x02(\x02\x12\t\n\x01y\x18\x02 \x02(\x02\x12\t\n\x01z\x18\x03 \x02(\x02\x12\x11\n\ttimestamp\x18\x04 \x02(\r\"@\n\nGyroSample\x12\t\n\x01x\x18\x01 \x02(\x02\x12\t\n\x01y\x18\x02 \x02(\x02\x12\t\n\x01z\x18\x03 \x02(\x02\x12\x11\n\ttimestamp\x18\x04 \x02(\r\"?\n\tMagSample\x12\t\n\x01x\x18\x01 \x02(\x02\x12\t\n\x01y\x18\x02 \x02(\x02\x12\t\n\x01z\x18\x03 \x02(\x02\x12\x11\n\ttimestamp\x18\x04 \x02(\r'
)
_ACCELSAMPLE = _descriptor.Descriptor(
name='AccelSample',
full_name='AccelSample',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='AccelSample.x', index=0,
number=1, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='y', full_name='AccelSample.y', index=1,
number=2, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='z', full_name='AccelSample.z', index=2,
number=3, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='AccelSample.timestamp', index=3,
number=4, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=14,
serialized_end=79,
)
_GYROSAMPLE = _descriptor.Descriptor(
name='GyroSample',
full_name='GyroSample',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='GyroSample.x', index=0,
number=1, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='y', full_name='GyroSample.y', index=1,
number=2, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='z', full_name='GyroSample.z', index=2,
number=3, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='GyroSample.timestamp', index=3,
number=4, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=81,
serialized_end=145,
)
_MAGSAMPLE = _descriptor.Descriptor(
name='MagSample',
full_name='MagSample',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='MagSample.x', index=0,
number=1, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='y', full_name='MagSample.y', index=1,
number=2, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='z', full_name='MagSample.z', index=2,
number=3, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='MagSample.timestamp', index=3,
number=4, type=13, cpp_type=3, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=147,
serialized_end=210,
)
DESCRIPTOR.message_types_by_name['AccelSample'] = _ACCELSAMPLE
DESCRIPTOR.message_types_by_name['GyroSample'] = _GYROSAMPLE
DESCRIPTOR.message_types_by_name['MagSample'] = _MAGSAMPLE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
AccelSample = _reflection.GeneratedProtocolMessageType('AccelSample', (_message.Message,), {
'DESCRIPTOR' : _ACCELSAMPLE,
'__module__' : 'ahrs_pb2'
# @@protoc_insertion_point(class_scope:AccelSample)
})
_sym_db.RegisterMessage(AccelSample)
GyroSample = _reflection.GeneratedProtocolMessageType('GyroSample', (_message.Message,), {
'DESCRIPTOR' : _GYROSAMPLE,
'__module__' : 'ahrs_pb2'
# @@protoc_insertion_point(class_scope:GyroSample)
})
_sym_db.RegisterMessage(GyroSample)
MagSample = _reflection.GeneratedProtocolMessageType('MagSample', (_message.Message,), {
'DESCRIPTOR' : _MAGSAMPLE,
'__module__' : 'ahrs_pb2'
# @@protoc_insertion_point(class_scope:MagSample)
})
_sym_db.RegisterMessage(MagSample)
# @@protoc_insertion_point(module_scope)
| 37.785047 | 488 | 0.738931 | 1,047 | 8,086 | 5.393505 | 0.115568 | 0.055251 | 0.084115 | 0.076501 | 0.757039 | 0.742164 | 0.742164 | 0.735081 | 0.712237 | 0.712237 | 0 | 0.035045 | 0.135419 | 8,086 | 213 | 489 | 37.962441 | 0.772708 | 0.043285 | 0 | 0.713514 | 1 | 0.005405 | 0.117959 | 0.060987 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.021622 | 0 | 0.021622 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
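A minimal usage sketch for the generated messages (standard protobuf Python API; the field values are illustrative). The fields are proto2 required, so all four must be set before serializing:

import ahrs_pb2

sample = ahrs_pb2.AccelSample(x=0.1, y=-9.8, z=0.3, timestamp=12345)
payload = sample.SerializeToString()
decoded = ahrs_pb2.AccelSample.FromString(payload)
assert decoded.timestamp == 12345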
bd17ea3443a966419ddced7f8488b3f8ed0cb321 | 156 | py | Python | app/api/__init__.py | najens/item_catalog | 1d5a3d6d2edc1b65bfab72c6a3a6644729ecf79d | [
"MIT"
] | null | null | null | app/api/__init__.py | najens/item_catalog | 1d5a3d6d2edc1b65bfab72c6a3a6644729ecf79d | [
"MIT"
] | null | null | null | app/api/__init__.py | najens/item_catalog | 1d5a3d6d2edc1b65bfab72c6a3a6644729ecf79d | [
"MIT"
] | null | null | null | from flask import Blueprint
# Setup api blueprint
api = Blueprint('api', __name__)
# Import blueprint views
from .views import categories, items # noqa
| 17.333333 | 44 | 0.75641 | 20 | 156 | 5.7 | 0.55 | 0.263158 | 0.263158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173077 | 156 | 8 | 45 | 19.5 | 0.883721 | 0.301282 | 0 | 0 | 0 | 0 | 0.028571 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0.666667 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 7 |
951755bb41c5dd031feec0bdeaac7ded927d7cea | 3,125 | py | Python | tests/test_event_handler.py | begor/follower_maze | d76315e5085d4566f5730d70b51559b2262bc827 | [
"MIT"
] | null | null | null | tests/test_event_handler.py | begor/follower_maze | d76315e5085d4566f5730d70b51559b2262bc827 | [
"MIT"
] | null | null | null | tests/test_event_handler.py | begor/follower_maze | d76315e5085d4566f5730d70b51559b2262bc827 | [
"MIT"
] | null | null | null | import unittest
from follower_maze import events
from tests import factories
from tests.helpers import async_test
class TestEventHandler(unittest.TestCase):
    """
    Test case for EventHandler.

    Mostly tests correct ordering of events.
    Client notifications are tested in test_pipeline.
    """

    @async_test
    async def tearDown(self):
        await events.handler.EventHandler.reset()

    @async_test
    async def test_one_event_in_order(self):
        event = factories.get_broadcast_event(1)
        await events.handler.EventHandler.new(event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 1)

    @async_test
    async def test_two_events_in_order(self):
        first_event = factories.get_broadcast_event(1)
        await events.handler.EventHandler.new(first_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 1)

        second_event = factories.get_broadcast_event(2)
        await events.handler.EventHandler.new(second_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 2)

    @async_test
    async def test_one_event_out_order(self):
        event = factories.get_broadcast_event(2)
        await events.handler.EventHandler.new(event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 0)

    @async_test
    async def test_two_events_out_order(self):
        second_event = factories.get_broadcast_event(2)
        await events.handler.EventHandler.new(second_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 0)

        first_event = factories.get_broadcast_event(3)
        await events.handler.EventHandler.new(first_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 0)

    @async_test
    async def test_two_events_mixed_order(self):
        second_event = factories.get_broadcast_event(2)
        await events.handler.EventHandler.new(second_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 0)

        first_event = factories.get_broadcast_event(1)
        await events.handler.EventHandler.new(first_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 2)

    @async_test
    async def test_four_events_mixed_order(self):
        fourth_event = factories.get_broadcast_event(4)
        await events.handler.EventHandler.new(fourth_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 0)

        second_event = factories.get_broadcast_event(2)
        await events.handler.EventHandler.new(second_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 0)

        first_event = factories.get_broadcast_event(1)
        await events.handler.EventHandler.new(first_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 2)

        third_event = factories.get_broadcast_event(3)
        await events.handler.EventHandler.new(third_event)
        self.assertEqual(await events.handler.EventHandler.get_seq_no(), 4)


if __name__ == '__main__':
    unittest.main()
| 34.340659 | 75 | 0.72768 | 398 | 3,125 | 5.437186 | 0.145729 | 0.127079 | 0.207948 | 0.34658 | 0.822089 | 0.792514 | 0.792514 | 0.753235 | 0.753235 | 0.753235 | 0 | 0.00943 | 0.1856 | 3,125 | 90 | 76 | 34.722222 | 0.840864 | 0.03616 | 0 | 0.603448 | 0 | 0 | 0.002688 | 0 | 0 | 0 | 0 | 0 | 0.206897 | 1 | 0 | false | 0 | 0.068966 | 0 | 0.086207 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
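The `async_test` decorator imported from `tests.helpers` is not shown in this row; a common implementation of such a helper (an assumption, not the project's actual code) drives the coroutine on a fresh event loop so the synchronous unittest runner can call it:

import asyncio
import functools


def async_test(coro):
    # Hypothetical helper: run an async test method to completion on a
    # dedicated event loop, returning its result to unittest.
    @functools.wraps(coro)
    def wrapper(*args, **kwargs):
        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(coro(*args, **kwargs))
        finally:
            loop.close()
    return wrapper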
20fa7c51432920af387ab79f17e647054a42c628 | 286 | py | Python | avionix/chart/__init__.py | Maxiimeeb/avionix | c149e4319c8c8c00d50450ec1644545340ff7322 | [
"BSD-3-Clause"
] | 51 | 2020-07-17T11:42:44.000Z | 2022-03-17T23:51:28.000Z | avionix/chart/__init__.py | Maxiimeeb/avionix | c149e4319c8c8c00d50450ec1644545340ff7322 | [
"BSD-3-Clause"
] | 55 | 2020-07-14T21:21:14.000Z | 2022-03-04T22:43:10.000Z | avionix/chart/__init__.py | Maxiimeeb/avionix | c149e4319c8c8c00d50450ec1644545340ff7322 | [
"BSD-3-Clause"
] | 9 | 2021-01-05T01:52:14.000Z | 2022-02-16T12:42:18.000Z | # flake8: noqa
from avionix.chart.chart_builder import ChartBuilder
from avionix.chart.chart_dependency import ChartDependency
from avionix.chart.chart_info import ChartInfo
from avionix.chart.chart_maintainer import ChartMaintainer
from avionix.chart.values_yaml import Value, Values
| 35.75 | 58 | 0.867133 | 38 | 286 | 6.394737 | 0.447368 | 0.226337 | 0.329218 | 0.345679 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003831 | 0.087413 | 286 | 7 | 59 | 40.857143 | 0.927203 | 0.041958 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
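A hedged usage sketch of the names re-exported above; the constructor arguments follow avionix's documented pattern, but the exact values are assumptions, not taken from this file:

from avionix.chart import ChartBuilder, ChartInfo

# Assumed usage: a chart is described by its metadata plus a list of
# Kubernetes objects (left empty here for brevity).
builder = ChartBuilder(
    ChartInfo(api_version="3.2.4", name="example", version="0.1.0"),
    [],
)
# The builder can then generate or install the chart; consult the
# avionix documentation for the exact method to call.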
2f1d83b8039eedbbe91d800cb7c7788ac6a89ee4 | 1,191 | py | Python | Beepitett_teszt.py | balcsi32/OpenNRE-1 | 63f2662228b37a38af78ced864a38e0238511d26 | [
"MIT"
] | null | null | null | Beepitett_teszt.py | balcsi32/OpenNRE-1 | 63f2662228b37a38af78ced864a38e0238511d26 | [
"MIT"
] | null | null | null | Beepitett_teszt.py | balcsi32/OpenNRE-1 | 63f2662228b37a38af78ced864a38e0238511d26 | [
"MIT"
] | null | null | null | import opennre
model = opennre.get_model('wiki80_cnn_softmax')
print(model.infer({'text': 'He was the son of Máel Dúin mac Máele Fithrich, and grandson of the high king Áed Uaridnach (died 612).', 'h': {'pos': (18, 46)}, 't': {'pos': (78, 91)}}))
model = opennre.get_model('wiki80_bert_softmax')
print(model.infer({'text': 'He was the son of Máel Dúin mac Máele Fithrich, and grandson of the high king Áed Uaridnach (died 612).', 'h': {'pos': (18, 46)}, 't': {'pos': (78, 91)}}))
model = opennre.get_model('wiki80_bertentity_softmax')
print(model.infer({'text': 'He was the son of Máel Dúin mac Máele Fithrich, and grandson of the high king Áed Uaridnach (died 612).', 'h': {'pos': (18, 46)}, 't': {'pos': (78, 91)}}))
model = opennre.get_model('tacred_bert_softmax')
print(model.infer({'text': 'He was the son of Máel Dúin mac Máele Fithrich, and grandson of the high king Áed Uaridnach (died 612).', 'h': {'pos': (18, 46)}, 't': {'pos': (78, 91)}}))
model = opennre.get_model('tacred_bertentity_softmax')
print(model.infer({'text': 'He was the son of Máel Dúin mac Máele Fithrich, and grandson of the high king Áed Uaridnach (died 612).', 'h': {'pos': (18, 46)}, 't': {'pos': (78, 91)}}))
| 99.25 | 183 | 0.670865 | 197 | 1,191 | 3.979695 | 0.192893 | 0.076531 | 0.095663 | 0.127551 | 0.979592 | 0.946429 | 0.946429 | 0.946429 | 0.946429 | 0.946429 | 0 | 0.059281 | 0.13602 | 1,191 | 11 | 184 | 108.272727 | 0.702624 | 0 | 0 | 0.454545 | 0 | 0.454545 | 0.571788 | 0.041982 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.090909 | 0.454545 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
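Because the same sentence is inferred against every model, an equivalent loop keeps the example in one place; this is a refactoring sketch, not part of the original script:

import opennre

EXAMPLE = {
    'text': ('He was the son of Máel Dúin mac Máele Fithrich, and grandson '
             'of the high king Áed Uaridnach (died 612).'),
    'h': {'pos': (18, 46)},
    't': {'pos': (78, 91)},
}

# Model names copied from the script above.
for name in ('wiki80_cnn_softmax', 'wiki80_bert_softmax',
             'wiki80_bertentity_softmax', 'tacred_bert_softmax',
             'tacred_bertentity_softmax'):
    model = opennre.get_model(name)
    print(name, model.infer(EXAMPLE))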
2f21c5ea86c3499fe0a93827deea60dab3ab1b0a | 9,903 | py | Python | test/test_cases__yb_chunk_dml_by_integer.py | eloemosynator/YbEasyCli | b35ebe03da07898cfa06ff687cba29cd83268c31 | [
"MIT"
] | null | null | null | test/test_cases__yb_chunk_dml_by_integer.py | eloemosynator/YbEasyCli | b35ebe03da07898cfa06ff687cba29cd83268c31 | [
"MIT"
] | 4 | 2020-06-03T18:11:29.000Z | 2022-03-07T20:41:16.000Z | test/test_cases__yb_chunk_dml_by_integer.py | eloemosynator/YbEasyCli | b35ebe03da07898cfa06ff687cba29cd83268c31 | [
"MIT"
] | 2 | 2020-05-27T23:43:03.000Z | 2022-03-03T23:16:15.000Z | map_out = {
r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{1,6}(-|\+)\d{2}' : 'YYYY-MM-DD HH:MM:SS.FFFFFF-TZ'
, r'\d{2}:\d{2}:\d{2}.\d{1,6}' : 'HH:MM:SS.FFFFFF'
, r'\d{4}-\d{2}-\d{2}' : 'YYYY-MM-DD'}
test_cases = [
test_case(
cmd=('yb_chunk_dml_by_integer.py @{argsdir}/yb_chunk_dml_by_integer__args1 '
'--column col4 --execute_chunk_dml')
, exit_code=0
, stdout="""-- Running DML chunking.
--2020-08-22 18:19:38.201736-06: Starting Integer Chunking, first calculating group counts
--2020-08-22 18:19:38.301736-06: Build Chunk Groupings, first pass
--2020-08-22 18:19:39.431422-06: Build Chunk DMLs
--2020-08-22 18:19:39.522147-06: Chunk: 1, Rows: 100000, Range 1000000 <= col4 < 47500950000
--2020-08-22 18:19:39.822828-06: Chunk: 2, Rows: 100000, Range 47500950000 <= col4 < 90000900000
--2020-08-22 18:19:40.154894-06: Chunk: 3, Rows: 100000, Range 90000900000 <= col4 < 127500850000
--2020-08-22 18:19:40.462646-06: Chunk: 4, Rows: 100000, Range 127500850000 <= col4 < 160000800000
--2020-08-22 18:19:40.781904-06: Chunk: 5, Rows: 100000, Range 160000800000 <= col4 < 187500750000
--2020-08-22 18:19:41.121436-06: Chunk: 6, Rows: 100000, Range 187500750000 <= col4 < 210000700000
--2020-08-22 18:19:41.398286-06: Chunk: 7, Rows: 100000, Range 210000700000 <= col4 < 227500650000
--2020-08-22 18:19:41.758007-06: Chunk: 8, Rows: 100000, Range 227500650000 <= col4 < 240000600000
--2020-08-22 18:19:42.12159-06: Chunk: 9, Rows: 100000, Range 240000600000 <= col4 < 247500550000
--2020-08-22 18:19:42.432212-06: Chunk: 10, Rows: 100000, Range 247500550000 <= col4 < 250000500001
--2020-08-22 18:19:42.672871-06: Chunk: 11, Rows: 0, col4 IS NULL
--2020-08-22 18:19:42.916537-06: Completed Integer Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : 00:00:04.71574
--Overhead duration : 00:00:02.176085
--Total Chunks : 11
--Min chunk size : 100000
--Largest chunk size : 100000
--Average chunk size : 90909
-- Completed DML chunking."""
, stderr=''
, map_out=map_out)
, test_case(
cmd=('yb_chunk_dml_by_integer.py @{argsdir}/yb_chunk_dml_by_integer__args1 '
'--column col4 --print_chunk_dml --null_chunk_off --verbose_chunk_off')
, exit_code=0
, stdout="""-- Running DML chunking.
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 100000) >>>*/ 1000000 <= col4 AND col4 < 47500950000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100000) >>>*/ 47500950000 <= col4 AND col4 < 90000900000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 100000) >>>*/ 90000900000 <= col4 AND col4 < 127500850000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100000) >>>*/ 127500850000 <= col4 AND col4 < 160000800000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100000) >>>*/ 160000800000 <= col4 AND col4 < 187500750000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100000) >>>*/ 187500750000 <= col4 AND col4 < 210000700000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 100000) >>>*/ 210000700000 <= col4 AND col4 < 227500650000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 100000) >>>*/ 227500650000 <= col4 AND col4 < 240000600000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 9, size: 100000) >>>*/ 240000600000 <= col4 AND col4 < 247500550000 /*<<< chunk_clause */;
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 10, size: 100000) >>>*/ 247500550000 <= col4 AND col4 < 250000500001 /*<<< chunk_clause */;
-- Completed DML chunking."""
, stderr='')
, test_case(
cmd=('yb_chunk_dml_by_integer.py @{argsdir}/yb_chunk_dml_by_integer__args1 '
'--column col4 --print_chunk_dml')
, exit_code=0
, stdout="""-- Running DML chunking.
--2020-08-22 19:26:27.672082-06: Starting Integer Chunking, first calculating group counts
--2020-08-22 19:26:27.801736-06: Build Chunk Groupings, first pass
--2020-08-22 19:26:28.922245-06: Build Chunk DMLs
--2020-08-22 19:26:29.010727-06: Chunk: 1, Rows: 100000, Range 1000000 <= col4 < 47500950000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 1, size: 100000) >>>*/ 1000000 <= col4 AND col4 < 47500950000 /*<<< chunk_clause */;
--2020-08-22 19:26:29.321661-06: Chunk: 2, Rows: 100000, Range 47500950000 <= col4 < 90000900000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 2, size: 100000) >>>*/ 47500950000 <= col4 AND col4 < 90000900000 /*<<< chunk_clause */;
--2020-08-22 19:26:29.615457-06: Chunk: 3, Rows: 100000, Range 90000900000 <= col4 < 127500850000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 3, size: 100000) >>>*/ 90000900000 <= col4 AND col4 < 127500850000 /*<<< chunk_clause */;
--2020-08-22 19:26:29.913853-06: Chunk: 4, Rows: 100000, Range 127500850000 <= col4 < 160000800000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 4, size: 100000) >>>*/ 127500850000 <= col4 AND col4 < 160000800000 /*<<< chunk_clause */;
--2020-08-22 19:26:30.260152-06: Chunk: 5, Rows: 100000, Range 160000800000 <= col4 < 187500750000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 5, size: 100000) >>>*/ 160000800000 <= col4 AND col4 < 187500750000 /*<<< chunk_clause */;
--2020-08-22 19:26:30.536624-06: Chunk: 6, Rows: 100000, Range 187500750000 <= col4 < 210000700000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 6, size: 100000) >>>*/ 187500750000 <= col4 AND col4 < 210000700000 /*<<< chunk_clause */;
--2020-08-22 19:26:30.822253-06: Chunk: 7, Rows: 100000, Range 210000700000 <= col4 < 227500650000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 7, size: 100000) >>>*/ 210000700000 <= col4 AND col4 < 227500650000 /*<<< chunk_clause */;
--2020-08-22 19:26:31.15679-06: Chunk: 8, Rows: 100000, Range 227500650000 <= col4 < 240000600000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 8, size: 100000) >>>*/ 227500650000 <= col4 AND col4 < 240000600000 /*<<< chunk_clause */;
--2020-08-22 19:26:31.447927-06: Chunk: 9, Rows: 100000, Range 240000600000 <= col4 < 247500550000
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 9, size: 100000) >>>*/ 240000600000 <= col4 AND col4 < 247500550000 /*<<< chunk_clause */;
--2020-08-22 19:26:31.791157-06: Chunk: 10, Rows: 100000, Range 247500550000 <= col4 < 250000500001
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE /* chunk_clause(chunk: 10, size: 100000) >>>*/ 247500550000 <= col4 AND col4 < 250000500001 /*<<< chunk_clause */;
--2020-08-22 19:26:32.112984-06: Chunk: 11, Rows: 0, col4 IS NULL
INSERT INTO new_chunked_table SELECT * FROM {db1}.dev.data_types_t WHERE col4 IS NULL;
--2020-08-22 19:26:32.349486-06: Completed Integer Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : 00:00:04.678549
--Overhead duration : 00:00:02.19171
--Total Chunks : 11
--Min chunk size : 100000
--Largest chunk size : 100000
--Average chunk size : 90909
-- Completed DML chunking."""
, stderr=''
, map_out=map_out)
, test_case(
cmd=('yb_chunk_dml_by_integer.py @{argsdir}/yb_chunk_dml_by_integer__args1 '
'--column col1 --column_cardinality high')
, exit_code=0
, stdout="""-- Running DML chunking.
--2020-12-25 21:16:02.899221-08: Starting Integer Chunking, first calculating group counts
--2020-12-25 21:16:03.053718-08: Build Chunk Groupings, first pass
--2020-12-25 21:16:03.278257-08: Build Chunk DMLs
--2020-12-25 21:16:03.280744-08: Chunk: 1, Rows: 100095, Range 1 <= col1 < 100096
--2020-12-25 21:16:03.283273-08: Chunk: 2, Rows: 100096, Range 100096 <= col1 < 200192
--2020-12-25 21:16:03.285346-08: Chunk: 3, Rows: 100096, Range 200192 <= col1 < 300288
--2020-12-25 21:16:03.287403-08: Chunk: 4, Rows: 100096, Range 300288 <= col1 < 400384
--2020-12-25 21:16:03.289469-08: Chunk: 5, Rows: 100096, Range 400384 <= col1 < 500480
--2020-12-25 21:16:03.291527-08: Chunk: 6, Rows: 100096, Range 500480 <= col1 < 600576
--2020-12-25 21:16:03.293583-08: Chunk: 7, Rows: 100096, Range 600576 <= col1 < 700672
--2020-12-25 21:16:03.295636-08: Chunk: 8, Rows: 100096, Range 700672 <= col1 < 800768
--2020-12-25 21:16:03.297689-08: Chunk: 9, Rows: 100096, Range 800768 <= col1 < 900864
--2020-12-25 21:16:03.299731-08: Chunk: 10, Rows: 99137, Range 900864 <= col1 < 1000001
--2020-12-25 21:16:03.300263-08: Chunk: 11, Rows: 0, col1 IS NULL
--2020-12-25 21:16:03.300573-08: Completed Integer Chunked DML
--Total Rows : 1000000
--IS NULL Rows : 0
--Running total check: PASSED
--Duration : 00:00:00.402947
--Overhead duration : 00:00:00.403053
--Total Chunks : 11
--Min chunk size : 100000
--Largest chunk size : 100096
--Average chunk size : 90909
-- Completed DML chunking.
"""
, stderr=''
, map_out=map_out)
] | 73.355556 | 187 | 0.688478 | 1,525 | 9,903 | 4.345574 | 0.125246 | 0.066395 | 0.036215 | 0.063377 | 0.864946 | 0.85378 | 0.787083 | 0.771541 | 0.731553 | 0.565263 | 0 | 0.318214 | 0.153994 | 9,903 | 135 | 188 | 73.355556 | 0.472786 | 0 | 0 | 0.48855 | 0 | 0.458015 | 0.937702 | 0.079867 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.045802 | 0 | 0 | 0 | 0.015267 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
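A sketch of how a `map_out` mapping like the one above is typically applied: each regex key is substituted over the captured output so volatile values (timestamps, durations) compare equal across runs. The real `test_case` harness is defined elsewhere; this only illustrates the assumed mechanism:

import re


def normalize(output, map_out):
    # Replace volatile substrings with their stable placeholders before
    # comparing actual output against the expected text.
    for pattern, placeholder in map_out.items():
        output = re.sub(pattern, placeholder, output)
    return output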
2f38da4010203c5618069cf92343e044bc51381a | 3,035 | py | Python | tests/test_state.py | tulth/gcode_gen | d6e276f2074d4fe66755b2ae06c5b4d85583c563 | [
"BSD-3-Clause"
] | null | null | null | tests/test_state.py | tulth/gcode_gen | d6e276f2074d4fe66755b2ae06c5b4d85583c563 | [
"BSD-3-Clause"
] | null | null | null | tests/test_state.py | tulth/gcode_gen | d6e276f2074d4fe66755b2ae06c5b4d85583c563 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# Sample Test passing with nose and pytest
import unittest
import numpy as np
from gcode_gen.tool import Carbide3D_101
from gcode_gen.state import State, CncState, DEFAULT_START
class TestState(unittest.TestCase):
def test_create(self):
state = State(z_safe=40, position=DEFAULT_START)
self.assertEqual(state['z_safe'], 40)
actual = state['position'].arr
expect = DEFAULT_START.arr
self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
def test_let(self):
state = State(z_safe=45, feed_rate=40)
self.assertEqual(state['feed_rate'], 40)
self.assertEqual(state['z_safe'], 45)
with state.let(feed_rate=15, z_safe=10):
self.assertEqual(state['feed_rate'], 15)
self.assertEqual(state['z_safe'], 10)
self.assertEqual(state['feed_rate'], 40)
self.assertEqual(state['z_safe'], 45)
def test_excursion(self):
state = State(z_safe=45, feed_rate=40)
self.assertEqual(state['feed_rate'], 40)
self.assertEqual(state['z_safe'], 45)
with state.excursion():
state['z_safe'] = -12
state['feed_rate'] = 100
self.assertEqual(state['feed_rate'], 100)
self.assertEqual(state['z_safe'], -12)
self.assertEqual(state['feed_rate'], 40)
self.assertEqual(state['z_safe'], 45)
def test_excursion_nosave(self):
state = State(z_safe=45, feed_rate=40)
self.assertEqual(state['feed_rate'], 40)
self.assertEqual(state['z_safe'], 45)
with state.excursion(nosave=('z_safe', )):
state['z_safe'] = -12
state['feed_rate'] = 100
self.assertEqual(state['feed_rate'], 100)
self.assertEqual(state['z_safe'], -12)
self.assertEqual(state['feed_rate'], 40)
self.assertEqual(state['z_safe'], -12)
class TestCncState(unittest.TestCase):
def test_create(self):
tool = Carbide3D_101()
state = CncState(tool=tool, z_safe=40)
self.assertEqual(state['tool'], tool)
self.assertEqual(state['z_safe'], 40)
actual = state['position'].arr
expect = DEFAULT_START.arr
self.assertTrue(np.allclose(actual, expect), 'actual: {}\nexpect:{}'.format(actual, expect))
def test_let(self):
tool = Carbide3D_101()
state = CncState(tool=tool, z_safe=45, feed_rate=40)
self.assertEqual(state['feed_rate'], 40)
self.assertEqual(state['z_safe'], 45)
with state.let(feed_rate=15, z_safe=10):
self.assertEqual(state['feed_rate'], 15)
self.assertEqual(state['z_safe'], 10)
self.assertEqual(state['feed_rate'], 40)
self.assertEqual(state['z_safe'], 45)
def test_copy(self):
tool = Carbide3D_101()
state = CncState(milling_feed_rate=40)
self.assertEqual(state['milling_feed_rate'], 40)
self.assertEqual(state.copy()['milling_feed_rate'], 40)
| 37.9375 | 100 | 0.627348 | 391 | 3,035 | 4.693095 | 0.14578 | 0.237057 | 0.316076 | 0.179837 | 0.844687 | 0.836512 | 0.783651 | 0.743324 | 0.743324 | 0.743324 | 0 | 0.048718 | 0.228995 | 3,035 | 79 | 101 | 38.417722 | 0.73547 | 0.020099 | 0 | 0.727273 | 0 | 0 | 0.109018 | 0 | 0 | 0 | 0 | 0 | 0.469697 | 1 | 0.106061 | false | 0 | 0.060606 | 0 | 0.19697 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
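The `let` context manager exercised by these tests lives in `gcode_gen.state`; a minimal sketch of the temporary-override pattern it verifies (an assumption about the shape of the implementation, not the library's actual code):

import contextlib


class SimpleState(dict):
    @contextlib.contextmanager
    def let(self, **overrides):
        # Temporarily override entries, restoring the originals on exit.
        saved = {key: self[key] for key in overrides}
        self.update(overrides)
        try:
            yield self
        finally:
            self.update(saved)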
85de22e455744b19d39cd570d4793e152800e526 | 37 | py | Python | CSES-Problemset/introductory/bit_strings.py | rranjan14/cp-solutions | 9614508efbed1e4ee8b970b5eed535d782a5783f | [
"MIT"
] | null | null | null | CSES-Problemset/introductory/bit_strings.py | rranjan14/cp-solutions | 9614508efbed1e4ee8b970b5eed535d782a5783f | [
"MIT"
] | null | null | null | CSES-Problemset/introductory/bit_strings.py | rranjan14/cp-solutions | 9614508efbed1e4ee8b970b5eed535d782a5783f | [
"MIT"
] | null | null | null | print((1<<(int(input())))%1000000007) | 37 | 37 | 0.648649 | 5 | 37 | 4.8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.297297 | 0 | 37 | 1 | 37 | 37 | 0.351351 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
85e05eb0d9ef2433d7fc89cda418d8fb1813c0d8 | 2,194 | py | Python | products/tests/test_resources.py | jszafran/w84it | 62ce3160dcc0e188f2e9d4d7df1f7abe12bd14f7 | [
"MIT"
] | 1 | 2019-04-14T10:08:46.000Z | 2019-04-14T10:08:46.000Z | products/tests/test_resources.py | jszafran/w84it | 62ce3160dcc0e188f2e9d4d7df1f7abe12bd14f7 | [
"MIT"
] | 14 | 2019-01-30T16:22:30.000Z | 2020-06-05T20:02:51.000Z | products/tests/test_resources.py | jszafran/w84it | 62ce3160dcc0e188f2e9d4d7df1f7abe12bd14f7 | [
"MIT"
] | null | null | null | datasets = {
'full_valid_data':
{'name': 'test_product',
'description': 'Its going to be awesome!',
'url': 'http://my-product.com',
'price': 23.5,
'currency': 'USD',
'work_start_date': '2018-12-01',
'launch_date': '2019-05-25'
},
'invalid_data_url':
{'name': 'test_product',
'description': 'Its going to be awesome!',
'url': 'http://my pro_duct.com',
'price': 23.5,
'currency': 'USD',
'work_start_date': '2018-12-01',
'launch_date': '2019-05-25'
},
'invalid_data_price_too_many_decimals':
{'name': 'test_product',
'description': 'Its going to be awesome!',
'url': 'http://my-product.com',
'price': 23.523342,
'currency': 'USD',
'work_start_date': '2018-12-01',
'launch_date': '2019-05-25'
},
'invalid_data_price_too_many_digits':
{'name': 'test_product',
'description': 'Its going to be awesome!',
'url': 'http://my-product.com',
'price': 1234571233223.52,
'currency': 'USD',
'work_start_date': '2018-12-01',
'launch_date': '2019-05-25'
},
    'invalid_data_price_too_many_digits_and_decimals':
{'name': 'test_product',
'description': 'Its going to be awesome!',
'url': 'http://my-product.com',
'price': 1234571233223.52233,
'currency': 'USD',
'work_start_date': '2018-12-01',
'launch_date': '2019-05-25'
},
'invalid_data_price_with_no_currency':
{'name': 'test_product',
'description': 'Its going to be awesome!',
'url': 'http://my-product.com',
'price': 125.99,
'currency': '-',
'work_start_date': '2018-12-01',
'launch_date': '2019-05-25'
},
'invalid_data_wrong_dates':
{'name': 'test_product',
'description': 'Its going to be awesome!',
'url': 'http://my-product.com',
'price': 125.99,
'currency': '-',
'work_start_date': '2018-14-01',
'launch_date': '2019-17-32'
}
}
| 33.242424 | 55 | 0.514585 | 246 | 2,194 | 4.353659 | 0.219512 | 0.052288 | 0.098039 | 0.169935 | 0.897292 | 0.897292 | 0.897292 | 0.897292 | 0.897292 | 0.897292 | 0 | 0.110892 | 0.305378 | 2,194 | 65 | 56 | 33.753846 | 0.591864 | 0 | 0 | 0.661538 | 0 | 0 | 0.530538 | 0.080675 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
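A sketch of how such a `datasets` mapping is commonly consumed with pytest parametrization; the endpoint path and the `client` fixture are assumptions, since the project's actual tests are not part of this row:

import pytest

invalid_cases = [v for k, v in datasets.items() if k.startswith('invalid')]


@pytest.mark.parametrize('payload', invalid_cases)
def test_invalid_payload_rejected(client, payload):
    # 400 is the usual status for serializer validation failures.
    response = client.post('/products/', data=payload)
    assert response.status_code == 400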
c82d3bae716464df96943ff0245f82c02d42a86e | 99 | py | Python | src/eztao/ts/__init__.py | ywx649999311/EzTao | d4d27ac17c675b585e0443f822240df88fcdef57 | [
"MIT"
] | 11 | 2020-12-10T13:11:37.000Z | 2022-03-28T21:00:19.000Z | src/eztao/ts/__init__.py | ywx649999311/EzTao | d4d27ac17c675b585e0443f822240df88fcdef57 | [
"MIT"
] | 33 | 2020-12-09T02:01:43.000Z | 2022-03-21T19:53:00.000Z | src/eztao/ts/__init__.py | ywx649999311/EzTao | d4d27ac17c675b585e0443f822240df88fcdef57 | [
"MIT"
] | 3 | 2020-12-10T17:27:12.000Z | 2021-11-12T05:55:33.000Z | from .utils import *
from .carma_fit import *
from .carma_sim import *
from .carma_mcmc import mcmc | 24.75 | 28 | 0.777778 | 16 | 99 | 4.625 | 0.4375 | 0.405405 | 0.608108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.151515 | 99 | 4 | 28 | 24.75 | 0.880952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c85c0f638aea1efcb267a4be5aebb81e515f0ccc | 1,680 | py | Python | home/migrations/0037_auto_20220126_1535.py | ianshulx/egiportal | 3a147a4a61e58f2a6229e08a5a4c256d7b674b81 | [
"MIT"
] | 1 | 2022-02-17T10:34:21.000Z | 2022-02-17T10:34:21.000Z | home/migrations/0037_auto_20220126_1535.py | ianshulx/egiportal | 3a147a4a61e58f2a6229e08a5a4c256d7b674b81 | [
"MIT"
] | null | null | null | home/migrations/0037_auto_20220126_1535.py | ianshulx/egiportal | 3a147a4a61e58f2a6229e08a5a4c256d7b674b81 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.14 on 2022-01-26 10:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0036_auto_20211226_1358'),
]
operations = [
migrations.AlterField(
model_name='book',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='event1',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='image',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='main_event',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='notice',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='notice1',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='quiz',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
| 34.285714 | 108 | 0.594048 | 177 | 1,680 | 5.457627 | 0.265537 | 0.086957 | 0.181159 | 0.210145 | 0.774327 | 0.774327 | 0.774327 | 0.774327 | 0.774327 | 0.774327 | 0 | 0.028192 | 0.282143 | 1,680 | 48 | 109 | 35 | 0.772803 | 0.027381 | 0 | 0.666667 | 1 | 0 | 0.059436 | 0.014093 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.02381 | 0 | 0.095238 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
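Batches of `AlterField` operations on implicit `id` columns like the above typically appear when the project's default primary-key type changes. From Django 3.2 onward the type can be pinned explicitly so `makemigrations` stops regenerating such migrations; a settings sketch (assumed placement, not from this repository):

# settings.py
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'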
8d02c02c656b0fbcc39368efcf9f923d45d7db16 | 25,118 | py | Python | forest/binary_trees/single_threaded_binary_trees.py | shunsvineyard/forest-python | 729c2980fbb9f90b056cb92ca5eb0ad6091a2dc5 | [
"MIT"
] | 8 | 2021-03-17T21:31:10.000Z | 2022-01-26T17:07:59.000Z | forest/binary_trees/single_threaded_binary_trees.py | shunsvineyard/forest-python | 729c2980fbb9f90b056cb92ca5eb0ad6091a2dc5 | [
"MIT"
] | 20 | 2021-03-09T07:23:53.000Z | 2022-01-29T22:10:24.000Z | forest/binary_trees/single_threaded_binary_trees.py | shunsvineyard/forest-python | 729c2980fbb9f90b056cb92ca5eb0ad6091a2dc5 | [
"MIT"
] | null | null | null | # Copyright © 2021 by Shun Huang. All rights reserved.
# Licensed under MIT License.
# See LICENSE in the project root for license information.
"""Single Threaded Binary Search Trees."""
import dataclasses
from typing import Any, Optional
from forest.binary_trees import traversal
from forest import tree_exceptions
@dataclasses.dataclass
class Node:
"""Single Threaded Tree node definition."""
key: Any
data: Any
left: Optional["Node"] = None
right: Optional["Node"] = None
parent: Optional["Node"] = None
is_thread: bool = False
class RightThreadedBinaryTree:
"""Right Threaded Binary Tree.
Attributes
----------
root: `Optional[Node]`
The root node of the right threaded binary search tree.
empty: `bool`
`True` if the tree is empty; `False` otherwise.
Methods
-------
Core Functions
search(key: `Any`)
Look for a node based on the given key.
insert(key: `Any`, data: `Any`)
Insert a (key, data) pair into a binary tree.
delete(key: `Any`)
Delete a node based on the given key from the binary tree.
Auxiliary Functions
get_leftmost(node: `Node`)
Return the node whose key is the smallest from the given subtree.
get_rightmost(node: `Node` = `None`)
Return the node whose key is the biggest from the given subtree.
get_successor(node: `Node`)
Return the successor node in the in-order order.
get_predecessor(node: `Node`)
Return the predecessor node in the in-order order.
get_height(node: `Optional[Node]`)
Return the height of the given node.
Traversal Functions
inorder_traverse()
In-order traversal by using the right threads.
preorder_traverse()
Pre-order traversal by using the right threads.
"""
def __init__(self) -> None:
self.root: Optional[Node] = None
def __repr__(self) -> str:
"""Provie the tree representation to visualize its layout."""
if self.root:
return (
f"{type(self)}, root={self.root}, "
f"tree_height={str(self.get_height(self.root))}"
)
return "empty tree"
@property
def empty(self) -> bool:
"""bool: `True` if the tree is empty; `False` otherwise.
Notes
-----
The property, `empty`, is read-only.
"""
return self.root is None
def search(self, key: Any) -> Optional[Node]:
"""Look for a node by a given key.
Parameters
----------
key: `Any`
The key associated with the node.
Returns
-------
`Optional[Node]`
The node found by the given key.
If the key does not exist, return `None`.
"""
current = self.root
while current:
if key == current.key:
return current
elif key < current.key:
current = current.left
else: # key > current.key
if current.is_thread:
break
current = current.right
return None
def insert(self, key: Any, data: Any) -> None:
"""Insert a (key, data) pair into the binary search tree.
Parameters
----------
key: `Any`
The key associated with the data.
data: `Any`
The data to be inserted.
Raises
------
`DuplicateKeyError`
            Raised if the key to be inserted already exists in the tree.
"""
new_node = Node(key=key, data=data)
parent: Optional[Node] = None
current: Optional[Node] = self.root
while current:
parent = current
if new_node.key < current.key:
current = current.left
elif new_node.key > current.key:
# If the node is thread, meaning it's a leaf node.
if current.is_thread:
current = None
else:
current = current.right
else:
raise tree_exceptions.DuplicateKeyError(key=new_node.key)
new_node.parent = parent
# If the tree is empty
if parent is None:
self.root = new_node
elif new_node.key < parent.key:
parent.left = new_node
# Update thread
new_node.right = parent
new_node.is_thread = True
else:
# Update thread
new_node.is_thread = parent.is_thread
new_node.right = parent.right
parent.is_thread = False
# Parent's right must be set after thread update
parent.right = new_node
def delete(self, key: Any) -> None:
"""Delete a node according to the given key.
Parameters
----------
key: `Any`
The key of the node to be deleted.
"""
if self.root and (deleting_node := self.search(key=key)):
# Case 1: no child
if deleting_node.left is None and (
deleting_node.right is None or deleting_node.is_thread
):
self._transplant(deleting_node=deleting_node, replacing_node=None)
# Case 2a: only one left child
elif deleting_node.left and (
deleting_node.is_thread
or deleting_node.right is None
# deleting_node.right is None means the deleting node is the root.
):
predecessor = self.get_predecessor(node=deleting_node)
if predecessor:
predecessor.right = deleting_node.right
self._transplant(
deleting_node=deleting_node, replacing_node=deleting_node.left
)
# Case 2b: only one right child
elif deleting_node.left is None and deleting_node.is_thread is False:
self._transplant(
deleting_node=deleting_node, replacing_node=deleting_node.right
)
# Case 3: two children
elif (
deleting_node.left
and deleting_node.right
and deleting_node.is_thread is False
):
predecessor = self.get_predecessor(node=deleting_node)
replacing_node: Node = self.get_leftmost(node=deleting_node.right)
# the leftmost node is not the direct child of the deleting node
if replacing_node.parent != deleting_node:
if replacing_node.is_thread:
self._transplant(
deleting_node=replacing_node, replacing_node=None
)
else:
self._transplant(
deleting_node=replacing_node,
replacing_node=replacing_node.right,
)
replacing_node.right = deleting_node.right
replacing_node.right.parent = replacing_node
replacing_node.is_thread = False
self._transplant(
deleting_node=deleting_node, replacing_node=replacing_node
)
replacing_node.left = deleting_node.left
replacing_node.left.parent = replacing_node
if predecessor and predecessor.is_thread:
predecessor.right = replacing_node
else:
raise RuntimeError("Invalid case. Should never happened")
@staticmethod
def get_leftmost(node: Node) -> Node:
"""Return the leftmost node from a given subtree.
The key of the leftmost node is the smallest key in the given subtree.
Parameters
----------
node: `Node`
The root of the subtree.
Returns
-------
`Node`
The node whose key is the smallest from the subtree of
the given node.
"""
current_node = node
while current_node.left:
current_node = current_node.left
return current_node
@staticmethod
def get_rightmost(node: Node) -> Node:
"""Return the rightmost node from a given subtree.
The key of the rightmost node is the biggest key in the given subtree.
Parameters
----------
node: `Node`
The root of the subtree.
Returns
-------
`Node`
The node whose key is the biggest from the subtree of
the given node.
"""
current_node = node
while current_node.is_thread is False and current_node.right:
current_node = current_node.right
return current_node
@staticmethod
def get_successor(node: Node) -> Optional[Node]:
"""Return the successor in the in-order order.
Parameters
----------
node: `Node`
The node to get its successor.
Returns
-------
`Optional[Node]`
The successor node.
"""
if node.is_thread:
return node.right
else:
if node.right:
return RightThreadedBinaryTree.get_leftmost(node=node.right)
            # if node.right is None, the given node has no successor.
return None
@staticmethod
def get_predecessor(node: Node) -> Optional[Node]:
"""Return the predecessor in the in-order order.
Parameters
----------
node: `Node`
The node to get its predecessor.
Returns
-------
`Optional[Node]`
The predecessor node.
"""
if node.left:
return RightThreadedBinaryTree.get_rightmost(node=node.left)
parent = node.parent
while parent and node == parent.left:
node = parent
parent = parent.parent
return parent
@staticmethod
def get_height(node: Optional[Node]) -> int:
"""Get the height of the given subtree.
Parameters
----------
node: `Optional[Node]`
The root of the subtree to get its height.
Returns
-------
`int`
The height of the given subtree. 0 if the subtree has only one node.
"""
if node:
if node.left and node.is_thread is False:
return (
max(
RightThreadedBinaryTree.get_height(node.left),
RightThreadedBinaryTree.get_height(node.right),
)
+ 1
)
if node.left:
return RightThreadedBinaryTree.get_height(node=node.left) + 1
if node.is_thread is False:
return RightThreadedBinaryTree.get_height(node=node.right) + 1
return 0
def inorder_traverse(self) -> traversal.Pairs:
"""Use the right threads to traverse the tree in in-order order.
Yields
------
`Pairs`
The next (key, data) pair in the tree in-order traversal.
"""
if self.root:
current: Optional[Node] = self.get_leftmost(node=self.root)
while current:
yield (current.key, current.data)
if current.is_thread:
current = current.right
else:
if current.right is None:
break
current = self.get_leftmost(current.right)
def preorder_traverse(self) -> traversal.Pairs:
"""Use the right threads to traverse the tree in pre-order order.
Yields
------
`Pairs`
The next (key, data) pair in the tree pre-order traversal.
"""
current = self.root
while current:
yield (current.key, current.data)
if current.is_thread:
# If a node is thread, it must have a right child.
current = current.right.right # type: ignore
else:
current = current.left
def _transplant(self, deleting_node: Node, replacing_node: Optional[Node]) -> None:
if deleting_node.parent is None:
self.root = replacing_node
if self.root:
self.root.is_thread = False
elif deleting_node == deleting_node.parent.left:
deleting_node.parent.left = replacing_node
if replacing_node:
if deleting_node.is_thread:
if replacing_node.is_thread:
                        # inherit the deleted node's right thread
                        replacing_node.right = deleting_node.right
else: # deleting_node == deleting_node.parent.right
deleting_node.parent.right = replacing_node
if replacing_node:
if deleting_node.is_thread:
if replacing_node.is_thread:
                        # inherit the deleted node's right thread
                        replacing_node.right = deleting_node.right
else:
deleting_node.parent.right = deleting_node.right
deleting_node.parent.is_thread = True
if replacing_node:
replacing_node.parent = deleting_node.parent
class LeftThreadedBinaryTree:
"""Left Threaded Binary Tree.
Attributes
----------
root: `Optional[Node]`
The root node of the left threaded binary search tree.
empty: `bool`
`True` if the tree is empty; `False` otherwise.
Methods
-------
Core Functions
search(key: `Any`)
Look for a node based on the given key.
insert(key: `Any`, data: `Any`)
Insert a (key, data) pair into a binary tree.
delete(key: `Any`)
Delete a node based on the given key from the binary tree.
Auxiliary Functions
get_leftmost(node: `Node`)
Return the node whose key is the smallest from the given subtree.
get_rightmost(node: `Node` = `None`)
Return the node whose key is the biggest from the given subtree.
get_successor(node: `Node`)
Return the successor node in the in-order order.
get_predecessor(node: `Node`)
Return the predecessor node in the in-order order.
get_height(node: `Optional[Node]`)
Return the height of the given node.
Traversal Function
reverse_inorder_traverse()
Reversed In-order traversal by using the left threads.
"""
def __init__(self) -> None:
self.root: Optional[Node] = None
def __repr__(self) -> str:
"""Provie the tree representation to visualize its layout."""
if self.root:
return (
f"{type(self)}, root={self.root}, "
f"tree_height={str(self.get_height(self.root))}"
)
return "empty tree"
@property
def empty(self) -> bool:
"""bool: `True` if the tree is empty; `False` otherwise.
Notes
-----
The property, `empty`, is read-only.
"""
return self.root is None
def search(self, key: Any) -> Optional[Node]:
"""Look for a node by a given key.
Parameters
----------
key: `Any`
The key associated with the node.
Returns
-------
`Optional[Node]`
The node found by the given key.
If the key does not exist, return `None`.
"""
current = self.root
while current:
if key == current.key:
return current
elif key < current.key:
if current.is_thread is False:
current = current.left
else:
break
else: # key > current.key:
current = current.right
return None
def insert(self, key: Any, data: Any) -> None:
"""Insert a (key, data) pair into the binary search tree.
Parameters
----------
key: `Any`
The key associated with the data.
data: `Any`
The data to be inserted.
Raises
------
`DuplicateKeyError`
            Raised if the key to be inserted already exists in the tree.
"""
new_node = Node(key=key, data=data)
parent: Optional[Node] = None
current: Optional[Node] = self.root
while current:
parent = current
if new_node.key < current.key:
# If the node is thread, meaning it's a leaf node.
if current.is_thread:
current = None
else:
current = current.left
elif new_node.key > current.key:
current = current.right
else:
raise tree_exceptions.DuplicateKeyError(key=new_node.key)
new_node.parent = parent
# If the tree is empty
if parent is None:
self.root = new_node
elif new_node.key > parent.key:
parent.right = new_node
# Update thread
new_node.left = parent
new_node.is_thread = True
else:
# Update thread
new_node.is_thread = parent.is_thread
new_node.left = parent.left
parent.is_thread = False
# Parent's left must be set after thread update
parent.left = new_node
def delete(self, key: Any) -> None:
"""Delete a node according to the given key.
Parameters
----------
key: `Any`
The key of the node to be deleted.
"""
if self.root and (deleting_node := self.search(key=key)):
# Case 1: no child
if deleting_node.right is None and (
deleting_node.left is None or deleting_node.is_thread
):
self._transplant(deleting_node=deleting_node, replacing_node=None)
# Case 2a: only one left child
elif (deleting_node.right is None) and (deleting_node.is_thread is False):
self._transplant(
deleting_node=deleting_node, replacing_node=deleting_node.left
)
# Case 2b: only one right child
elif deleting_node.right and (
deleting_node.is_thread
or deleting_node.left is None
# deleting_node.left is None means the deleting node is the root.
):
successor = self.get_successor(node=deleting_node)
if successor:
successor.left = deleting_node.left
self._transplant(
deleting_node=deleting_node, replacing_node=deleting_node.right
)
# Case 3: two children
elif deleting_node.right and deleting_node.left:
replacing_node: Node = self.get_leftmost(node=deleting_node.right)
successor = self.get_successor(node=replacing_node)
# the leftmost node is not the direct child of the deleting node
if replacing_node.parent != deleting_node:
self._transplant(
deleting_node=replacing_node,
replacing_node=replacing_node.right,
)
replacing_node.right = deleting_node.right
replacing_node.right.parent = replacing_node
self._transplant(
deleting_node=deleting_node, replacing_node=replacing_node
)
replacing_node.left = deleting_node.left
replacing_node.left.parent = replacing_node
replacing_node.is_thread = False
if successor and successor.is_thread:
successor.left = replacing_node
else:
raise RuntimeError("Invalid case. Should never happened")
@staticmethod
def get_leftmost(node: Node) -> Node:
"""Return the leftmost node from a given subtree.
The key of the leftmost node is the smallest key in the given subtree.
Parameters
----------
node: `Node`
The root of the subtree.
Returns
-------
`Node`
The node whose key is the smallest from the subtree of
the given node.
"""
current_node = node
while current_node.left and current_node.is_thread is False:
current_node = current_node.left
return current_node
@staticmethod
def get_rightmost(node: Node) -> Node:
"""Return the rightmost node from a given subtree.
The key of the rightmost node is the biggest key in the given subtree.
Parameters
----------
node: `Node`
The root of the subtree.
Returns
-------
`Node`
The node whose key is the biggest from the subtree of
the given node.
"""
current_node = node
while current_node.right:
current_node = current_node.right
return current_node
@staticmethod
def get_successor(node: Node) -> Optional[Node]:
"""Return the successor in the in-order order.
Parameters
----------
node: `Node`
The node to get its successor.
Returns
-------
`Optional[Node]`
The successor node.
"""
if node.right:
return LeftThreadedBinaryTree.get_leftmost(node=node.right)
parent = node.parent
while parent and node == parent.right:
node = parent
parent = parent.parent
return parent
@staticmethod
def get_predecessor(node: Node) -> Optional[Node]:
"""Return the predecessor in the in-order order.
Parameters
----------
node: `Node`
The node to get its predecessor.
Returns
-------
`Optional[Node]`
The predecessor node.
"""
if node.is_thread:
return node.left
else:
if node.left:
return LeftThreadedBinaryTree.get_rightmost(node=node.left)
            # if node.left is None, the given node has no predecessor.
return None
@staticmethod
def get_height(node: Optional[Node]) -> int:
"""Get the height of the given subtree.
Parameters
----------
node: `Optional[Node]`
The root of the subtree to get its height.
Returns
-------
`int`
The height of the given subtree. 0 if the subtree has only one node.
"""
if node:
if node.right and node.is_thread is False:
return (
max(
LeftThreadedBinaryTree.get_height(node.left),
LeftThreadedBinaryTree.get_height(node.right),
)
+ 1
)
if node.right:
return LeftThreadedBinaryTree.get_height(node=node.right) + 1
if node.is_thread is False:
return LeftThreadedBinaryTree.get_height(node=node.left) + 1
return 0
def reverse_inorder_traverse(self) -> traversal.Pairs:
"""Use the left threads to traverse the tree in reversed in-order.
Yields
------
`Pairs`
The next (key, data) pair in the tree reversed in-order traversal.
"""
if self.root:
current: Optional[Node] = self.get_rightmost(node=self.root)
while current:
yield (current.key, current.data)
if current.is_thread:
current = current.left
else:
if current.left is None:
break
current = self.get_rightmost(current.left)
def _transplant(self, deleting_node: Node, replacing_node: Optional[Node]) -> None:
if deleting_node.parent is None:
self.root = replacing_node
if self.root:
self.root.is_thread = False
elif deleting_node == deleting_node.parent.left:
deleting_node.parent.left = replacing_node
if replacing_node:
if deleting_node.is_thread:
if replacing_node.is_thread:
replacing_node.left = deleting_node.left
else:
deleting_node.parent.left = deleting_node.left
deleting_node.parent.is_thread = True
else: # deleting_node == deleting_node.parent.right
deleting_node.parent.right = replacing_node
if replacing_node:
if deleting_node.is_thread:
if replacing_node.is_thread:
replacing_node.left = deleting_node.left
if replacing_node:
replacing_node.parent = deleting_node.parent
| 32.326898 | 87 | 0.54654 | 2,775 | 25,118 | 4.824865 | 0.062703 | 0.085145 | 0.029577 | 0.025245 | 0.896183 | 0.872657 | 0.822765 | 0.789305 | 0.753753 | 0.733064 | 0 | 0.001401 | 0.374672 | 25,118 | 776 | 88 | 32.368557 | 0.850958 | 0.304722 | 0 | 0.740541 | 0 | 0 | 0.016367 | 0.005754 | 0 | 0 | 0 | 0 | 0 | 1 | 0.072973 | false | 0 | 0.010811 | 0 | 0.194595 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
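A short usage sketch of the classes defined above; the keys are illustrative:

tree = RightThreadedBinaryTree()
for key in (23, 4, 30, 11, 7, 34, 20, 24, 22, 15, 1):
    tree.insert(key=key, data=str(key))

# In-order traversal follows the right threads instead of a stack,
# yielding (key, data) pairs in ascending key order.
print(list(tree.inorder_traverse()))

tree.delete(key=11)
print(tree.search(key=11))  # None once the node is removed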
23c2c853b1909952905413106beeb47782455d4e | 6,451 | py | Python | tests/test_ttt_computer.py | larryworm1127/tic-tac-toe-python | 327b43d36948fc41ef9c902f40c7f67fab793b89 | [
"MIT"
] | null | null | null | tests/test_ttt_computer.py | larryworm1127/tic-tac-toe-python | 327b43d36948fc41ef9c902f40c7f67fab793b89 | [
"MIT"
] | null | null | null | tests/test_ttt_computer.py | larryworm1127/tic-tac-toe-python | 327b43d36948fc41ef9c902f40c7f67fab793b89 | [
"MIT"
] | null | null | null | """
Test module for ttt_computer.py
"""
from ttt_game.ttt_computer import *
from ttt_game.ttt_board import *
def test_minimax_win_row() -> None:
"""
    x x o | x o x | o o
      x x |   o o | x x o
    o   o | x   x | x
Test if computer can win the game with win case on a row of the board.
"""
game_board = [[PLAYERX, PLAYERX, PLAYERO], [EMPTY, PLAYERX, PLAYERX],
[PLAYERO, EMPTY, PLAYERO]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 1, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERX, PLAYERO, PLAYERX], [EMPTY, PLAYERO, PLAYERO],
[PLAYERX, EMPTY, PLAYERX]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 1, "Bad Move X: " + str(move[0])
assert move[1] == 0, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERO, PLAYERO, EMPTY], [PLAYERX, PLAYERX, PLAYERO],
[PLAYERX, EMPTY, EMPTY]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 0, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
def test_minimax_win_col() -> None:
"""
    x     | x o o | o o x
    o o x |     o |   o x
    x o x | x o x | x
Test if computer can win the game with win case on a column of the board.
"""
game_board = [[PLAYERX, EMPTY, EMPTY], [PLAYERO, PLAYERO, PLAYERX],
[PLAYERX, PLAYERO, PLAYERX]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERX)
assert move[0] == 0, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERX, PLAYERO, PLAYERO], [EMPTY, EMPTY, PLAYERO],
[PLAYERX, PLAYERO, PLAYERX]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERX)
assert move[0] == 1, "Bad Move X: " + str(move[0])
assert move[1] == 0, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERO, PLAYERO, PLAYERX], [EMPTY, PLAYERO, PLAYERX],
[PLAYERX, EMPTY, EMPTY]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERX)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
def test_minimax_win_diag() -> None:
"""
    x x   | o x x
    o o x | x o
    o     | o
Test if computer can win the game with win case on a diagonal of the board.
"""
game_board = [[PLAYERX, PLAYERX, EMPTY], [PLAYERO, PLAYERO, PLAYERX],
[PLAYERO, EMPTY, EMPTY]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 0, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERO, PLAYERX, PLAYERX], [PLAYERX, PLAYERO, EMPTY],
[PLAYERO, EMPTY, EMPTY]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
def test_minimax_def_row() -> None:
"""
    x x   | o x   | x o
    x o   | x x   | o
    o o x | o o x | x x
Test if computer can defend with a opponent win case on a row of the board.
"""
game_board = [[PLAYERX, PLAYERX, EMPTY], [PLAYERX, PLAYERO, EMPTY],
[PLAYERO, PLAYERO, PLAYERX]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 0, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERO, PLAYERX, EMPTY], [PLAYERX, PLAYERX, EMPTY],
[PLAYERO, PLAYERO, PLAYERX]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 1, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERX, PLAYERO, EMPTY], [PLAYERO, EMPTY, EMPTY],
[PLAYERX, PLAYERX, EMPTY]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
def test_minimax_def_col() -> None:
"""
    x o x | o x o | o o x
    x x o | x x o | x o x
        o |     x |   x
Test if computer can defend with a opponent win case on a column of the
board.
"""
game_board = [[PLAYERX, PLAYERO, PLAYERX], [PLAYERX, PLAYERX, PLAYERO],
[EMPTY, EMPTY, PLAYERO]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 0, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERO, PLAYERX, PLAYERO], [PLAYERX, PLAYERX, PLAYERO],
[EMPTY, EMPTY, PLAYERX]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 1, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERO, PLAYERO, PLAYERX], [PLAYERX, PLAYERO, PLAYERX],
[EMPTY, PLAYERX, EMPTY]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
def test_minimax_def_diag() -> None:
"""
    x o x | o o x
    x x o | x x o
    o     |     x
Check if computer can defend with opponent win case on a diagonal of the
board.
"""
game_board = [[PLAYERX, PLAYERO, PLAYERX], [PLAYERX, PLAYERX, PLAYERO],
[PLAYERO, EMPTY, EMPTY]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 2, "Bad Move Y: " + str(move[1])
game_board = [[PLAYERO, PLAYERO, PLAYERX], [PLAYERX, PLAYERX, PLAYERO],
[EMPTY, EMPTY, PLAYERX]]
board = TTTBoard(3, _custom_board=game_board)
move = get_move(board, PLAYERO)
assert move[0] == 2, "Bad Move X: " + str(move[0])
assert move[1] == 0, "Bad Move Y: " + str(move[1])
if __name__ == '__main__':
import pytest
pytest.main(['test_ttt_computer.py'])
| 35.838889 | 79 | 0.578515 | 954 | 6,451 | 3.793501 | 0.052411 | 0.07958 | 0.085106 | 0.088422 | 0.934512 | 0.891406 | 0.856038 | 0.843051 | 0.836419 | 0.827024 | 0 | 0.023881 | 0.272981 | 6,451 | 179 | 80 | 36.039106 | 0.747761 | 0.126337 | 0 | 0.719626 | 0 | 0 | 0.075486 | 0 | 0 | 0 | 0 | 0 | 0.299065 | 1 | 0.056075 | false | 0 | 0.028037 | 0 | 0.084112 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
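The assertion pair repeated in every test above could be factored into a single helper; a sketch, not part of the original module:

def assert_move(board, player, expected):
    # Compare the computer's chosen (x, y) move against the expected cell.
    move = get_move(board, player)
    assert (move[0], move[1]) == expected, "Bad Move: " + str(move)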
f1975bb62e6d07bcb094be5904e32d28b229ac24 | 217 | py | Python | src/elastic/fortex/elastic/__init__.py | Piyush13y/forte-wrappers | 250df428a8705f769d53eb070f89c3f66e713015 | [
"Apache-2.0"
] | 3 | 2021-06-17T18:52:00.000Z | 2022-01-11T19:15:21.000Z | src/elastic/fortex/elastic/__init__.py | Piyush13y/forte-wrappers | 250df428a8705f769d53eb070f89c3f66e713015 | [
"Apache-2.0"
] | 66 | 2021-03-30T15:04:11.000Z | 2022-03-24T04:35:11.000Z | src/elastic/fortex/elastic/__init__.py | Piyush13y/forte-wrappers | 250df428a8705f769d53eb070f89c3f66e713015 | [
"Apache-2.0"
] | 10 | 2021-03-16T19:48:31.000Z | 2022-03-01T05:48:17.000Z | from fortex.elastic.elastic_search_processor import *
from fortex.elastic.elastic_indexer import *
from fortex.elastic.elastic_search_index_processor import *
from fortex.elastic.elastic_search_query_creator import *
| 43.4 | 59 | 0.870968 | 29 | 217 | 6.206897 | 0.344828 | 0.222222 | 0.377778 | 0.533333 | 0.833333 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0.073733 | 217 | 4 | 60 | 54.25 | 0.895522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
f1b198cb8ef82f1b30561e725dbb35523ecdd446 | 26,360 | py | Python | XKT/SKT/net.py | bigdata-ustc/XKT | b3ac07541b92001b62d7cff4e8fe7e5a69c5c93c | [
"MIT"
] | 17 | 2019-09-11T12:00:05.000Z | 2022-03-30T04:41:05.000Z | XKT/SKT/net.py | bigdata-ustc/XKT | b3ac07541b92001b62d7cff4e8fe7e5a69c5c93c | [
"MIT"
] | 1 | 2021-10-24T01:13:33.000Z | 2021-10-24T02:03:26.000Z | XKT/SKT/net.py | bigdata-ustc/XKT | b3ac07541b92001b62d7cff4e8fe7e5a69c5c93c | [
"MIT"
] | 6 | 2019-09-13T07:50:07.000Z | 2022-03-12T00:22:11.000Z | # coding: utf-8
# 2021/8/22 @ tongshiwei
from baize.mxnet.utils import format_sequence, mask_sequence_variable_length
from mxnet import gluon
import mxnet as mx
from XKT.utils.nn import GRUCell, begin_states, get_states, expand_tensor
from .utils import Graph
def get_net(ku_num, graph_params=None, net_type="SKT", k=2, **kwargs):
if net_type == "SKT":
return SKT(ku_num, graph_params, **kwargs)
elif net_type == "SKT_TE":
return SKT_TE(ku_num, **kwargs)
elif net_type == "SKTPart":
return SKTPart(ku_num, graph_params, **kwargs)
elif net_type == "SKTSync":
return SKTSync(ku_num, graph_params, **kwargs)
else:
raise NotImplementedError
class SKT(gluon.Block):
def __init__(self, ku_num, graph_params=None,
alpha=0.5,
latent_dim=None, activation=None,
hidden_num=90, concept_dim=None,
# dropout=0.5, self_dropout=0.0,
dropout=0.0, self_dropout=0.5,
# dropout=0.0, self_dropout=0.0,
sync_activation="relu", sync_dropout=0.0,
prop_activation="relu", prop_dropout=0.0,
agg_activation="relu", agg_dropout=0.0,
prefix=None, params=None):
super(SKT, self).__init__(prefix=prefix, params=params)
self.ku_num = int(ku_num)
self.hidden_num = self.ku_num if hidden_num is None else int(hidden_num)
self.latent_dim = self.hidden_num if latent_dim is None else int(latent_dim)
self.concept_dim = self.hidden_num if concept_dim is None else int(concept_dim)
graph_params = graph_params if graph_params is not None else []
self.graph = Graph.from_file(ku_num, graph_params)
self.alpha = alpha
sync_activation = sync_activation if activation is None else activation
prop_activation = prop_activation if activation is None else activation
agg_activation = agg_activation if activation is None else activation
with self.name_scope():
self.rnn = GRUCell(self.hidden_num)
self.response_embedding = gluon.nn.Embedding(2 * self.ku_num, self.latent_dim)
self.concept_embedding = gluon.nn.Embedding(self.ku_num, self.concept_dim)
self.f_self = gluon.rnn.GRUCell(self.hidden_num)
# self.f_self = gluon.nn.Sequential()
# self.f_self.add(
# gluon.nn.Dense(self.hidden_num),
# gluon.nn.Activation("relu")
# )
self.self_dropout = gluon.nn.Dropout(self_dropout)
self.f_prop = gluon.nn.Sequential()
self.f_prop.add(
gluon.nn.Dense(self.hidden_num, flatten=False),
gluon.nn.Activation(prop_activation),
gluon.nn.Dropout(prop_dropout),
)
self.f_sync = gluon.nn.Sequential()
self.f_sync.add(
gluon.nn.Dense(self.hidden_num, flatten=False),
gluon.nn.Activation(sync_activation),
gluon.nn.Dropout(sync_dropout),
)
self.f_agg = gluon.nn.Sequential()
self.f_agg.add(
gluon.nn.Dense(self.hidden_num, flatten=False),
# gluon.nn.InstanceNorm(),
# gluon.nn.LayerNorm(),
# gluon.nn.BatchNorm(),
gluon.nn.Activation(agg_activation),
gluon.nn.Dropout(agg_dropout),
)
self.dropout = gluon.nn.Dropout(dropout)
self.out = gluon.nn.Dense(1, flatten=False)
def neighbors(self, x, ordinal=True):
return self.graph.neighbors(x, ordinal)
def successors(self, x, ordinal=True):
return self.graph.successors(x, ordinal)
def forward(self, questions, answers, valid_length=None, states=None, layout='NTC', compressed_out=True,
*args, **kwargs):
ctx = questions.context
length = questions.shape[1]
inputs, axis, F, batch_size = format_sequence(length, questions, layout, False)
answers, _, _, _ = format_sequence(length, answers, layout, False)
if states is None:
states = begin_states([(batch_size, self.ku_num, self.hidden_num)], self.prefix)[0]
states = states.as_in_context(ctx)
outputs = []
all_states = []
for i in range(length):
# self - influence
_self_state = get_states(inputs[i], states)
# fc
# _next_self_state = self.f_self(mx.nd.concat(_self_state, self.response_embedding(answers[i]), dim=-1))
# gru
_next_self_state, _ = self.f_self(self.response_embedding(answers[i]), [_self_state])
# _next_self_state = self.f_self(mx.nd.concat(_self_hidden_states, _self_state))
# _next_self_state, _ = self.f_self(_self_hidden_states, [_self_state])
_next_self_state = self.self_dropout(_next_self_state)
# get self mask
_self_mask = mx.nd.expand_dims(mx.nd.one_hot(inputs[i], self.ku_num), -1)
_self_mask = mx.nd.broadcast_to(_self_mask, (0, 0, self.hidden_num))
# find neighbors
_neighbors = self.neighbors(inputs[i])
_neighbors_mask = mx.nd.expand_dims(mx.nd.array(_neighbors, ctx=ctx), -1)
_neighbors_mask = mx.nd.broadcast_to(_neighbors_mask, (0, 0, self.hidden_num))
# synchronization
_broadcast_next_self_states = mx.nd.expand_dims(_next_self_state, 1)
_broadcast_next_self_states = mx.nd.broadcast_to(_broadcast_next_self_states, (0, self.ku_num, 0))
# _sync_diff = mx.nd.concat(states, _broadcast_next_self_states, concept_embeddings, dim=-1)
_sync_diff = mx.nd.concat(states, _broadcast_next_self_states, dim=-1)
_sync_inf = _neighbors_mask * self.f_sync(_sync_diff)
# reflection on current vertex
_reflec_inf = mx.nd.sum(_sync_inf, axis=1)
_reflec_inf = mx.nd.broadcast_to(mx.nd.expand_dims(_reflec_inf, 1), (0, self.ku_num, 0))
_sync_inf = _sync_inf + _self_mask * _reflec_inf
# find successors
_successors = self.successors(inputs[i])
_successors_mask = mx.nd.expand_dims(mx.nd.array(_successors, ctx=ctx), -1)
_successors_mask = mx.nd.broadcast_to(_successors_mask, (0, 0, self.hidden_num))
# propagation
# _prop_diff = mx.nd.concat(_next_self_state - _self_state, self.concept_embedding(inputs[i]), dim=-1)
_prop_diff = _next_self_state - _self_state
# 1
_prop_inf = self.f_prop(_prop_diff)
_prop_inf = _successors_mask * mx.nd.broadcast_to(mx.nd.expand_dims(_prop_inf, axis=1), (0, self.ku_num, 0))
# 2
# _broadcast_diff = mx.nd.broadcast_to(mx.nd.expand_dims(_prop_diff, axis=1), (0, self.ku_num, 0))
# _pro_inf = _successors_mask * self.f_prop(
# mx.nd.concat(_broadcast_diff, concept_embeddings, dim=-1)
# )
# _pro_inf = _successors_mask * self.f_prop(
# _broadcast_diff
# )
# concept embedding
concept_embeddings = self.concept_embedding.weight.data(ctx)
concept_embeddings = expand_tensor(concept_embeddings, 0, batch_size)
# concept_embeddings = (_self_mask + _successors_mask + _neighbors_mask) * concept_embeddings
# aggregate
_inf = self.f_agg(self.alpha * _sync_inf + (1 - self.alpha) * _prop_inf)
# next_states, _ = self.rnn(_inf, [states])
next_states, _ = self.rnn(mx.nd.concat(_inf, concept_embeddings, dim=-1), [states])
# states = (1 - _self_mask) * next_states + _self_mask * _broadcast_next_self_states
states = next_states
output = mx.nd.sigmoid(mx.nd.squeeze(self.out(self.dropout(states)), axis=-1))
outputs.append(output)
if valid_length is not None and not compressed_out:
all_states.append([states])
if valid_length is not None:
if compressed_out:
states = None
else:
states = [mx.nd.SequenceLast(mx.nd.stack(*ele_list, axis=0),
sequence_length=valid_length,
use_sequence_length=True,
axis=0)
for ele_list in zip(*all_states)]
outputs = mask_sequence_variable_length(mx.nd, outputs, length, valid_length, axis, True)
outputs, _, _, _ = format_sequence(length, outputs, layout, merge=True)
return outputs, states
class SKTPart(SKT):
def __init__(self, ku_num, graph_params=None,
latent_dim=None, activation=None,
hidden_num=90, concept_dim=None,
dropout=0.0, self_dropout=0.0,
prop_activation="relu", prop_dropout=0.0,
prefix=None, params=None):
        # NOTE: super(SKT, self) deliberately skips SKT.__init__ and invokes
        # gluon.Block.__init__ directly, so SKTPart builds its own sub-layers.
        super(SKT, self).__init__(prefix=prefix, params=params)
self.ku_num = int(ku_num)
self.hidden_num = self.ku_num if hidden_num is None else int(hidden_num)
self.latent_dim = self.hidden_num if latent_dim is None else int(latent_dim)
self.concept_dim = self.hidden_num if concept_dim is None else int(concept_dim)
graph_params = graph_params if graph_params is not None else []
self.graph = Graph.from_file(ku_num, graph_params)
prop_activation = prop_activation if activation is None else activation
with self.name_scope():
self.rnn = GRUCell(self.hidden_num)
self.response_embedding = gluon.nn.Embedding(2 * self.ku_num, self.latent_dim)
self.concept_embedding = gluon.nn.Embedding(self.ku_num, self.concept_dim)
self.f_self = gluon.rnn.GRUCell(self.hidden_num)
# self.f_self = gluon.nn.Sequential()
# self.f_self.add(
# gluon.nn.Dense(self.hidden_num),
# gluon.nn.Activation("relu")
# )
self.self_dropout = gluon.nn.Dropout(self_dropout)
self.f_prop = gluon.nn.Sequential()
self.f_prop.add(
gluon.nn.Dense(self.hidden_num, flatten=False),
gluon.nn.Activation(prop_activation),
gluon.nn.Dropout(prop_dropout),
)
# self.f_sync = gluon.nn.Sequential()
# self.f_sync.add(
# gluon.nn.Dense(self.hidden_num, flatten=False),
# gluon.nn.Activation(sync_activation),
# gluon.nn.Dropout(sync_dropout),
# )
# self.f_reflec = gluon.nn.Sequential()
# self.f_reflec.add(
# gluon.nn.Dense(self.hidden_num, flatten=False),
# gluon.nn.Activation(sync_activation),
# gluon.nn.Dropout(sync_dropout),
# )
# self.f_agg = gluon.nn.Sequential()
# self.f_agg.add(
# gluon.nn.Dense(self.hidden_num, flatten=False),
# # gluon.nn.InstanceNorm(),
# # gluon.nn.LayerNorm(),
# # gluon.nn.BatchNorm(),
# gluon.nn.Activation(agg_activation),
# gluon.nn.Dropout(agg_dropout),
# )
self.dropout = gluon.nn.Dropout(dropout)
self.out = gluon.nn.Dense(1, flatten=False)
def forward(self, questions, answers, valid_length=None, states=None, layout='NTC', compressed_out=True,
*args, **kwargs):
ctx = questions.context
length = questions.shape[1]
inputs, axis, F, batch_size = format_sequence(length, questions, layout, False)
answers, _, _, _ = format_sequence(length, answers, layout, False)
if states is None:
states = begin_states([(batch_size, self.ku_num, self.hidden_num)], self.prefix)[0]
states = states.as_in_context(ctx)
outputs = []
all_states = []
for i in range(length):
            # self-influence
_self_state = get_states(inputs[i], states)
# fc
# _next_self_state = self.f_self(mx.nd.concat(_self_state, self.response_embedding(answers[i]), dim=-1))
# gru
_next_self_state, _ = self.f_self(self.response_embedding(answers[i]), [_self_state])
# _next_self_state = self.f_self(mx.nd.concat(_self_hidden_states, _self_state))
# _next_self_state, _ = self.f_self(_self_hidden_states, [_self_state])
_next_self_state = self.self_dropout(_next_self_state)
# get self mask
_self_mask = mx.nd.expand_dims(mx.nd.one_hot(inputs[i], self.ku_num), -1)
_self_mask = mx.nd.broadcast_to(_self_mask, (0, 0, self.hidden_num))
# self-concept embedding
# _self_concept_embedding = self.concept_embedding(inputs[i])
# _broadcast_self_concept_embedding = mx.nd.expand_dims(_self_concept_embedding, dim=1)
# _broadcast_self_concept_embedding = mx.nd.broadcast_to(_broadcast_self_concept_embedding,
# (0, self.ku_num, 0))
# concept embedding
concept_embeddings = self.concept_embedding.weight.data(ctx)
concept_embeddings = expand_tensor(concept_embeddings, 0, batch_size)
# concept_embeddings = (_self_mask + _successors_mask + _neighbors_mask) * concept_embeddings
# find successors
_successors = self.successors(inputs[i])
_successors_mask = mx.nd.expand_dims(mx.nd.array(_successors, ctx=ctx), -1)
_successors_mask = mx.nd.broadcast_to(_successors_mask, (0, 0, self.hidden_num))
_broadcast_next_self_states = mx.nd.expand_dims(_next_self_state, 1)
_broadcast_next_self_states = mx.nd.broadcast_to(_broadcast_next_self_states, (0, self.ku_num, 0))
# propagation
# _prop_diff = mx.nd.concat(_next_self_state - _self_state, self.concept_embedding(inputs[i]), dim=-1)
_prop_diff = _next_self_state - _self_state
# 1
_prop_inf = self.f_prop(
mx.nd.concat(mx.nd.broadcast_to(mx.nd.expand_dims(_prop_diff, axis=1), (0, self.ku_num, 0)),
concept_embeddings, dim=-1))
_prop_inf = _successors_mask * _prop_inf
# aggregate
# _inf = self.f_agg(_prop_inf)
_inf = _prop_inf
# next_states, _ = self.rnn(_inf, [states])
next_states, _ = self.rnn(_inf, [states])
updated = _successors_mask * next_states + _self_mask * _broadcast_next_self_states
states = updated + (1 - _successors_mask - _self_mask) * states
# states = next_states
output = mx.nd.sigmoid(mx.nd.squeeze(self.out(self.dropout(states)), axis=-1))
outputs.append(output)
if valid_length is not None and not compressed_out:
all_states.append([states])
if valid_length is not None:
if compressed_out:
states = None
else:
states = [mx.nd.SequenceLast(mx.nd.stack(*ele_list, axis=0),
sequence_length=valid_length,
use_sequence_length=True,
axis=0)
for ele_list in zip(*all_states)]
outputs = mask_sequence_variable_length(mx.nd, outputs, length, valid_length, axis, True)
outputs, _, _, _ = format_sequence(length, outputs, layout, merge=True)
return outputs, states
class SKT_TE(gluon.Block):
def __init__(self, ku_num,
latent_dim=None,
hidden_num=90, concept_dim=None,
dropout=0.0, self_dropout=0.5,
prefix=None, params=None):
super(SKT_TE, self).__init__(prefix=prefix, params=params)
self.ku_num = int(ku_num)
self.hidden_num = self.ku_num if hidden_num is None else int(hidden_num)
self.latent_dim = self.hidden_num if latent_dim is None else int(latent_dim)
self.concept_dim = self.hidden_num if concept_dim is None else int(concept_dim)
with self.name_scope():
self.response_embedding = gluon.nn.Embedding(2 * self.ku_num, self.latent_dim)
self.f_self = gluon.rnn.GRUCell(self.hidden_num)
self.self_dropout = gluon.nn.Dropout(self_dropout)
self.dropout = gluon.nn.Dropout(dropout)
self.out = gluon.nn.Dense(1, flatten=False)
def forward(self, questions, answers, valid_length=None, states=None, layout='NTC', compressed_out=True,
*args, **kwargs):
ctx = questions.context
length = questions.shape[1]
inputs, axis, F, batch_size = format_sequence(length, questions, layout, False)
answers, _, _, _ = format_sequence(length, answers, layout, False)
if states is None:
states = begin_states([(batch_size, self.ku_num, self.hidden_num)], self.prefix)[0]
states = states.as_in_context(ctx)
outputs = []
all_states = []
for i in range(length):
            # self-influence
_self_state = get_states(inputs[i], states)
# fc
# _next_self_state = self.f_self(mx.nd.concat(_self_state, self.response_embedding(answers[i]), dim=-1))
# gru
_next_self_state, _ = self.f_self(self.response_embedding(answers[i]), [_self_state])
# _next_self_state = self.f_self(mx.nd.concat(_self_hidden_states, _self_state))
# _next_self_state, _ = self.f_self(_self_hidden_states, [_self_state])
_next_self_state = self.self_dropout(_next_self_state)
# get self mask
_self_mask = mx.nd.expand_dims(mx.nd.one_hot(inputs[i], self.ku_num), -1)
_self_mask = mx.nd.broadcast_to(_self_mask, (0, 0, self.hidden_num))
_broadcast_next_self_states = mx.nd.expand_dims(_next_self_state, 1)
_broadcast_next_self_states = mx.nd.broadcast_to(_broadcast_next_self_states, (0, self.ku_num, 0))
states = (1 - _self_mask) * states + _self_mask * _broadcast_next_self_states
output = mx.nd.sigmoid(mx.nd.squeeze(self.out(self.dropout(states)), axis=-1))
outputs.append(output)
if valid_length is not None and not compressed_out:
all_states.append([states])
if valid_length is not None:
if compressed_out:
states = None
else:
states = [mx.nd.SequenceLast(mx.nd.stack(*ele_list, axis=0),
sequence_length=valid_length,
use_sequence_length=True,
axis=0)
for ele_list in zip(*all_states)]
outputs = mask_sequence_variable_length(mx.nd, outputs, length, valid_length, axis, True)
outputs, _, _, _ = format_sequence(length, outputs, layout, merge=True)
return outputs, states
class SKTSync(SKT):
def __init__(self, ku_num, graph_params=None,
alpha=0.5,
latent_dim=None, activation=None,
hidden_num=90, concept_dim=None,
dropout=0.0, self_dropout=0.0,
sync_activation="relu", sync_dropout=0.0,
prop_activation="relu", prop_dropout=0.0,
agg_activation="relu", agg_dropout=0.0,
prefix=None, params=None):
        # NOTE: super(SKT, self) deliberately skips SKT.__init__ and invokes
        # gluon.Block.__init__ directly, so SKTSync builds its own sub-layers.
        super(SKT, self).__init__(prefix=prefix, params=params)
self.ku_num = int(ku_num)
self.hidden_num = self.ku_num if hidden_num is None else int(hidden_num)
self.latent_dim = self.hidden_num if latent_dim is None else int(latent_dim)
self.concept_dim = self.hidden_num if concept_dim is None else int(concept_dim)
graph_params = graph_params if graph_params is not None else []
self.graph = Graph.from_file(ku_num, graph_params)
self.alpha = alpha
sync_activation = sync_activation if activation is None else activation
with self.name_scope():
self.rnn = GRUCell(self.hidden_num)
self.response_embedding = gluon.nn.Embedding(2 * self.ku_num, self.latent_dim)
self.concept_embedding = gluon.nn.Embedding(self.ku_num, self.concept_dim)
self.f_self = gluon.rnn.GRUCell(self.hidden_num)
# self.f_self = gluon.nn.Sequential()
# self.f_self.add(
# gluon.nn.Dense(self.hidden_num),
# gluon.nn.Activation("relu")
# )
self.self_dropout = gluon.nn.Dropout(self_dropout)
self.f_sync = gluon.nn.Sequential()
self.f_sync.add(
gluon.nn.Dense(self.hidden_num, flatten=False),
gluon.nn.Activation(sync_activation),
gluon.nn.Dropout(sync_dropout),
)
self.f_reflec = gluon.nn.Sequential()
self.f_reflec.add(
gluon.nn.Dense(self.hidden_num, flatten=False),
gluon.nn.Activation(sync_activation),
gluon.nn.Dropout(sync_dropout),
)
self.dropout = gluon.nn.Dropout(dropout)
self.out = gluon.nn.Dense(1, flatten=False)
def forward(self, questions, answers, valid_length=None, states=None, layout='NTC', compressed_out=True,
*args, **kwargs):
ctx = questions.context
length = questions.shape[1]
inputs, axis, F, batch_size = format_sequence(length, questions, layout, False)
answers, _, _, _ = format_sequence(length, answers, layout, False)
if states is None:
states = begin_states([(batch_size, self.ku_num, self.hidden_num)], self.prefix)[0]
states = states.as_in_context(ctx)
outputs = []
all_states = []
for i in range(length):
            # self-influence
_self_state = get_states(inputs[i], states)
# fc
# _next_self_state = self.f_self(mx.nd.concat(_self_state, self.response_embedding(answers[i]), dim=-1))
# gru
_next_self_state, _ = self.f_self(self.response_embedding(answers[i]), [_self_state])
# _next_self_state = self.f_self(mx.nd.concat(_self_hidden_states, _self_state))
# _next_self_state, _ = self.f_self(_self_hidden_states, [_self_state])
_next_self_state = self.self_dropout(_next_self_state)
# get self mask
_self_mask = mx.nd.expand_dims(mx.nd.one_hot(inputs[i], self.ku_num), -1)
_self_mask = mx.nd.broadcast_to(_self_mask, (0, 0, self.hidden_num))
# self-concept embedding
_self_concept_embedding = self.concept_embedding(inputs[i])
# _broadcast_self_concept_embedding = mx.nd.expand_dims(_self_concept_embedding, dim=1)
# _broadcast_self_concept_embedding = mx.nd.broadcast_to(_broadcast_self_concept_embedding,
# (0, self.ku_num, 0))
# concept embedding
concept_embeddings = self.concept_embedding.weight.data(ctx)
concept_embeddings = expand_tensor(concept_embeddings, 0, batch_size)
# concept_embeddings = (_self_mask + _successors_mask + _neighbors_mask) * concept_embeddings
# find neighbors
_neighbors = self.neighbors(inputs[i])
_neighbors_mask = mx.nd.expand_dims(mx.nd.array(_neighbors, ctx=ctx), -1)
_neighbors_mask = mx.nd.broadcast_to(_neighbors_mask, (0, 0, self.hidden_num))
# synchronization
_broadcast_next_self_states = mx.nd.expand_dims(_next_self_state, 1)
_broadcast_next_self_states = mx.nd.broadcast_to(_broadcast_next_self_states, (0, self.ku_num, 0))
# _sync_diff = mx.nd.concat(states, _broadcast_next_self_states, concept_embeddings, dim=-1)
_sync_diff = mx.nd.concat(states, _broadcast_next_self_states, dim=-1)
_sync_inf = _neighbors_mask * self.f_sync(
mx.nd.concat(_sync_diff, concept_embeddings, dim=-1)
)
# reflection on current vertex
_reflec_diff = mx.nd.concat(mx.nd.sum(_neighbors_mask * states, axis=1) + _next_self_state,
_self_concept_embedding, dim=-1)
# _reflec_diff = mx.nd.concat(mx.nd.sum(_neighbors_mask * states, axis=1), _next_self_state,
# _self_concept_embedding, dim=-1)
_reflec_inf = self.f_reflec(_reflec_diff)
_reflec_inf = mx.nd.broadcast_to(mx.nd.expand_dims(_reflec_inf, 1), (0, self.ku_num, 0))
_sync_inf = _sync_inf + _self_mask * _reflec_inf
# aggregate
_inf = _sync_inf
next_states, _ = self.rnn(_inf, [states])
states = (_neighbors_mask + _self_mask) * next_states + (1 - _neighbors_mask - _self_mask) * states
# states = next_states
output = mx.nd.sigmoid(mx.nd.squeeze(self.out(self.dropout(states)), axis=-1))
outputs.append(output)
if valid_length is not None and not compressed_out:
all_states.append([states])
if valid_length is not None:
if compressed_out:
states = None
else:
states = [mx.nd.SequenceLast(mx.nd.stack(*ele_list, axis=0),
sequence_length=valid_length,
use_sequence_length=True,
axis=0)
for ele_list in zip(*all_states)]
outputs = mask_sequence_variable_length(mx.nd, outputs, length, valid_length, axis, True)
outputs, _, _, _ = format_sequence(length, outputs, layout, merge=True)
return outputs, states
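# ---------------------------------------------------------------------------
# Hedged usage sketch (added commentary, not part of the original module):
# builds the graph-free SKT_TE variant and runs one forward pass on random
# data.  The answer encoding (indices into a 2 * ku_num embedding) is an
# assumption inferred from the code above, not a verified API contract.
#
#     net = get_net(ku_num=10, net_type="SKT_TE")
#     net.initialize(ctx=mx.cpu())
#     questions = mx.nd.random.randint(0, 10, shape=(2, 5))  # (batch, time)
#     answers = mx.nd.random.randint(0, 20, shape=(2, 5))    # e.g. 2*q + correct
#     outputs, states = net(questions, answers)
#     print(outputs.shape)  # expected: (batch, time, ku_num) mastery predictions
# ---------------------------------------------------------------------------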
| 50.40153 | 120 | 0.600948 | 3,266 | 26,360 | 4.514697 | 0.048071 | 0.024144 | 0.037911 | 0.029976 | 0.942421 | 0.928518 | 0.923228 | 0.905934 | 0.901458 | 0.894744 | 0 | 0.009399 | 0.297724 | 26,360 | 522 | 121 | 50.498084 | 0.787111 | 0.171965 | 0 | 0.81686 | 0 | 0 | 0.003041 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.031977 | false | 0 | 0.014535 | 0.005814 | 0.087209 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f1d5fa9e9f93ced1bd2634dfa6e734849ffc9cf4 | 7,537 | py | Python | muon.py | avain/DeepLearningTutorial | 423de3f4595a357167e01e9145b5147c611976bc | [
"MIT"
] | 1 | 2018-07-03T08:30:24.000Z | 2018-07-03T08:30:24.000Z | muon.py | avain/DeepLearningTutorial | 423de3f4595a357167e01e9145b5147c611976bc | [
"MIT"
] | null | null | null | muon.py | avain/DeepLearningTutorial | 423de3f4595a357167e01e9145b5147c611976bc | [
"MIT"
] | null | null | null | from mxnet import gluon,nd,autograd,metric
import matplotlib.pyplot as plt
import numpy as np
class CreateModel(gluon.nn.Block):
def __init__(self,layer,ctx,precision, **kwargs):
super(CreateModel, self).__init__(**kwargs)
self.layer=layer
self.precision=precision
self.ctx=ctx
self.layer.cast(self.precision)
def grad_check_first_layer(self):
print( self.layer[0].weight )
print( self.layer[0].weight.grad().sum() )
def forward(self,x):
#print(x)
return self.layer(x)
def fit(self,train_gen,test_gen,epochs,print_every,
loss_with_softmax,optimizer):
trainer=gluon.Trainer(params=self.collect_params(),
optimizer=optimizer)
# Initialize some objects for the metrics
acc=metric.Accuracy()
train_acc_records=[]
test_acc_records=[]
loss_records=[]
for e in range(epochs):
for i,(data,label) in enumerate(train_gen):
data=data.as_in_context(self.ctx).astype(self.precision)
label=label.as_in_context(self.ctx).astype(np.float32)
with autograd.record():
label_linear=self.layer(data)
label_linear=label_linear.astype(np.float32) # Improve accuracy, as suggested in nVIDIA's SDK.
loss=loss_with_softmax(label_linear,label)
loss.backward()
                trainer.step(batch_size=128)  # NOTE: assumes the DataLoader batch size is 128
                # Print the metrics every `print_every` iterations.
if (i%print_every==0): # print metrics for train (current batch) & test data.
label_pred = nd.argmax( nd.softmax(label_linear ), axis=1)
acc.reset()
acc.update(preds=label_pred, labels=label)
train_acc=acc.get()[1]
test_acc =self.evaluate_accuracy(test_gen, self.layer)
train_acc_records.append(train_acc)
test_acc_records.append(test_acc)
curr_loss = nd.mean(loss).asscalar()
loss_records.append(curr_loss)
print("epoch=%2s, iter=%5d, loss=%10f, train acc=%10f, test_acc=%10f"%(e,i,curr_loss,train_acc,test_acc))
# Visialize the calculated metrics of accuracy during of training.
self.viz_training(train_acc_records,test_acc_records,loss_records)
def evaluate_accuracy(self,data_iterator, net):
'''Given model and data, the model accuracy will be calculated.'''
acc = metric.Accuracy()
for i, (data, label) in enumerate(data_iterator):
data = data.as_in_context(self.ctx).astype(self.precision)
label = label.as_in_context(self.ctx).astype(self.precision)
output = net(data)
predictions = nd.argmax(output, axis=1)
acc.update(preds=predictions, labels=label)
return acc.get()[1]
def viz_training(self,train_acc_records,test_acc_records,loss_records):
"""show how the metrics such as loss and model accuracy varies in the progress of training"""
fig,axes=plt.subplots(1,2,figsize=(18,6),dpi=120)
axes[0].plot(train_acc_records,ms=5,marker='o',label='train acc',ls='--')
axes[0].plot(test_acc_records,ms=5,marker='o',label='val acc',ls='--')
axes[0].legend()
axes[1].plot(loss_records,ms=5,marker='o',label='train loss',ls='--')
axes[1].legend()
for idx,ax in enumerate(axes):
ax.set_xlabel('Epoch')
if idx==0:
ax.set_ylabel('Accuracy')
else:
ax.set_ylabel('Loss')
plt.show()
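# ---------------------------------------------------------------------------
# Hedged usage sketch (added commentary, not part of the original module):
# wires CreateModel to FashionMNIST.  The layer stack and optimizer are
# illustrative choices; the batch size of 128 matches the value hard-coded
# in fit()'s trainer.step call.
#
#     import mxnet as mx
#     def transform(data, label):
#         return data.astype('float32').reshape(-1) / 255, label
#     train_gen = gluon.data.DataLoader(
#         gluon.data.vision.FashionMNIST(train=True, transform=transform),
#         batch_size=128, shuffle=True)
#     test_gen = gluon.data.DataLoader(
#         gluon.data.vision.FashionMNIST(train=False, transform=transform),
#         batch_size=128, shuffle=False)
#     layer = gluon.nn.Sequential()
#     layer.add(gluon.nn.Dense(256, activation='relu'), gluon.nn.Dense(10))
#     model = CreateModel(layer, ctx=mx.cpu(), precision='float32')
#     model.initialize(ctx=mx.cpu())
#     model.fit(train_gen, test_gen, epochs=1, print_every=100,
#               loss_with_softmax=gluon.loss.SoftmaxCrossEntropyLoss(),
#               optimizer='adam')
# ---------------------------------------------------------------------------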
class CreateHybridModel(gluon.nn.HybridBlock):
def __init__(self,layer,ctx,precision, **kwargs):
super(CreateHybridModel, self).__init__(**kwargs)
self.layer=layer
self.precision=precision
self.ctx=ctx
self.layer.cast(self.precision)
def grad_check_first_layer(self):
print( self.layer[0].weight )
print( self.layer[0].weight.grad().sum() )
def hybrid_forward(self,F,x):
#print(x)
return self.layer(x)
def fit(self,train_gen,test_gen,epochs,print_every,
loss_with_softmax,optimizer):
trainer=gluon.Trainer(params=self.collect_params(),
optimizer=optimizer)
# Initialize some objects for the metrics
acc=metric.Accuracy()
train_acc_records=[]
test_acc_records=[]
loss_records=[]
for e in range(epochs):
for i,(data,label) in enumerate(train_gen):
data=data.as_in_context(self.ctx).astype(self.precision)
label=label.as_in_context(self.ctx).astype(np.float32)
with autograd.record():
label_linear=self.layer(data)
label_linear=label_linear.astype(np.float32) # Improve accuracy, as suggested in nVIDIA's SDK.
loss=loss_with_softmax(label_linear,label)
loss.backward()
                trainer.step(batch_size=128)  # NOTE: assumes the DataLoader batch size is 128
                # Print the metrics every `print_every` iterations.
if (i%print_every==0): # print metrics for train (current batch) & test data.
label_pred = nd.argmax( nd.softmax(label_linear ), axis=1)
acc.reset()
acc.update(preds=label_pred, labels=label)
train_acc=acc.get()[1]
test_acc =self.evaluate_accuracy(test_gen, self.layer)
train_acc_records.append(train_acc)
test_acc_records.append(test_acc)
curr_loss = nd.mean(loss).asscalar()
loss_records.append(curr_loss)
print("epoch=%2s, iter=%5d, loss=%10f, train acc=%10f, test_acc=%10f"%(e,i,curr_loss,train_acc,test_acc))
# Visialize the calculated metrics of accuracy during of training.
self.viz_training(train_acc_records,test_acc_records,loss_records)
def evaluate_accuracy(self,data_iterator, net):
'''Given model and data, the model accuracy will be calculated.'''
acc = metric.Accuracy()
for i, (data, label) in enumerate(data_iterator):
data = data.as_in_context(self.ctx).astype(self.precision)
label = label.as_in_context(self.ctx).astype(self.precision)
output = net(data)
predictions = nd.argmax(output, axis=1)
acc.update(preds=predictions, labels=label)
return acc.get()[1]
def viz_training(self,train_acc_records,test_acc_records,loss_records):
"""show how the metrics such as loss and model accuracy varies in the progress of training"""
fig,axes=plt.subplots(1,2,figsize=(18,6),dpi=120)
axes[0].plot(train_acc_records,ms=5,marker='o',label='train acc',ls='--')
axes[0].plot(test_acc_records,ms=5,marker='o',label='val acc',ls='--')
axes[0].legend()
axes[1].plot(loss_records,ms=5,marker='o',label='train loss',ls='--')
axes[1].legend()
for idx,ax in enumerate(axes):
ax.set_xlabel('Epoch')
if idx==0:
ax.set_ylabel('Accuracy')
else:
ax.set_ylabel('Loss')
plt.show() | 40.521505 | 125 | 0.587502 | 942 | 7,537 | 4.525478 | 0.159236 | 0.037532 | 0.035186 | 0.028149 | 0.952381 | 0.952381 | 0.952381 | 0.952381 | 0.934084 | 0.934084 | 0 | 0.014736 | 0.297731 | 7,537 | 186 | 126 | 40.521505 | 0.790667 | 0.108001 | 0 | 0.932331 | 0 | 0 | 0.033772 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090226 | false | 0 | 0.022556 | 0.015038 | 0.157895 | 0.075188 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7b25b32d34f39e82f502f67be69100c1ed37da03 | 2,619 | py | Python | tests/parser/choice.48.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/choice.48.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/choice.48.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | input = """
% This is a synthetic example documenting a bug in an early version of DLV's
% backjumping algorithm.
% The abstract computation tree looks as follows (choice order should be fixed
% by disabling heuristics with -OH-):
%
% o
% a / \ -a
% / \_..._
% o \
% b / \ -b {-a,-b,f}
% / \
% o o
% incons incons based on a and b
% based
% only
% on b
%
% The backjumping algorithm wrongly determined that in the bottom left
% subtree both inconsistencies are based only on the choice of b and
% therefore stopped the entire search, missing the model on the right.
a | -a.
b | -b.
% taking b causes inconsistency
x :- b.
y :- b.
:- x,y.
% taking -b causes m1 to be false, but only with a
% taking -b unconditionally causes d to be false
:- -b, a, m1.
:- -b, d.
% falsity of m1 and d causes violation of the following constraint
% the reasons are obviously the choice for b and the choice for a
:- not m1, not d.
% give m1 a chance to be true
% if not allow a model with g (which does not exist as m1 will be false there
% but together with -b it causes inconsistency, and taking b also entails
% inconsistency)
m1 | g.
% avoid d to be always false
% and allow a model with f
d | f.
"""
output = """
% This is a synthetic example documenting a bug in an early version of DLV's
% backjumping algorithm.
% The abstract computation tree looks as follows (choice order should be fixed
% by disabling heuristics with -OH-):
%
% o
% a / \ -a
% / \_..._
% o \
% b / \ -b {-a,-b,f}
% / \
% o o
% incons incons based on a and b
% based
% only
% on b
%
% The backjumping algorithm wrongly determined that in the bottom left
% subtree both inconsistencies are based only on the choice of b and
% therefore stopped the entire search, missing the model on the right.
a | -a.
b | -b.
% taking b causes inconsistency
x :- b.
y :- b.
:- x,y.
% taking -b causes m1 to be false, but only with a
% taking -b unconditionally causes d to be false
:- -b, a, m1.
:- -b, d.
% falsity of m1 and d causes violation of the following constraint
% the reasons are obviously the choice for b and the choice for a
:- not m1, not d.
% give m1 a chance to be true
% if not allow a model with g (which does not exist as m1 will be false there
% but together with -b it causes inconsistency, and taking b also entails
% inconsistency)
m1 | g.
% avoid d to be always false
% and allow a model with f
d | f.
"""
| 24.942857 | 79 | 0.626575 | 418 | 2,619 | 3.916268 | 0.227273 | 0.034209 | 0.026878 | 0.036652 | 0.99328 | 0.99328 | 0.99328 | 0.99328 | 0.99328 | 0.99328 | 0 | 0.007531 | 0.290187 | 2,619 | 104 | 80 | 25.182692 | 0.87305 | 0 | 0 | 0.930233 | 0 | 0 | 0.987694 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9e61263f3416959aa32c25a1d8c393ed8fa31248 | 2,488 | py | Python | unit_tests_evolutionary.py | CLAHRCWessex/SymmetricTSP | 2cfce4146ece0c784aa62f1b0e2ac1cb2e91b6c4 | [
"MIT"
] | 1 | 2020-06-01T22:56:11.000Z | 2020-06-01T22:56:11.000Z | unit_tests_evolutionary.py | CLAHRCWessex/SymmetricTSP | 2cfce4146ece0c784aa62f1b0e2ac1cb2e91b6c4 | [
"MIT"
] | null | null | null | unit_tests_evolutionary.py | CLAHRCWessex/SymmetricTSP | 2cfce4146ece0c784aa62f1b0e2ac1cb2e91b6c4 | [
"MIT"
] | null | null | null | '''
Unit tests for evolutionary algorithms module 'evolutionary.py'
Work in progress!
'''
import numpy as np
import pytest
from evolutionary import PartiallyMappedCrossover
def test_pmx_child_a():
test_seed = 101
np.random.seed(seed=test_seed)
#sets up swap between index 1 and 3 for child a
#sets up swap between index 3 and 5 for child b
parent_a = np.array([1, 2, 3, 4, 5, 12])
parent_b = np.array([2, 5, 12, 3, 4, 1])
#expected answer c_a:
#0: c_a = [1, 2, 3, 4, 5, 12]
#1: c_a = [1, 5, 3, 4, 2, 12]
#2: c_a = [1, 5, 12, 4, 2, 3]
#3: c_a = [1, 5, 12, 3, 2, 4]
#expected answer c_b:
#0: c_a = [2, 5, 12, 3, 4, 1]
#1: c_a = [2, 5, 12, 4, 3, 1]
#2: c_a = [2, 3, 12, 4, 5, 1]
#3: c_a = [2, 3, 1, 4, 5, 12]
expected_c_a = np.array([1, 5, 12, 3, 2, 4])
expected_c_b = np.array([2, 3, 1, 4, 5, 12])
print(np.sort(np.random.randint(0, len(parent_a), size = 2)))
print('before: {0} {1}'.format(parent_a, parent_b))
np.random.seed(seed=test_seed)
x_operator = PartiallyMappedCrossover()
c_a, c_b = x_operator.crossover(parent_a, parent_b)
print('children: {0} {1}'.format(c_a, c_b))
print('after: {0} {1}'.format(parent_a, parent_b))
assert np.array_equal(expected_c_a, c_a)
def test_pmx_child_b():
test_seed = 101
np.random.seed(seed=test_seed)
#sets up swap between index 1 and 3 for child a
#sets up swap between index 3 and 5 for child b
parent_a = np.array([1, 2, 3, 4, 5, 12])
parent_b = np.array([2, 5, 12, 3, 4, 1])
#expected answer c_a:
#0: c_a = [1, 2, 3, 4, 5, 12]
#1: c_a = [1, 5, 3, 4, 2, 12]
#2: c_a = [1, 5, 12, 4, 2, 3]
#3: c_a = [1, 5, 12, 3, 2, 4]
#expected answer c_b:
#0: c_a = [2, 5, 12, 3, 4, 1]
#1: c_a = [2, 5, 12, 4, 3, 1]
#2: c_a = [2, 3, 12, 4, 5, 1]
#3: c_a = [2, 3, 1, 4, 5, 12]
expected_c_a = np.array([1, 5, 12, 3, 2, 4])
expected_c_b = np.array([2, 3, 1, 4, 5, 12])
print(np.sort(np.random.randint(0, len(parent_a), size = 2)))
print('before: {0} {1}'.format(parent_a, parent_b))
np.random.seed(seed=test_seed)
x_operator = PartiallyMappedCrossover()
c_a, c_b = x_operator.crossover(parent_a, parent_b)
print('children: {0} {1}'.format(c_a, c_b))
print('after: {0} {1}'.format(parent_a, parent_b))
assert np.array_equal(expected_c_b, c_b)
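# ---------------------------------------------------------------------------
# Hedged reference sketch (added commentary, not part of the original module):
# one PMX variant consistent with the step-by-step traces in the comments
# above.  It draws a fresh sorted pair of cut points per child and swaps
# within the cut range; the real PartiallyMappedCrossover may differ.
def _pmx_child(receiver, donor):
    """Return a PMX child of `receiver`, mapped against `donor`."""
    child = receiver.copy()
    lo, hi = np.sort(np.random.randint(0, len(receiver), size=2))
    for i in range(lo, hi + 1):
        # swap the gene at position i with the gene equal to donor[i]
        j = int(np.where(child == donor[i])[0][0])
        child[i], child[j] = child[j], child[i]
    return child
# Usage, mirroring x_operator.crossover(parent_a, parent_b):
#     c_a = _pmx_child(parent_a, parent_b)
#     c_b = _pmx_child(parent_b, parent_a)
# ---------------------------------------------------------------------------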
if __name__ == '__main__':
test_pmx_child_a()
test_pmx_child_b()
| 26.189474 | 65 | 0.574357 | 484 | 2,488 | 2.762397 | 0.126033 | 0.038893 | 0.023934 | 0.017951 | 0.861631 | 0.839192 | 0.839192 | 0.839192 | 0.839192 | 0.839192 | 0 | 0.114561 | 0.249196 | 2,488 | 95 | 66 | 26.189474 | 0.601178 | 0.319534 | 0 | 0.722222 | 0 | 0 | 0.060132 | 0 | 0 | 0 | 0 | 0 | 0.055556 | 1 | 0.055556 | false | 0 | 0.083333 | 0 | 0.138889 | 0.222222 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9e75fd895b35ad1bffe6960d3a4dd10f7527caae | 92 | py | Python | retropie_game_editor/routes/__init__.py | alanquillin/retropie_game_editor | 6bfced066394ea3dc3504cf50af4dd25eb2366bf | [
"Apache-2.0"
] | null | null | null | retropie_game_editor/routes/__init__.py | alanquillin/retropie_game_editor | 6bfced066394ea3dc3504cf50af4dd25eb2366bf | [
"Apache-2.0"
] | null | null | null | retropie_game_editor/routes/__init__.py | alanquillin/retropie_game_editor | 6bfced066394ea3dc3504cf50af4dd25eb2366bf | [
"Apache-2.0"
] | null | null | null | from retropie_game_editor.routes import games
from retropie_game_editor.routes import tools
| 30.666667 | 45 | 0.891304 | 14 | 92 | 5.571429 | 0.571429 | 0.307692 | 0.410256 | 0.564103 | 0.871795 | 0.871795 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086957 | 92 | 2 | 46 | 46 | 0.928571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 10 |
7b9fd930435734590eb3454ea7fc8f2a9a5a804b | 1,280 | py | Python | tql/utils/config/set_plot.py | Jie-Yuan/1_DataMining | f5338388b4f883233f350d4fb9c5903180883430 | [
"Apache-2.0"
] | 14 | 2019-06-25T13:46:32.000Z | 2020-10-27T02:04:59.000Z | tql/utils/config/set_plot.py | Jie-Yuan/2_DataMining | f5338388b4f883233f350d4fb9c5903180883430 | [
"Apache-2.0"
] | null | null | null | tql/utils/config/set_plot.py | Jie-Yuan/2_DataMining | f5338388b4f883233f350d4fb9c5903180883430 | [
"Apache-2.0"
] | 7 | 2019-06-25T13:26:16.000Z | 2020-10-27T02:05:03.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : tql-Python.
# @File : plot_set
# @Time : 2019-06-20 11:28
# @Author : yuanjie
# @Email : yuanjie@xiaomi.com
# @Software : PyCharm
# @Description :
import matplotlib.pyplot as plt
def set_plot():
"""
plt.rcParams['font.sans-serif'] = ['Simhei'] # 中文乱码的处理
plt.rcParams['font.family'] = 'sans-serif'
plt.rcParams['axes.unicode_minus'] = False # 负号
plt.rcParams["text.usetex"] = False
plt.rcParams["legend.numpoints"] = 1
plt.rcParams["figure.figsize"] = (18, 9) # (12, 6)
plt.rcParams["figure.dpi"] = 128
plt.rcParams["savefig.dpi"] = plt.rcParams["figure.dpi"]
plt.rcParams["font.size"] = 12
plt.rcParams["pdf.fonttype"] = 42
"""
    plt.rcParams['font.sans-serif'] = ['Simhei']  # use SimHei so Chinese labels render correctly
    plt.rcParams['font.family'] = 'sans-serif'
    plt.rcParams['axes.unicode_minus'] = False  # render the minus sign correctly with this font
plt.rcParams["text.usetex"] = False
plt.rcParams["legend.numpoints"] = 1
plt.rcParams["figure.figsize"] = (18, 9) # (12, 6)
plt.rcParams["figure.dpi"] = 128
plt.rcParams["savefig.dpi"] = plt.rcParams["figure.dpi"]
plt.rcParams["font.size"] = 12
plt.rcParams["pdf.fonttype"] = 42
print('Setting Success!')
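# Usage sketch (assumed import path, mirroring this file's repo location):
#     from tql.utils.config.set_plot import set_plot
#     set_plot()  # call once, before creating any figures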
| 32.820513 | 60 | 0.604688 | 161 | 1,280 | 4.782609 | 0.403727 | 0.314286 | 0.116883 | 0.103896 | 0.761039 | 0.761039 | 0.761039 | 0.761039 | 0.761039 | 0.761039 | 0 | 0.040315 | 0.205469 | 1,280 | 38 | 61 | 33.684211 | 0.716814 | 0.527344 | 0 | 0 | 0 | 0 | 0.311808 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | true | 0 | 0.076923 | 0 | 0.153846 | 0.076923 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7bc9513107ffa9240546f89db0e06b2bf7aad8a1 | 7,981 | py | Python | configs/doom.py | joel99/midlevel-reps | f0b4a4d8ccf09a0488cd18af24723172aff99446 | [
"MIT"
] | 120 | 2019-04-22T04:45:28.000Z | 2022-03-23T01:53:17.000Z | configs/doom.py | joel99/midlevel-reps | f0b4a4d8ccf09a0488cd18af24723172aff99446 | [
"MIT"
] | 14 | 2019-06-12T08:21:21.000Z | 2021-08-25T15:36:58.000Z | configs/doom.py | joel99/midlevel-reps | f0b4a4d8ccf09a0488cd18af24723172aff99446 | [
"MIT"
] | 19 | 2019-06-19T07:00:36.000Z | 2022-03-24T07:18:30.000Z | # Doom configs
# This should be sourced by the training script,
# which must save a sacred experiment in the variable "ex"
# For descriptions of all fields, see configs/core.py
@ex.named_config
def cfg_doom_navigation():
uuid = 'doom_visualnavigation'
cfg = {}
cfg['learner'] = {
'algo': 'ppo', # Learning algorithm for RL agent. Currently only PPO
'clip_param': 0.1, # Clip param for trust region in PPO
'entropy_coef': 0.01, # Weighting of the entropy term in PPO
'eps': 1e-5, # Small epsilon to prevent divide-by-zero
'gamma': 0.99, # Gamma to use if env.observation_space.shape = 1
'internal_state_size': 512, # If using a recurrent policy, what state size to use
'lr': 0.0001, # Learning rate for algorithm
'num_steps': 200, # Length of each rollout
'num_mini_batch': 16, # Size of PPO minibatch
'num_stack': 4, # Frames that each cell (CNN) can see
'max_grad_norm': 0.5, # Clip grads
'ppo_epoch': 4, # Number of times PPO goes over the buffer
'recurrent_policy': False, # Use a recurrent version with the cell as the standard model
'tau': 0.95, # When using GAE
'use_gae': True, # Whether to use GAE
'value_loss_coef': 0.0001, # Weighting of value_loss in PPO
'perception_network': 'AtariNet',
        'test': False,
        'use_replay': False,
'replay_buffer_size': 1000,
'on_policy_epoch': 4,
'off_policy_epoch': 0,
}
image_dim = 84
cfg['env'] = {
'add_timestep': False, # Add timestep to the observation
'env_name': 'Doom_VizdoomMultiGoalWithClutterEnv.room-v0',
"env_specific_args": {
# "episode_timeout": 1000,
"episode_timeout": 100,
"n_clutter_objects": 8,
"n_goal_objects": 1
},
'sensors': {
'rgb_filled': None,
'taskonomy': None,
'map': None,
'target': None
},
'transform_fn_pre_aggregation': None,
'transform_fn_post_aggregation': None,
'num_processes': 1,
'additional_repeat_count': 3,
}
cfg['saving'] = {
'port': 8097,
'log_dir': LOG_DIR,
'log_interval': 1,
'save_interval': 100,
'save_dir': 'checkpoints',
'visdom_log_file': os.path.join(LOG_DIR, 'visdom_logs.json'),
'results_log_file': os.path.join(LOG_DIR, 'result_log.pkl'),
'reward_log_file': os.path.join(LOG_DIR, 'rewards.pkl'),
'vis': False,
'vis_interval': 200,
'launcher_script': None,
'visdom_server': 'localhost',
'visdom_port': '8097',
'checkpoint': None,
'checkpoint_configs': False, # copy the metadata of the checkpoint. YMMV.
}
cfg['training'] = {
'cuda': True,
'seed': random.randint(0,1000),
'num_frames': 5e6,
'resumable': True,
}
@ex.named_config
def scratch_doom():
    # scratch is not compatible with collate because we need to perform Image
    # operations (resizing from 256 down to 84), which the collate code does not implement
uuid = 'doom_scratch'
cfg = {}
cfg['learner'] = {
'perception_network': 'AtariNet',
'perception_network_kwargs': {
'n_map_channels': 0,
'use_target': False,
}
}
cfg['env'] = {
'env_specific_kwargs': {
"episode_timeout": 1000,
"n_clutter_objects": 8,
"n_goal_objects": 1
},
'transform_fn_pre_aggregation': """
TransformFactory.splitting(
{
'color': {
'rgb_filled':rescale_centercrop_resize((3,84,84)) }
},
keep_unnamed=False)
""".translate(remove_whitespace),
'transform_fn_post_aggregation': None,
}
@ex.named_config
def cfg_doom_exploration():
uuid = 'doom_myopicexploration'
cfg = {}
cfg['learner'] = {
'algo': 'ppo', # Learning algorithm for RL agent. Currently only PPO
'clip_param': 0.1, # Clip param for trust region in PPO
'entropy_coef': 0.01, # Weighting of the entropy term in PPO
'eps': 1e-5, # Small epsilon to prevent divide-by-zero
'gamma': 0.99, # Gamma to use if env.observation_space.shape = 1
'internal_state_size': 512, # If using a recurrent policy, what state size to use
'lr': 0.0001, # Learning rate for algorithm
'num_steps': 200, # Length of each rollout
'num_mini_batch': 16, # Size of PPO minibatch
'num_stack': 4, # Frames that each cell (CNN) can see
'max_grad_norm': 0.5, # Clip grads
'ppo_epoch': 4, # Number of times PPO goes over the buffer
'recurrent_policy': False, # Use a recurrent version with the cell as the standard model
'tau': 0.95, # When using GAE
'use_gae': True, # Whether to use GAE
'value_loss_coef': 0.0001, # Weighting of value_loss in PPO
'perception_network': 'AtariNet',
        'test': False,
        'use_replay': False,
'replay_buffer_size': 1000,
'on_policy_epoch': 4,
'off_policy_epoch': 0,
}
image_dim = 84
cfg['env'] = {
'add_timestep': False, # Add timestep to the observation
'env_name': 'Doom_VizdoomExplorationEnv.room-v0',
"env_specific_args": {
"episode_timeout": 2000,
},
'sensors': {
'rgb_filled': None,
'taskonomy': None,
'map': None,
'occupancy': None
},
'transform_fn_pre_aggregation': None,
'transform_fn_post_aggregation': None,
'num_processes': 1,
'additional_repeat_count': 3,
}
cfg['saving'] = {
'port': 8097,
'log_dir': LOG_DIR,
'log_interval': 1,
'save_interval': 100,
'save_dir': 'checkpoints',
'visdom_log_file': os.path.join(LOG_DIR, 'visdom_logs.json'),
'results_log_file': os.path.join(LOG_DIR, 'result_log.pkl'),
'reward_log_file': os.path.join(LOG_DIR, 'rewards.pkl'),
'vis': False,
'vis_interval': 200,
'launcher_script': None,
'visdom_server': 'localhost',
'visdom_port': '8097',
'checkpoint': None,
'checkpoint_configs': False, # copy the metadata of the checkpoint. YMMV.
}
cfg['training'] = {
'cuda': True,
'seed': random.randint(0,1000),
'num_frames': 5e5,
'resumable': True,
}
@ex.named_config
def scratch_doom_exploration():
    # scratch is not compatible with collate because we need to perform Image
    # operations (resizing from 256 down to 84), which the collate code does not implement
uuid = 'doom_scratch_exploration'
cfg = {}
cfg['learner'] = {
'perception_network': 'AtariNet',
'perception_network_kwargs': {
'n_map_channels': 1,
'use_target': False,
}
}
cfg['env'] = {
'env_specific_kwargs': {
},
'transform_fn_pre_aggregation': """
TransformFactory.splitting(
{
'color': {
'rgb_filled':rescale_centercrop_resize((3,84,84)) },
'occupancy': {
'map': rescale_centercrop_resize((1,84,84))}
},
keep_unnamed=False)
""".translate(remove_whitespace),
'transform_fn_post_aggregation': None,
}
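# ---------------------------------------------------------------------------
# Hedged usage sketch (added commentary, not part of the original module).
# Per the header, this file is sourced by a training script that has already
# created a sacred experiment in the variable `ex`.  One plausible shape for
# that script is below; the experiment name, LOG_DIR value, and the
# `remove_whitespace` table are assumptions, as are the TransformFactory
# helpers referenced inside the configs.
#
#     import os, random, string
#     from sacred import Experiment
#
#     ex = Experiment('midlevel_doom')
#     LOG_DIR = '/tmp/doom_logs'
#     remove_whitespace = str.maketrans('', '', string.whitespace)
#
#     exec(open('configs/doom.py').read())      # registers the named configs
#
#     @ex.main
#     def train(uuid, cfg):
#         ...                                   # real training loop goes here
#
#     ex.run(named_configs=['cfg_doom_navigation'])
# ---------------------------------------------------------------------------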
| 35.95045 | 108 | 0.543165 | 887 | 7,981 | 4.668546 | 0.263811 | 0.014489 | 0.01304 | 0.018836 | 0.907993 | 0.907993 | 0.896885 | 0.879981 | 0.809466 | 0.809466 | 0 | 0.035081 | 0.342814 | 7,981 | 221 | 109 | 36.113122 | 0.754433 | 0.222153 | 0 | 0.756477 | 0 | 0 | 0.413961 | 0.106656 | 0 | 0 | 0 | 0 | 0 | 1 | 0.020725 | false | 0 | 0 | 0 | 0.020725 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cdddac10d0b1294b8cb1c1a263ccf4d553be9181 | 127 | py | Python | reinforcement_learning/rl_deepracer_robomaker_coach_gazebo/src/markov/cameras/handlers/__init__.py | jpmarques19/tensorflwo-test | 0ff8b06e0415075c7269820d080284a42595bb2e | [
"Apache-2.0"
] | 2,610 | 2020-10-01T14:14:53.000Z | 2022-03-31T18:02:31.000Z | reinforcement_learning/rl_deepracer_robomaker_coach_gazebo/src/markov/cameras/handlers/__init__.py | jpmarques19/tensorflwo-test | 0ff8b06e0415075c7269820d080284a42595bb2e | [
"Apache-2.0"
] | 1,959 | 2020-09-30T20:22:42.000Z | 2022-03-31T23:58:37.000Z | reinforcement_learning/rl_deepracer_robomaker_coach_gazebo/src/markov/cameras/handlers/__init__.py | jpmarques19/tensorflwo-test | 0ff8b06e0415075c7269820d080284a42595bb2e | [
"Apache-2.0"
] | 2,052 | 2020-09-30T22:11:46.000Z | 2022-03-31T23:02:51.000Z | from markov.cameras.handlers.follow_car_camera import FollowCarCamera
from markov.cameras.handlers.top_camera import TopCamera
| 42.333333 | 69 | 0.889764 | 17 | 127 | 6.470588 | 0.647059 | 0.181818 | 0.309091 | 0.454545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.062992 | 127 | 2 | 70 | 63.5 | 0.92437 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b527389e9755e0e06ed6a8d47350d6bc82028163 | 132 | py | Python | orders/admin.py | SiddhantNaik17/TheBashTeam_website | a4c3e023599fa9f1b0afa6485346b5b7b883e7f5 | [
"MIT"
] | null | null | null | orders/admin.py | SiddhantNaik17/TheBashTeam_website | a4c3e023599fa9f1b0afa6485346b5b7b883e7f5 | [
"MIT"
] | null | null | null | orders/admin.py | SiddhantNaik17/TheBashTeam_website | a4c3e023599fa9f1b0afa6485346b5b7b883e7f5 | [
"MIT"
] | 1 | 2020-11-21T16:03:30.000Z | 2020-11-21T16:03:30.000Z | from django.contrib import admin
from orders.models import Order, Address
admin.site.register(Address)
admin.site.register(Order)
| 18.857143 | 40 | 0.818182 | 19 | 132 | 5.684211 | 0.578947 | 0.222222 | 0.296296 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098485 | 132 | 6 | 41 | 22 | 0.907563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
b5950d02b8c0955813471e397905fbe2511da694 | 327 | py | Python | tests/parser/checker.4.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/checker.4.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/checker.4.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | input = """
a | c :- d, e.
d :- a.
d :- b.
b :- a.
e :- f, g, i.
f :- g, e, i.
i :- e, f, h.
g :- f, h, e, i.
c :- h, g.
h :- i.
a | e.
"""
output = """
a | c :- d, e.
d :- a.
d :- b.
b :- a.
e :- f, g, i.
f :- g, e, i.
i :- e, f, h.
g :- f, h, e, i.
c :- h, g.
h :- i.
a | e.
"""
| 8.384615 | 17 | 0.24159 | 70 | 327 | 1.128571 | 0.157143 | 0.101266 | 0.075949 | 0.101266 | 0.860759 | 0.860759 | 0.860759 | 0.860759 | 0.860759 | 0.860759 | 0 | 0 | 0.446483 | 327 | 38 | 18 | 8.605263 | 0.436464 | 0 | 0 | 0.923077 | 0 | 0 | 0.894198 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
a9257a52b380103eed67330a699b4aeda41fa151 | 175 | py | Python | pydeploy/tests/__init__.py | yukinotenshi/pydeploy | e3ddf907b293c9da28503b3d72414a303c5dfbed | [
"MIT"
] | 2 | 2018-12-08T16:18:13.000Z | 2020-04-05T11:13:01.000Z | pydeploy/tests/__init__.py | yukinotenshi/pydeploy | e3ddf907b293c9da28503b3d72414a303c5dfbed | [
"MIT"
] | 2 | 2021-06-01T23:08:24.000Z | 2021-11-27T06:13:41.000Z | pydeploy/tests/__init__.py | yukinotenshi/pydeploy | e3ddf907b293c9da28503b3d72414a303c5dfbed | [
"MIT"
] | 1 | 2020-01-25T10:51:41.000Z | 2020-01-25T10:51:41.000Z | from pydeploy.tests.test_command import *
from pydeploy.tests.test_command_chain import *
from pydeploy.tests.test_notifier import *
from pydeploy.tests.test_webhook import *
| 35 | 47 | 0.84 | 25 | 175 | 5.68 | 0.36 | 0.338028 | 0.478873 | 0.591549 | 0.816901 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.091429 | 175 | 4 | 48 | 43.75 | 0.893082 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 8 |
a9329e5f794ac7ff89bdca36f5443f64d702e232 | 10,357 | py | Python | tests/test_namespace/test_include_directive.py | encodis/mokuwiki | e0f25a676ed3d53fc63a760dd4f1c87442306334 | [
"MIT"
] | null | null | null | tests/test_namespace/test_include_directive.py | encodis/mokuwiki | e0f25a676ed3d53fc63a760dd4f1c87442306334 | [
"MIT"
] | null | null | null | tests/test_namespace/test_include_directive.py | encodis/mokuwiki | e0f25a676ed3d53fc63a760dd4f1c87442306334 | [
"MIT"
] | null | null | null | import os
from mokuwiki.wiki import Wiki
from utils import create_wiki_config, create_markdown_file, create_markdown_string, compare_markdown_content
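# Include-directive syntax exercised by these tests (inferred from the cases
# below; treat mokuwiki's own docs as authoritative):
#
#   <<path.md>>               include the body of one file
#   <<glob*.md>>              include every match, concatenated
#   <<glob*.md|SEP>>          join matches with the separator line SEP
#   <<path.md||PREFIX >>      prepend PREFIX to each included line
#   <<glob*.md|SEP|PREFIX >>  separator plus per-line prefix
#   ?{field}                  replaced with the included page's metadata value
#   'prefix'/'suffix' front-matter keys wrap the included body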
def test_process_file_includes(tmpdir):
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('ns1')
target_dir = tmpdir.mkdir('target')
create_markdown_file(source_dir.join('ns1', 'file1.md'),
{'title': 'Page One'},
f'<<{source_dir.join("ns1", "file2.md")}>>')
create_markdown_file(source_dir.join('ns1', 'file2.md'),
{'title': 'Page Two'},
'Included Text')
create_wiki_config(str(source_dir.join('test.cfg')),
None,
{'name': 'ns1',
'path': f'{source_dir.join("ns1")}',
'target': str(target_dir)})
wiki = Wiki(source_dir.join('test.cfg'))
wiki.process_namespaces()
expect1 = create_markdown_string({'title': 'Page One'},
'''Included Text''')
assert os.path.exists(target_dir.join('ns1', 'page_one.md'))
with open(target_dir.join('ns1', 'page_one.md'), 'r', encoding='utf8') as fh:
actual1 = fh.read()
assert compare_markdown_content(expect1, actual1)
def test_process_file_includes_globbing(tmpdir):
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('ns1')
target_dir = tmpdir.mkdir('target')
create_markdown_file(source_dir.join('ns1', 'file1.md'),
{'title': 'Page One'},
f'<<{source_dir.join("ns1", "fileX*.md")}>>')
create_markdown_file(source_dir.join('ns1', 'fileX2.md'),
{'title': 'Page Two'},
'Included Text 2')
create_markdown_file(source_dir.join('ns1', 'fileX3.md'),
{'title': 'Page Three'},
'Included Text 3')
create_wiki_config(str(source_dir.join('test.cfg')),
None,
{'name': 'ns1',
'path': f'{source_dir.join("ns1")}',
'target': str(target_dir)})
wiki = Wiki(source_dir.join('test.cfg'))
wiki.process_namespaces()
expect1 = create_markdown_string({'title': 'Page One'},
'''Included Text 2
Included Text 3''')
assert os.path.exists(target_dir.join('ns1', 'page_one.md'))
with open(target_dir.join('ns1', 'page_one.md'), 'r', encoding='utf8') as fh:
actual1 = fh.read()
assert compare_markdown_content(expect1, actual1)
def test_process_file_includes_separator(tmpdir):
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('ns1')
target_dir = tmpdir.mkdir('target')
create_markdown_file(source_dir.join('ns1', 'file1.md'),
{'title': 'Page One'},
f'<<{source_dir.join("ns1", "fileX*.md|* * *")}>>')
create_markdown_file(source_dir.join('ns1', 'fileX2.md'),
{'title': 'Page Two'},
'Included Text 2')
create_markdown_file(source_dir.join('ns1', 'fileX3.md'),
{'title': 'Page Three'},
'Included Text 3')
create_wiki_config(str(source_dir.join('test.cfg')),
None,
{'name': 'ns1',
'path': f'{source_dir.join("ns1")}',
'target': str(target_dir)})
wiki = Wiki(source_dir.join('test.cfg'))
wiki.process_namespaces()
expect1 = create_markdown_string({'title': 'Page One'},
'''Included Text 2
* * *
Included Text 3''')
assert os.path.exists(target_dir.join('ns1', 'page_one.md'))
with open(target_dir.join('ns1', 'page_one.md'), 'r', encoding='utf8') as fh:
actual1 = fh.read()
assert compare_markdown_content(expect1, actual1)
def test_process_file_includes_line_prefix(tmpdir):
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('ns1')
target_dir = tmpdir.mkdir('target')
create_markdown_file(source_dir.join('ns1', 'file1.md'),
{'title': 'Page One'},
f'<<{source_dir.join("ns1", "file2.md")}||> >>')
create_markdown_file(source_dir.join('ns1', 'file2.md'),
{'title': 'Page Two'},
'''Included Text''')
create_wiki_config(str(source_dir.join('test.cfg')),
None,
{'name': 'ns1',
'path': f'{source_dir.join("ns1")}',
'target': str(target_dir)})
wiki = Wiki(source_dir.join('test.cfg'))
wiki.process_namespaces()
expect1 = create_markdown_string({'title': 'Page One'},
'''> Included Text''')
assert os.path.exists(target_dir.join('ns1', 'page_one.md'))
with open(target_dir.join('ns1', 'page_one.md'), 'r', encoding='utf8') as fh:
actual1 = fh.read()
assert compare_markdown_content(expect1, actual1)
def test_process_file_includes_separator_and_line_prefix(tmpdir):
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('ns1')
target_dir = tmpdir.mkdir('target')
create_markdown_file(source_dir.join('ns1', 'file1.md'),
{'title': 'Page One'},
f'<<{source_dir.join("ns1", "fileX*.md")}|* * *|> >>')
create_markdown_file(source_dir.join('ns1', 'fileX2.md'),
{'title': 'Page Two'},
'Included Text 2')
create_markdown_file(source_dir.join('ns1', 'fileX3.md'),
{'title': 'Page Three'},
'Included Text 3')
create_wiki_config(str(source_dir.join('test.cfg')),
None,
{'name': 'ns1',
'path': f'{source_dir.join("ns1")}',
'target': str(target_dir)})
wiki = Wiki(source_dir.join('test.cfg'))
wiki.process_namespaces()
expect1 = create_markdown_string({'title': 'Page One'},
'''> Included Text 2
* * *
> Included Text 3''')
assert os.path.exists(target_dir.join('ns1', 'page_one.md'))
with open(target_dir.join('ns1', 'page_one.md'), 'r', encoding='utf8') as fh:
actual1 = fh.read()
assert compare_markdown_content(expect1, actual1)
def test_process_file_includes_prefix_and_suffix(tmpdir):
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('ns1')
target_dir = tmpdir.mkdir('target')
create_markdown_file(source_dir.join('ns1', 'file1.md'),
{'title': 'Page One'},
f'<<{source_dir.join("ns1", "file2.md")}>>')
create_markdown_file(source_dir.join('ns1', 'file2.md'),
{'title': 'Page Two',
'prefix': '"The prefix line\n\n"',
'suffix': '"\n\nThe suffix line"'},
'Included Text')
create_wiki_config(str(source_dir.join('test.cfg')),
None,
{'name': 'ns1',
'path': f'{source_dir.join("ns1")}',
'target': str(target_dir)})
wiki = Wiki(source_dir.join('test.cfg'))
wiki.process_namespaces()
expect1 = create_markdown_string({'title': 'Page One'},
'''The prefix line
Included Text
The suffix line''')
assert os.path.exists(target_dir.join('ns1', 'page_one.md'))
with open(target_dir.join('ns1', 'page_one.md'), 'r', encoding='utf8') as fh:
actual1 = fh.read()
assert compare_markdown_content(expect1, actual1)
def test_process_file_includes_metadata_replace(tmpdir):
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('ns1')
target_dir = tmpdir.mkdir('target')
create_markdown_file(source_dir.join('ns1', 'file1.md'),
{'title': 'Page One'},
f'<<{source_dir.join("ns1", "file2.md")}>>')
create_markdown_file(source_dir.join('ns1', 'file2.md'),
{'title': 'Page Two'},
'Included page is ?{title}')
create_wiki_config(str(source_dir.join('test.cfg')),
None,
{'name': 'ns1',
'path': f'{source_dir.join("ns1")}',
'target': str(target_dir)})
wiki = Wiki(source_dir.join('test.cfg'))
wiki.process_namespaces()
expect1 = create_markdown_string({'title': 'Page One'},
'''Included page is Page Two''')
assert os.path.exists(target_dir.join('ns1', 'page_one.md'))
with open(target_dir.join('ns1', 'page_one.md'), 'r', encoding='utf8') as fh:
actual1 = fh.read()
assert compare_markdown_content(expect1, actual1)
def test_process_file_includes_metadata_replace_multi(tmpdir):
source_dir = tmpdir.mkdir('source')
source_dir.mkdir('ns1')
target_dir = tmpdir.mkdir('target')
create_markdown_file(source_dir.join('ns1', 'file1.md'),
{'title': 'Page One'},
f'<<{source_dir.join("ns1", "file2.md")}>>')
create_markdown_file(source_dir.join('ns1', 'file2.md'),
{'title': 'Page Two',
'subtitle': 'Second Page'},
'Included page is ?{title} with subtitle ?{subtitle}')
create_wiki_config(str(source_dir.join('test.cfg')),
None,
{'name': 'ns1',
'path': f'{source_dir.join("ns1")}',
'target': str(target_dir)})
wiki = Wiki(source_dir.join('test.cfg'))
wiki.process_namespaces()
expect1 = create_markdown_string({'title': 'Page One'},
'''Included page is Page Two with subtitle Second Page''')
assert os.path.exists(target_dir.join('ns1', 'page_one.md'))
with open(target_dir.join('ns1', 'page_one.md'), 'r', encoding='utf8') as fh:
actual1 = fh.read()
assert compare_markdown_content(expect1, actual1)
| 33.089457 | 108 | 0.534518 | 1,159 | 10,357 | 4.554789 | 0.062985 | 0.114226 | 0.125592 | 0.106081 | 0.938246 | 0.933321 | 0.933321 | 0.933321 | 0.933321 | 0.933321 | 0 | 0.02 | 0.309646 | 10,357 | 312 | 109 | 33.195513 | 0.718322 | 0 | 0 | 0.854271 | 0 | 0 | 0.201356 | 0.039094 | 0 | 0 | 0 | 0 | 0.080402 | 1 | 0.040201 | false | 0 | 0.015075 | 0 | 0.055276 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a94898877044a8e5124a5b94c784274af147dab3 | 28 | py | Python | Chapter-1-Introduction/ex1.py | ramyaburgula/spring2022 | a312e12efb4ea02f2fbdd273601e32135b2269c2 | [
"MIT"
] | null | null | null | Chapter-1-Introduction/ex1.py | ramyaburgula/spring2022 | a312e12efb4ea02f2fbdd273601e32135b2269c2 | [
"MIT"
] | null | null | null | Chapter-1-Introduction/ex1.py | ramyaburgula/spring2022 | a312e12efb4ea02f2fbdd273601e32135b2269c2 | [
"MIT"
] | null | null | null | print("CIS5755 Fall 2021")
| 14 | 27 | 0.714286 | 4 | 28 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 0.142857 | 28 | 1 | 28 | 28 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0.607143 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
a949120d99392178854d311ef7b3978b827d7acb | 1,653 | py | Python | src/mappings.py | AminKaram/BipolarABASolver | 3b858d8ea21ad9f39a393afbd5000932060d48a1 | [
"MIT"
] | 1 | 2021-08-09T10:48:36.000Z | 2021-08-09T10:48:36.000Z | src/mappings.py | AminKaram/BipolarABASolver | 3b858d8ea21ad9f39a393afbd5000932060d48a1 | [
"MIT"
] | 1 | 2020-03-30T03:23:33.000Z | 2020-03-30T10:23:47.000Z | src/mappings.py | AminKaram/FYP | 3b858d8ea21ad9f39a393afbd5000932060d48a1 | [
"MIT"
] | 2 | 2019-10-07T12:14:24.000Z | 2021-02-26T08:32:28.000Z | from src.bipolar_aba import BipolarABA, Rule
def map_baf_to_naba_framework(baf_framework):
'''
:param baf_framework: A BAF object.
:return: A BipolarABA object corresponding to the n-ABA framework of BAF in the spirit of [CST17]
'''
assumptions = set()
contraries = set()
rules = set()
assumptions_contrary_mapping = {}
for arg in baf_framework.arguments:
contraries.add(arg + '_contrary')
assumptions.add(arg)
assumptions_contrary_mapping[arg] = arg + '_contrary'
language = assumptions.union(contraries)
for attack in baf_framework.attacks:
rules.add(Rule(attack[0], attack[1] + '_contrary'))
for support in baf_framework.supports:
rules.add(Rule(support[1], support[0]))
return BipolarABA(language, rules, assumptions, assumptions_contrary_mapping)
def map_baf_to_daba_framework(baf_framework):
'''
:param baf_framework: A BAF object.
:return: A BipolarABA object corresponding to the d-ABA framework of BAF in the spirit of [CST17]
'''
assumptions = set()
contraries = set()
rules = set()
assumptions_contrary_mapping = {}
for arg in baf_framework.arguments:
contraries.add(arg + '_contrary')
assumptions.add(arg)
assumptions_contrary_mapping[arg] = arg + '_contrary'
language = assumptions.union(contraries)
for attack in baf_framework.attacks:
rules.add(Rule(attack[0], attack[1] + '_contrary'))
for support in baf_framework.supports:
rules.add(Rule(support[0], support[1]))
return BipolarABA(language, rules, assumptions, assumptions_contrary_mapping)
| 33.06 | 101 | 0.692075 | 203 | 1,653 | 5.453202 | 0.216749 | 0.108401 | 0.140921 | 0.019874 | 0.921409 | 0.921409 | 0.921409 | 0.921409 | 0.802168 | 0.802168 | 0 | 0.00916 | 0.207502 | 1,653 | 49 | 102 | 33.734694 | 0.835878 | 0.161525 | 0 | 0.83871 | 0 | 0 | 0.040089 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.064516 | false | 0 | 0.032258 | 0 | 0.16129 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
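A minimal sketch of how the two mapping functions above might be exercised. It assumes only what the functions themselves read from a BAF object (the `arguments`, `attacks`, and `supports` attributes); the `SimpleBAF` container is a hypothetical stand-in, not part of the repository.

from collections import namedtuple
from src.mappings import map_baf_to_naba_framework, map_baf_to_daba_framework

SimpleBAF = namedtuple('SimpleBAF', ['arguments', 'attacks', 'supports'])

baf = SimpleBAF(
    arguments={'a', 'b', 'c'},
    attacks={('a', 'b')},    # a attacks b
    supports={('a', 'c')},   # a supports c
)

# The two translations differ only in how supports become rules:
naba = map_baf_to_naba_framework(baf)  # support (a, c) -> Rule(c, a)
daba = map_baf_to_daba_framework(baf)  # support (a, c) -> Rule(a, c)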
8d6817e978f1952573c042ff3f00623123e33bcc | 8,575 | py | Python | lab-taxi/agent.py | dragonoken/deep-reinforcement-learning | c2b791ddf486dbe762ccde6938deba6b291e9aa7 | [
"MIT"
] | 1 | 2019-07-07T02:19:27.000Z | 2019-07-07T02:19:27.000Z | lab-taxi/agent.py | dragonoken/deep-reinforcement-learning | c2b791ddf486dbe762ccde6938deba6b291e9aa7 | [
"MIT"
] | null | null | null | lab-taxi/agent.py | dragonoken/deep-reinforcement-learning | c2b791ddf486dbe762ccde6938deba6b291e9aa7 | [
"MIT"
] | 1 | 2019-07-07T02:09:47.000Z | 2019-07-07T02:09:47.000Z | import numpy as np
import sys
from collections import defaultdict
class Agent_A:
def __init__(self, nA=6):
""" Initialize agent.
Params
======
- nA: number of actions available to the agent
"""
self.nA = nA
self.Q = defaultdict(lambda: np.zeros(self.nA))
self.mode = ['zero', 'max', 'expected'][2]
self.initial_alpha = 0.2
self.alpha = self.initial_alpha
self.min_alpha = 0.0001
self.final_alpha = None
self.alpha_decay_mode = ['linear', 'exponential'][0]
self.alpha_decay_duration = 100000
if self.alpha_decay_mode == 'linear':
self.alpha_decay_rate = (self.min_alpha - self.initial_alpha) / self.alpha_decay_duration
else:
self.alpha_decay_factor = (self.min_alpha / self.initial_alpha) ** (1 / self.alpha_decay_duration)
self.initial_epsilon = 1.0
self.epsilon = self.initial_epsilon
self.min_epsilon = 0.000001
self.final_epsilon = None
self.epsilon_decay_mode = ['linear', 'exponential'][1]
self.epsilon_decay_duration = 100000
if self.epsilon_decay_mode == 'linear':
self.epsilon_decay_rate = (self.min_epsilon - self.initial_epsilon) / self.epsilon_decay_duration
else:
self.epsilon_decay_factor = (self.min_epsilon / self.initial_epsilon) ** (1 / self.epsilon_decay_duration)
self.reached_min_alpha = (self.alpha <= self.min_alpha)
self.reached_min_epsilon = (self.epsilon <= self.min_epsilon)
self.gamma = 1.0
def select_action(self, state):
""" Given the state, select an action.
Params
======
- state: the current state of the environment
Returns
=======
- action: an integer, compatible with the task's action space
"""
action_values = self.Q[state]
is_max = np.equal(action_values, action_values.max())
probs = np.full(self.nA, self.epsilon / self.nA) + is_max * (1 - self.epsilon) / sum(is_max)
action = np.random.choice(self.nA, p=probs)
return action
def step(self, state, action, reward, next_state, done):
""" Update the agent's knowledge, using the most recently sampled tuple.
Params
======
- state: the previous state of the environment
- action: the agent's previous choice of action
- reward: last reward received
- next_state: the current state of the environment
- done: whether the episode is complete (True or False)
"""
action_values = self.Q[next_state]
is_max = np.equal(action_values, action_values.max())
probs = np.full(self.nA, self.epsilon / self.nA) + is_max * (1 - self.epsilon) / sum(is_max)
if self.mode == 'zero':
expected_return = np.random.choice(action_values, p=probs)
elif self.mode == 'max':
expected_return = np.max(action_values)
else:
expected_return = np.sum(np.multiply(probs, action_values))
self.Q[state][action] += self.alpha * (reward + self.gamma * expected_return - self.Q[state][action])
if done:
if not self.reached_min_alpha:
if self.alpha_decay_mode == 'linear':
self.alpha += self.alpha_decay_rate
elif self.alpha_decay_mode == 'exponential':
self.alpha *= self.alpha_decay_factor
else:
raise RuntimeError("Invalid Mode: {}".format(self.alpha_decay_mode))
if self.alpha <= self.min_alpha:
self.reached_min_alpha = True
if self.final_alpha is None:
self.alpha = self.min_alpha
else:
self.alpha = self.final_alpha
if not self.reached_min_epsilon:
if self.epsilon_decay_mode == 'linear':
self.epsilon += self.epsilon_decay_rate
elif self.epsilon_decay_mode == 'exponential':
self.epsilon *= self.epsilon_decay_factor
else:
raise RuntimeError("Invalid Mode: {}".format(self.epsilon_decay_mode))
if self.epsilon <= self.min_epsilon:
self.reached_min_epsilon = True
if self.final_epsilon is None:
self.epsilon = self.min_epsilon
else:
self.epsilon = self.final_epsilon
class Agent_B:
def __init__(self, nA=6):
""" Initialize agent.
Params
======
- nA: number of actions available to the agent
"""
self.nA = nA
self.Q = defaultdict(lambda: np.zeros(self.nA))
self.initial_alpha = 0.3
self.alpha = self.initial_alpha
self.min_alpha = 0.001
self.alpha_decay_mode = ['linear', 'exponential'][0]
self.alpha_decay_duration = 20000
if self.alpha_decay_mode == 'linear':
self.alpha_decay_rate = (self.min_alpha - self.initial_alpha) / self.alpha_decay_duration
else:
self.alpha_decay_factor = (self.min_alpha / self.initial_alpha) ** (1 / self.alpha_decay_duration)
self.initial_epsilon = 1.0
        self.epsilon = self.initial_epsilon
self.min_epsilon = 0.00001
self.epsilon_decay_mode = ['linear', 'exponential'][1]
self.epsilon_decay_duration = 20000
if self.epsilon_decay_mode == 'linear':
self.epsilon_decay_rate = (self.min_epsilon - self.initial_epsilon) / self.epsilon_decay_duration
else:
self.epsilon_decay_factor = (self.min_epsilon / self.initial_epsilon) ** (1 / self.epsilon_decay_duration)
self.reached_min_alpha = (self.alpha <= self.min_alpha)
self.reached_min_epsilon = (self.epsilon <= self.min_epsilon)
def select_action(self, state):
""" Given the state, select an action.
Params
======
- state: the current state of the environment
Returns
=======
- action: an integer, compatible with the task's action space
"""
action_values = self.Q[state]
is_max = np.equal(action_values, action_values.max())
probs = np.full(self.nA, self.epsilon / self.nA) + is_max * (1 - self.epsilon) / sum(is_max)
action = np.random.choice(self.nA, p=probs)
return action
def step(self, state, action, reward, next_state, done):
""" Update the agent's knowledge, using the most recently sampled tuple.
Params
======
- state: the previous state of the environment
- action: the agent's previous choice of action
- reward: last reward received
- next_state: the current state of the environment
- done: whether the episode is complete (True or False)
"""
action_values = self.Q[next_state]
is_max = np.equal(action_values, action_values.max())
probs = np.full(self.nA, self.epsilon / self.nA) + is_max * (1 - self.epsilon) / sum(is_max)
self.Q[state][action] += self.alpha * (reward + np.sum(probs * action_values) - self.Q[state][action])
if done:
if not self.reached_min_alpha:
if self.alpha_decay_mode == 'linear':
self.alpha += self.alpha_decay_rate
elif self.alpha_decay_mode == 'exponential':
self.alpha *= self.alpha_decay_factor
else:
raise RuntimeError("Invalid Mode: {}".format(self.alpha_decay_mode))
if self.alpha <= self.min_alpha:
self.reached_min_alpha = True
self.alpha = self.min_alpha
if not self.reached_min_epsilon:
if self.epsilon_decay_mode == 'linear':
self.epsilon += self.epsilon_decay_rate
elif self.epsilon_decay_mode == 'exponential':
self.epsilon *= self.epsilon_decay_factor
else:
raise RuntimeError("Invalid Mode: {}".format(self.epsilon_decay_mode))
if self.epsilon <= self.min_epsilon:
self.reached_min_epsilon = True
self.epsilon = self.min_epsilon
Agent = Agent_A
| 40.258216 | 118 | 0.578192 | 1,015 | 8,575 | 4.670936 | 0.107389 | 0.104408 | 0.070871 | 0.037967 | 0.896857 | 0.869015 | 0.868171 | 0.847922 | 0.847922 | 0.831892 | 0 | 0.011868 | 0.321983 | 8,575 | 212 | 119 | 40.448113 | 0.803578 | 0.133294 | 0 | 0.75 | 0 | 0 | 0.034721 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0 | 0.022727 | 0 | 0.098485 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
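A minimal training-loop sketch for the agents above, assuming the classic OpenAI Gym `Taxi-v3` environment and the pre-0.26 `reset`/`step` API (adjust the unpacking for newer gym/gymnasium releases); `agent` refers to the module shown above.

import gym
from agent import Agent  # Agent is aliased to Agent_A at the bottom of the file

env = gym.make('Taxi-v3')
agent = Agent(nA=env.action_space.n)

for episode in range(20000):
    state = env.reset()
    done = False
    while not done:
        action = agent.select_action(state)
        next_state, reward, done, _ = env.step(action)
        agent.step(state, action, reward, next_state, done)
        state = next_state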
8da7fcb89cb9b5afcf8637d3035d7e33dcba8274 | 2,401 | py | Python | tests/test_agents_common_is_result_better.py | InesVogel/Connect4 | 9528115515fb33d107ebc26d4141a1d3effdca5e | [
"MIT"
] | null | null | null | tests/test_agents_common_is_result_better.py | InesVogel/Connect4 | 9528115515fb33d107ebc26d4141a1d3effdca5e | [
"MIT"
] | null | null | null | tests/test_agents_common_is_result_better.py | InesVogel/Connect4 | 9528115515fb33d107ebc26d4141a1d3effdca5e | [
"MIT"
] | null | null | null | import math
from agents.common import PLAYER1, PLAYER2, DEPTH, is_result_better
def test_is_result_better_PLAYER1_true():
best_score = -math.inf
best_num_moves = DEPTH
tmp_score = 1000
tmp_num_moves = DEPTH
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER1) == True
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER2) == False
def test_is_result_better_PLAYER2_true():
best_score = math.inf
best_num_moves = DEPTH
tmp_score = -1000
tmp_num_moves = DEPTH
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER2) == True
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER1) == False
def test_is_result_better_PLAYER1_tmpScoreLower_numMovesEqual_false():
best_score = 1000
best_num_moves = DEPTH
tmp_score = 999
tmp_num_moves = DEPTH
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER1) == False
def test_is_result_better_PLAYER2_tmpScoreHigher_numMovesEqual_false():
best_score = 999
best_num_moves = DEPTH
tmp_score = 1000
tmp_num_moves = DEPTH
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER2) == False
def test_is_result_better_scoresEqual_numMovesEqual_false():
best_score = 1000
best_num_moves = DEPTH
tmp_score = 1000
tmp_num_moves = DEPTH
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER1) == False
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER2) == False
# TODO: figure out if needed
def test_is_result_better_scoresEqual_numMovesLower_true():
best_score = 1000
best_num_moves = 4
tmp_score = 1000
tmp_num_moves = 3
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER1) == True
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER2) == True
def test_is_result_better_scoresEqual_numMovesHigher_false():
best_score = 1000
best_num_moves = 4
tmp_score = 1000
tmp_num_moves = 5
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER1) == False
assert is_result_better(best_score, best_num_moves, tmp_score, tmp_num_moves, PLAYER2) == False
| 32.890411 | 99 | 0.772595 | 365 | 2,401 | 4.583562 | 0.10411 | 0.18171 | 0.167364 | 0.143455 | 0.893007 | 0.893007 | 0.827256 | 0.8159 | 0.8159 | 0.8159 | 0 | 0.033831 | 0.162849 | 2,401 | 72 | 100 | 33.347222 | 0.798507 | 0.010829 | 0 | 0.673469 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013889 | 0.244898 | 1 | 0.142857 | false | 0 | 0.040816 | 0 | 0.183673 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
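The assertions above pin down the comparison semantics: PLAYER1 prefers higher scores, PLAYER2 lower ones, and on equal scores both prefer fewer moves. A sketch consistent with those tests follows; the real implementation lives in `agents.common` and may differ in detail.

from agents.common import PLAYER1

def is_result_better_sketch(best_score, best_num_moves, tmp_score, tmp_num_moves, player):
    if tmp_score == best_score:
        # Equal scores: prefer the result reached in fewer moves.
        return tmp_num_moves < best_num_moves
    if player == PLAYER1:              # PLAYER1 maximizes the score ...
        return tmp_score > best_score
    return tmp_score < best_score      # ... PLAYER2 minimizes it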
a5deee6566dcbb865222cdb20269547218779cc2 | 1,895 | py | Python | state_formatters/output_formatters.py | oserikov/dream | 109ba2df799025dcdada1fddbb7380e1c03100eb | [
"Apache-2.0"
] | 34 | 2021-08-18T14:51:44.000Z | 2022-03-10T14:14:48.000Z | state_formatters/output_formatters.py | oserikov/dream | 109ba2df799025dcdada1fddbb7380e1c03100eb | [
"Apache-2.0"
] | 27 | 2021-08-30T14:42:09.000Z | 2022-03-17T22:11:45.000Z | state_formatters/output_formatters.py | oserikov/dream | 109ba2df799025dcdada1fddbb7380e1c03100eb | [
"Apache-2.0"
] | 40 | 2021-08-22T07:13:32.000Z | 2022-03-29T11:45:32.000Z | from typing import Dict
import logging
import difflib
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def http_api_output_formatter(payload: Dict):
response = payload["utterances"][-1]["text"]
active_skill = payload["utterances"][-1]["active_skill"]
ssml_tagged_response = []
for hyp in payload["utterances"][-2]["hypotheses"]:
if hyp.get("skill_name") == active_skill and hyp.get("ssml_tagged_text"):
if difflib.SequenceMatcher(None, hyp.get("text", "").split(), response.split()).ratio() > 0.9:
ssml_tagged_response.append(hyp["ssml_tagged_text"])
ssml_tagged_response = ssml_tagged_response[-1] if ssml_tagged_response else ""
ret_val = {
"user_id": payload["human"]["user_telegram_id"],
"response": response,
"ssml_tagged_response": ssml_tagged_response,
"active_skill": active_skill,
}
logger.info(f"http api output {ret_val}")
return ret_val
def http_debug_output_formatter(payload: Dict):
response = payload["utterances"][-1]["text"]
active_skill = payload["utterances"][-1]["active_skill"]
ssml_tagged_response = []
for hyp in payload["utterances"][-2]["hypotheses"]:
if hyp.get("skill_name") == active_skill and hyp.get("ssml_tagged_text"):
if difflib.SequenceMatcher(None, hyp.get("text", "").split(), response.split()).ratio() > 0.9:
ssml_tagged_response.append(hyp["ssml_tagged_text"])
ssml_tagged_response = ssml_tagged_response[-1] if ssml_tagged_response else ""
ret_val = {
"user_id": payload["human"]["user_telegram_id"],
"response": response,
"active_skill": active_skill,
"ssml_tagged_response": ssml_tagged_response,
"debug_output": payload["utterances"][-2]["hypotheses"],
}
logger.info(f"http api output {ret_val}")
return ret_val
| 39.479167 | 106 | 0.669657 | 236 | 1,895 | 5.076271 | 0.211864 | 0.15025 | 0.210351 | 0.108514 | 0.868114 | 0.831386 | 0.771285 | 0.771285 | 0.771285 | 0.771285 | 0 | 0.008398 | 0.183113 | 1,895 | 47 | 107 | 40.319149 | 0.765504 | 0 | 0 | 0.75 | 0 | 0 | 0.222691 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.075 | 0 | 0.175 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
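An illustrative payload for the formatters above; the field values are made up, only the shape matters (the last utterance holds the chosen response, the second-to-last holds the skill hypotheses).

from state_formatters.output_formatters import http_api_output_formatter

payload = {
    "human": {"user_telegram_id": "12345"},
    "utterances": [
        {"hypotheses": [{"skill_name": "dummy_skill",
                         "text": "hello there",
                         "ssml_tagged_text": "<speak>hello there</speak>"}]},
        {"text": "hello there", "active_skill": "dummy_skill"},
    ],
}

print(http_api_output_formatter(payload))
# {'user_id': '12345', 'response': 'hello there',
#  'ssml_tagged_response': '<speak>hello there</speak>', 'active_skill': 'dummy_skill'}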
a5ecae9a8751653dda4ffc416bfeb54fab1fa0f4 | 16,621 | py | Python | sdk/python/pulumi_gcp/organizations/iam_audit_config.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 121 | 2018-06-18T19:16:42.000Z | 2022-03-31T06:06:48.000Z | sdk/python/pulumi_gcp/organizations/iam_audit_config.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 492 | 2018-06-22T19:41:03.000Z | 2022-03-31T15:33:53.000Z | sdk/python/pulumi_gcp/organizations/iam_audit_config.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 43 | 2018-06-19T01:43:13.000Z | 2022-03-23T22:43:37.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['IamAuditConfigArgs', 'IamAuditConfig']
@pulumi.input_type
class IamAuditConfigArgs:
def __init__(__self__, *,
audit_log_configs: pulumi.Input[Sequence[pulumi.Input['IamAuditConfigAuditLogConfigArgs']]],
org_id: pulumi.Input[str],
service: pulumi.Input[str]):
"""
The set of arguments for constructing a IamAuditConfig resource.
:param pulumi.Input[Sequence[pulumi.Input['IamAuditConfigAuditLogConfigArgs']]] audit_log_configs: The configuration for logging of each type of permission. This can be specified multiple times. Structure is documented below.
:param pulumi.Input[str] org_id: The numeric ID of the organization in which you want to manage the audit logging config.
:param pulumi.Input[str] service: Service which will be enabled for audit logging. The special value `allServices` covers all services. Note that if there are google\_organization\_iam\_audit\_config resources covering both `allServices` and a specific service then the union of the two AuditConfigs is used for that service: the `log_types` specified in each `audit_log_config` are enabled, and the `exempted_members` in each `audit_log_config` are exempted.
"""
pulumi.set(__self__, "audit_log_configs", audit_log_configs)
pulumi.set(__self__, "org_id", org_id)
pulumi.set(__self__, "service", service)
@property
@pulumi.getter(name="auditLogConfigs")
def audit_log_configs(self) -> pulumi.Input[Sequence[pulumi.Input['IamAuditConfigAuditLogConfigArgs']]]:
"""
The configuration for logging of each type of permission. This can be specified multiple times. Structure is documented below.
"""
return pulumi.get(self, "audit_log_configs")
@audit_log_configs.setter
def audit_log_configs(self, value: pulumi.Input[Sequence[pulumi.Input['IamAuditConfigAuditLogConfigArgs']]]):
pulumi.set(self, "audit_log_configs", value)
@property
@pulumi.getter(name="orgId")
def org_id(self) -> pulumi.Input[str]:
"""
The numeric ID of the organization in which you want to manage the audit logging config.
"""
return pulumi.get(self, "org_id")
@org_id.setter
def org_id(self, value: pulumi.Input[str]):
pulumi.set(self, "org_id", value)
@property
@pulumi.getter
def service(self) -> pulumi.Input[str]:
"""
Service which will be enabled for audit logging. The special value `allServices` covers all services. Note that if there are google\_organization\_iam\_audit\_config resources covering both `allServices` and a specific service then the union of the two AuditConfigs is used for that service: the `log_types` specified in each `audit_log_config` are enabled, and the `exempted_members` in each `audit_log_config` are exempted.
"""
return pulumi.get(self, "service")
@service.setter
def service(self, value: pulumi.Input[str]):
pulumi.set(self, "service", value)
@pulumi.input_type
class _IamAuditConfigState:
def __init__(__self__, *,
audit_log_configs: Optional[pulumi.Input[Sequence[pulumi.Input['IamAuditConfigAuditLogConfigArgs']]]] = None,
etag: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[str]] = None,
service: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering IamAuditConfig resources.
:param pulumi.Input[Sequence[pulumi.Input['IamAuditConfigAuditLogConfigArgs']]] audit_log_configs: The configuration for logging of each type of permission. This can be specified multiple times. Structure is documented below.
:param pulumi.Input[str] etag: The etag of iam policy
:param pulumi.Input[str] org_id: The numeric ID of the organization in which you want to manage the audit logging config.
:param pulumi.Input[str] service: Service which will be enabled for audit logging. The special value `allServices` covers all services. Note that if there are google\_organization\_iam\_audit\_config resources covering both `allServices` and a specific service then the union of the two AuditConfigs is used for that service: the `log_types` specified in each `audit_log_config` are enabled, and the `exempted_members` in each `audit_log_config` are exempted.
"""
if audit_log_configs is not None:
pulumi.set(__self__, "audit_log_configs", audit_log_configs)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if org_id is not None:
pulumi.set(__self__, "org_id", org_id)
if service is not None:
pulumi.set(__self__, "service", service)
@property
@pulumi.getter(name="auditLogConfigs")
def audit_log_configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IamAuditConfigAuditLogConfigArgs']]]]:
"""
The configuration for logging of each type of permission. This can be specified multiple times. Structure is documented below.
"""
return pulumi.get(self, "audit_log_configs")
@audit_log_configs.setter
def audit_log_configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IamAuditConfigAuditLogConfigArgs']]]]):
pulumi.set(self, "audit_log_configs", value)
@property
@pulumi.getter
def etag(self) -> Optional[pulumi.Input[str]]:
"""
The etag of iam policy
"""
return pulumi.get(self, "etag")
@etag.setter
def etag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "etag", value)
@property
@pulumi.getter(name="orgId")
def org_id(self) -> Optional[pulumi.Input[str]]:
"""
The numeric ID of the organization in which you want to manage the audit logging config.
"""
return pulumi.get(self, "org_id")
@org_id.setter
def org_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "org_id", value)
@property
@pulumi.getter
def service(self) -> Optional[pulumi.Input[str]]:
"""
Service which will be enabled for audit logging. The special value `allServices` covers all services. Note that if there are google\_organization\_iam\_audit\_config resources covering both `allServices` and a specific service then the union of the two AuditConfigs is used for that service: the `log_types` specified in each `audit_log_config` are enabled, and the `exempted_members` in each `audit_log_config` are exempted.
"""
return pulumi.get(self, "service")
@service.setter
def service(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service", value)
class IamAuditConfig(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
audit_log_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IamAuditConfigAuditLogConfigArgs']]]]] = None,
org_id: Optional[pulumi.Input[str]] = None,
service: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Allows management of audit logging config for a given service for a Google Cloud Platform Organization.
## Example Usage
```python
import pulumi
import pulumi_gcp as gcp
config = gcp.organizations.IamAuditConfig("config",
audit_log_configs=[gcp.organizations.IamAuditConfigAuditLogConfigArgs(
exempted_members=["user:joebloggs@hashicorp.com"],
log_type="DATA_READ",
)],
org_id="your-organization-id",
service="allServices")
```
## Import
IAM audit config imports use the identifier of the resource in question and the service, e.g.
```sh
$ pulumi import gcp:organizations/iamAuditConfig:IamAuditConfig config "your-organization-id foo.googleapis.com"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IamAuditConfigAuditLogConfigArgs']]]] audit_log_configs: The configuration for logging of each type of permission. This can be specified multiple times. Structure is documented below.
:param pulumi.Input[str] org_id: The numeric ID of the organization in which you want to manage the audit logging config.
:param pulumi.Input[str] service: Service which will be enabled for audit logging. The special value `allServices` covers all services. Note that if there are google\_organization\_iam\_audit\_config resources covering both `allServices` and a specific service then the union of the two AuditConfigs is used for that service: the `log_types` specified in each `audit_log_config` are enabled, and the `exempted_members` in each `audit_log_config` are exempted.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: IamAuditConfigArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Allows management of audit logging config for a given service for a Google Cloud Platform Organization.
## Example Usage
```python
import pulumi
import pulumi_gcp as gcp
config = gcp.organizations.IamAuditConfig("config",
audit_log_configs=[gcp.organizations.IamAuditConfigAuditLogConfigArgs(
exempted_members=["user:joebloggs@hashicorp.com"],
log_type="DATA_READ",
)],
org_id="your-organization-id",
service="allServices")
```
## Import
IAM audit config imports use the identifier of the resource in question and the service, e.g.
```sh
$ pulumi import gcp:organizations/iamAuditConfig:IamAuditConfig config "your-organization-id foo.googleapis.com"
```
:param str resource_name: The name of the resource.
:param IamAuditConfigArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(IamAuditConfigArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
audit_log_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IamAuditConfigAuditLogConfigArgs']]]]] = None,
org_id: Optional[pulumi.Input[str]] = None,
service: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = IamAuditConfigArgs.__new__(IamAuditConfigArgs)
if audit_log_configs is None and not opts.urn:
raise TypeError("Missing required property 'audit_log_configs'")
__props__.__dict__["audit_log_configs"] = audit_log_configs
if org_id is None and not opts.urn:
raise TypeError("Missing required property 'org_id'")
__props__.__dict__["org_id"] = org_id
if service is None and not opts.urn:
raise TypeError("Missing required property 'service'")
__props__.__dict__["service"] = service
__props__.__dict__["etag"] = None
super(IamAuditConfig, __self__).__init__(
'gcp:organizations/iamAuditConfig:IamAuditConfig',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
audit_log_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IamAuditConfigAuditLogConfigArgs']]]]] = None,
etag: Optional[pulumi.Input[str]] = None,
org_id: Optional[pulumi.Input[str]] = None,
service: Optional[pulumi.Input[str]] = None) -> 'IamAuditConfig':
"""
Get an existing IamAuditConfig resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IamAuditConfigAuditLogConfigArgs']]]] audit_log_configs: The configuration for logging of each type of permission. This can be specified multiple times. Structure is documented below.
:param pulumi.Input[str] etag: The etag of iam policy
:param pulumi.Input[str] org_id: The numeric ID of the organization in which you want to manage the audit logging config.
:param pulumi.Input[str] service: Service which will be enabled for audit logging. The special value `allServices` covers all services. Note that if there are google\_organization\_iam\_audit\_config resources covering both `allServices` and a specific service then the union of the two AuditConfigs is used for that service: the `log_types` specified in each `audit_log_config` are enabled, and the `exempted_members` in each `audit_log_config` are exempted.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _IamAuditConfigState.__new__(_IamAuditConfigState)
__props__.__dict__["audit_log_configs"] = audit_log_configs
__props__.__dict__["etag"] = etag
__props__.__dict__["org_id"] = org_id
__props__.__dict__["service"] = service
return IamAuditConfig(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="auditLogConfigs")
def audit_log_configs(self) -> pulumi.Output[Sequence['outputs.IamAuditConfigAuditLogConfig']]:
"""
The configuration for logging of each type of permission. This can be specified multiple times. Structure is documented below.
"""
return pulumi.get(self, "audit_log_configs")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
The etag of iam policy
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="orgId")
def org_id(self) -> pulumi.Output[str]:
"""
The numeric ID of the organization in which you want to manage the audit logging config.
"""
return pulumi.get(self, "org_id")
@property
@pulumi.getter
def service(self) -> pulumi.Output[str]:
"""
Service which will be enabled for audit logging. The special value `allServices` covers all services. Note that if there are google\_organization\_iam\_audit\_config resources covering both `allServices` and a specific service then the union of the two AuditConfigs is used for that service: the `log_types` specified in each `audit_log_config` are enabled, and the `exempted_members` in each `audit_log_config` are exempted.
"""
return pulumi.get(self, "service")
| 51.778816 | 469 | 0.681307 | 2,020 | 16,621 | 5.39901 | 0.09802 | 0.062534 | 0.046763 | 0.032276 | 0.826976 | 0.809004 | 0.786723 | 0.766275 | 0.749771 | 0.729048 | 0 | 0.000078 | 0.230552 | 16,621 | 320 | 470 | 51.940625 | 0.852686 | 0.466217 | 0 | 0.518072 | 1 | 0 | 0.129538 | 0.046121 | 0 | 0 | 0 | 0 | 0 | 1 | 0.150602 | false | 0.006024 | 0.042169 | 0 | 0.283133 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
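A short sketch of adopting an existing audit config with the `get` method defined above, using the same "<org-id> <service>" identifier format as the import example in the docstrings; the organization id is a placeholder.

import pulumi
import pulumi_gcp as gcp

existing = gcp.organizations.IamAuditConfig.get(
    "existing-config",
    id="your-organization-id foo.googleapis.com",
)
pulumi.export("etag", existing.etag)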
573a2d0a47dfb91ab469b1b58b576760bdd862fd | 2,571 | py | Python | bayesian_optimization/fully_train_ENAS.py | jojker/D-VAE | 2ff77caae4078bbb7ac9c5e0a506d32d0180507d | [
"MIT"
] | 95 | 2019-05-14T20:55:26.000Z | 2022-03-26T13:32:42.000Z | bayesian_optimization/fully_train_ENAS.py | jojker/D-VAE | 2ff77caae4078bbb7ac9c5e0a506d32d0180507d | [
"MIT"
] | 7 | 2019-11-25T08:24:47.000Z | 2021-09-12T13:29:14.000Z | bayesian_optimization/fully_train_ENAS.py | jojker/D-VAE | 2ff77caae4078bbb7ac9c5e0a506d32d0180507d | [
"MIT"
] | 24 | 2019-05-14T20:55:38.000Z | 2022-01-16T11:29:39.000Z | import os
import pdb
import numpy as np
gpu_id = 0
# S-VAE
arcs_scores = '''
5 4 0 5 0 0 5 0 0 1 5 1 0 0 0 5 0 0 1 1 0 0.7502
5 5 0 4 0 0 5 0 0 1 2 1 0 0 0 4 0 0 1 1 0 0.7502
5 4 0 5 0 0 4 0 0 1 5 1 0 0 0 5 0 0 1 1 0 0.7502
4 5 0 5 0 0 2 0 0 1 5 1 0 0 0 5 0 0 1 1 0 0.7502
5 4 0 5 0 0 1 0 0 1 5 1 0 0 0 4 0 0 1 1 0 0.7502
4 4 0 5 0 0 4 0 0 1 5 1 0 0 0 4 0 0 1 1 0 0.7502
4 5 0 5 0 0 2 0 0 1 2 1 0 0 0 2 0 0 1 1 0 0.7502
4 2 0 1 0 0 4 0 0 1 2 1 0 0 0 5 1 1 1 1 0 0.75
4 5 0 1 0 0 5 1 0 1 5 0 1 0 0 3 1 0 0 1 0 0.75
5 4 0 4 0 0 2 0 0 1 2 1 0 0 0 2 0 0 0 1 0 0.7498
5 5 0 4 0 0 2 0 0 1 5 1 0 0 0 4 0 0 0 1 0 0.7498
4 5 0 5 0 0 0 0 0 1 4 0 0 0 0 4 1 0 0 1 0 0.7498
5 4 0 5 0 0 5 0 0 1 5 1 0 0 0 4 0 0 0 1 0 0.7498
5 5 0 4 0 0 2 0 0 1 4 0 0 0 0 4 1 0 0 1 0 0.7498
5 5 0 4 0 0 5 0 0 1 5 0 0 0 0 5 1 0 0 1 0 0.7498
'''
# D-VAE (note: this second assignment overwrites the S-VAE list above; comment
# it out to evaluate the S-VAE architectures instead)
arcs_scores = '''
5 4 0 2 0 1 5 1 0 0 5 1 0 1 0 2 0 0 0 1 0 0.7516
4 1 0 2 0 1 2 1 0 0 5 1 0 1 0 2 0 0 0 1 0 0.7516
5 4 0 0 0 1 5 1 0 0 2 1 0 1 0 2 0 0 0 1 0 0.7516
3 4 0 5 0 0 4 1 0 1 2 0 0 0 0 0 1 0 0 0 0 0.7502
3 4 0 5 0 0 5 1 0 1 2 0 0 0 0 2 1 0 0 0 0 0.7502
3 0 0 2 0 0 5 1 0 1 0 0 0 0 0 2 1 0 0 0 0 0.7502
3 2 0 5 0 0 4 1 0 1 0 0 0 0 0 2 1 0 0 0 0 0.7502
1 5 0 5 0 0 4 1 0 1 0 0 0 0 0 2 1 0 0 0 0 0.7502
0 3 0 2 0 0 1 1 0 1 0 0 0 0 0 5 1 0 0 0 0 0.7502
5 5 0 3 0 0 1 0 0 1 5 1 0 0 0 5 0 0 1 1 0 0.7502
1 5 0 2 0 0 5 1 0 1 0 0 0 0 0 2 1 0 0 0 0 0.7502
3 1 0 2 0 0 2 1 0 1 2 0 0 0 0 2 1 0 0 0 0 0.7502
0 2 0 5 0 0 5 1 0 1 0 0 0 0 0 2 1 0 0 0 0 0.7502
0 0 0 2 0 0 4 1 0 1 2 0 0 0 0 2 1 0 0 0 0 0.7502
3 0 0 4 0 0 0 1 0 1 0 0 0 0 0 2 1 0 0 0 0 0.7502
'''
enas_pos = '../software/enas/'
arcs = [x.split('.')[0][:-2] for x in arcs_scores.strip().split('\n')]
print(arcs)
scores = []
for arc in arcs:
print('Fully training ENAS architecture ' + arc)
save_appendix = ''.join(arc.split())
if not os.path.exists(enas_pos + 'outputs_' + save_appendix):
pwd = os.getcwd()
os.chdir(enas_pos)
os.system('CUDA_VISABLE_DEVICES={} ./scripts/custom_cifar10_macro_final_6.sh'.format(gpu_id) + ' "' + arc + '" ' + save_appendix)
os.chdir(pwd)
with open(enas_pos + 'outputs_' + save_appendix + '/stdout', 'r') as f:
last_line = f.readlines()[-1]
scores.append(last_line)
new_arcs_scores = [x+' '+y for x, y in zip(arcs_scores.strip().split('\n'), scores)]
new_arcs_scores = ''.join(new_arcs_scores)
print()
print('Fully trained architecture, WS acc, and test acc:')
print(new_arcs_scores)
print('Average score is {}'.format(np.mean([float(x.split()[1]) for x in scores])))
pdb.set_trace()
| 33.828947 | 137 | 0.576429 | 857 | 2,571 | 1.690782 | 0.098016 | 0.298137 | 0.192547 | 0.126984 | 0.601794 | 0.52588 | 0.478951 | 0.454796 | 0.410628 | 0.410628 | 0 | 0.457944 | 0.334111 | 2,571 | 75 | 138 | 34.28 | 0.388435 | 0.004278 | 0 | 0.066667 | 0 | 0.5 | 0.658975 | 0.025029 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.05 | 0 | 0.05 | 0.1 | 0 | 0 | 1 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
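For reference, how the list comprehension above strips the trailing score from each row: everything before the first '.' is kept, then the last two characters (the space and the score's integer part) are dropped.

row = '5 4 0 2 0 1 5 1 0 0 5 1 0 1 0 2 0 0 0 1 0 0.7516'
arc = row.split('.')[0][:-2]
print(arc)  # '5 4 0 2 0 1 5 1 0 0 5 1 0 1 0 2 0 0 0 1 0'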
575a316cf01a5089aa362a824cd0d0ae8abdf3ee | 2,184 | py | Python | test/test_util.py | afourney/pyra | 245f9d4ce5db8810f4a2456afc64e7ba208484a1 | [
"BSD-2-Clause"
] | null | null | null | test/test_util.py | afourney/pyra | 245f9d4ce5db8810f4a2456afc64e7ba208484a1 | [
"BSD-2-Clause"
] | null | null | null | test/test_util.py | afourney/pyra | 245f9d4ce5db8810f4a2456afc64e7ba208484a1 | [
"BSD-2-Clause"
] | 1 | 2020-01-02T19:06:20.000Z | 2020-01-02T19:06:20.000Z | # Load what we actually need to run the tests
import unittest
import pyra.util as util
class TestUtils(unittest.TestCase):
def setUp(self):
pass
def test_binary_search(self):
s = range(1,100,2)
        # Extremities
self.assertEqual( util.binary_search(s, -1), 0 )
self.assertEqual( util.binary_search(s, 0), 0 )
self.assertEqual( util.binary_search(s, 1), 0 )
self.assertEqual( util.binary_search(s, 2), 1 )
l = len(s)
self.assertEqual( util.binary_search(s, 101), l )
self.assertEqual( util.binary_search(s, 100), l )
self.assertEqual( util.binary_search(s, 99), l-1 )
self.assertEqual( util.binary_search(s, 98), l-1 )
# Inside
for i in range(0,len(s)):
# Things that are found
self.assertEqual( util.binary_search(s, s[i]), i )
# Things that are not found
self.assertEqual( util.binary_search(s, s[i]-1), i )
def test_galloping_search(self):
s = range(1,100,2)
        # Extremities
self.assertEqual( util.galloping_search(s, -1), 0 )
self.assertEqual( util.galloping_search(s, 0), 0 )
self.assertEqual( util.galloping_search(s, 1), 0 )
self.assertEqual( util.galloping_search(s, 2), 1 )
l = len(s)
self.assertEqual( util.galloping_search(s, 101), l )
self.assertEqual( util.galloping_search(s, 100), l )
self.assertEqual( util.galloping_search(s, 99), l-1 )
self.assertEqual( util.galloping_search(s, 98), l-1 )
# Inside
for i in range(0,len(s)):
# Things that are found
self.assertEqual( util.galloping_search(s, s[i]), i )
# Things that are not found
self.assertEqual( util.galloping_search(s, s[i]-1), i )
# Hints
for i in range(0,len(s)):
for j in range(0,len(s)):
# Things that are found
                self.assertEqual( util.galloping_search(s, s[i], j), i )
                # Things that are not found
                self.assertEqual( util.galloping_search(s, s[i]-1, j), i )
| 33.090909 | 74 | 0.573718 | 301 | 2,184 | 4.076412 | 0.162791 | 0.268949 | 0.340668 | 0.273839 | 0.873676 | 0.873676 | 0.847596 | 0.709046 | 0.660147 | 0.643032 | 0 | 0.037549 | 0.304945 | 2,184 | 65 | 75 | 33.6 | 0.770751 | 0.105769 | 0 | 0.189189 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.594595 | 1 | 0.081081 | false | 0.027027 | 0.054054 | 0 | 0.162162 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
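The tests above pin down bisect_left-style semantics: both functions return the leftmost insertion index, and the starting hint looped over for `galloping_search` must not change the result. A sketch of the binary search under those assumptions (the actual `pyra.util` implementation may differ):

def binary_search_sketch(seq, target, lo=0, hi=None):
    hi = len(seq) if hi is None else hi
    while lo < hi:
        mid = (lo + hi) // 2
        if seq[mid] < target:
            lo = mid + 1
        else:
            hi = mid
    return lo  # leftmost index at which `target` could be inserted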
f51cb84743214c2f45bc804e995161b357beacc0 | 113 | py | Python | netbox_plugin_extensions/forms/model.py | DanSheps/netbox-plugin-extensions | 282f314fd301271f8bfa6620f4b9b15d4c93f59c | [
"Apache-2.0"
] | 6 | 2021-09-22T05:41:24.000Z | 2022-03-15T16:11:46.000Z | netbox_plugin_extensions/forms/model.py | DanSheps/netbox-plugin-extensions | 282f314fd301271f8bfa6620f4b9b15d4c93f59c | [
"Apache-2.0"
] | 3 | 2021-09-30T16:36:09.000Z | 2022-01-13T15:54:53.000Z | netbox_plugin_extensions/forms/model.py | DanSheps/netbox-plugin-extensions | 282f314fd301271f8bfa6620f4b9b15d4c93f59c | [
"Apache-2.0"
] | 3 | 2021-09-30T15:32:00.000Z | 2022-01-19T12:35:24.000Z | from extras.forms import CustomFieldModelForm
class PluginCustomFieldModelForm(CustomFieldModelForm):
pass
| 18.833333 | 55 | 0.849558 | 9 | 113 | 10.666667 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115044 | 113 | 5 | 56 | 22.6 | 0.96 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
f572c9af3e6754201c4e36324e1095604edfab5e | 5,052 | py | Python | S.py | mrzomby/Tool-s | 9a335feedbdc292ca866db63801f56493edc2084 | [
"Apache-2.0"
] | null | null | null | S.py | mrzomby/Tool-s | 9a335feedbdc292ca866db63801f56493edc2084 | [
"Apache-2.0"
] | null | null | null | S.py | mrzomby/Tool-s | 9a335feedbdc292ca866db63801f56493edc2084 | [
"Apache-2.0"
] | null | null | null | # Obfuscated by Py Compile
# Created by Mr.ZOMBY (https://github.com/mrzomby)
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b32decode("PCOELGGJSLW3Q4MGVNN53HSWPYEC7NGGXCYR3RQMAIQENAQEWFV2S36YKK2EEDW55OG3O7SBX6IZ2FJB3FTWAHNSDACJDSH474X7TW4PH67X5AHPX7APO24EZXH7B6PO4OLY7D75P673766Y3766L57XD757XDZPD4P3776VY6L66PT677B4PF7PH57P7Y6HZ7PX77GXO5775QDJH5GNRDC774FK555376AVP7X2357774KRKXE6QUNJTQOROU24SSF5DNE62PIGLXZRMVWOJMJR3ZTXZS6JJVKF2USG3IZ3A6WQY3DTVH6YCG3G7PF2OBD7F3TV4UKZCD3TKZU7NS3Q4VDF3V56HBUYWHLL3GYTH5P2PST5EJNSUVFNQ3IFEFLRJBLVXRCJC5IIWPTOBQSVKOETWDB5XRSHE5PJM55U3C2F6Y7AJ3GXEHNUDPBLZX4TWVRPJMOTLCGU3LNKNNSRCQYTPO4VDGU43NNFLHBZLM3K72NMLNUQK3ASJQVUCHHPBL2YYORSGIIVM7RX4VNMYTPDPGO2SHJK7DMZO2TSFZDIQG36ZK5EYRYYCDVTKIHBOZXCT33TIFNG3HHBNFUQMUHSTA7X5YXFIUVDE27K4UHTIEWZSKHBAB3UV7JAFC6QGRVUYUUCGEYZWQ42OJF4WV43NC74JBD6HL3MLMW5GU3W3RUKMTSXFDWC3N7V5T23LT5WGPCFL4VIQ7BQ2PLQXRI3QZF5ZIUIRKSVCRSUOVRVRXTVNFS6U3PXLEFQJ2J3ZOL4B4CPFAHQ4XQNFGG3JPDQXEMNGYXWJHHYMHKWC5A6TKFXNBRGFXDOVFJXP765Q37BMVS2PSHV7KJJ3RK4WUR6DWE7IXOJ3FEW7K6OL4LZ4M3POGFYMG5XWVWJ4RECJJ3MZ5DJCZBZ2UODLO7VCHOUKB2OXYYWW243UULJSJM5UOJPQUE7FCVXTW3FOTON26AU4N5YRPCCG3YZREXZK2VGIDEOO2VK7HLNYASXPNCDHXDESXKIKS3LDHMJERYWUVVYCCCKHU6D6Y5DVVUST5VHXXVC4KYZ7HDBG2VLK272K7ND2GAVQ7SGELUWV2WA2OSKH6Z5VO5TYGVDVKBZPSF5MRYRWYM62KKB7PZKZXX42NVFZ7FVVI2XDKHTOJNXAKEGFKHTKAUXUOPKM3KKQWY2H2LD7HVV3T5YTPY4JR4LURZ5LWE4DVH4IGCLC2CSU4UJFOCIASJRVVZNOYTMIGJJCA6UBCHW7RVCB3DF6BIRSBL7PYNFKHRXOFHM34322KIRRLLHTEXJJYT4JDFLC6VFEPZPOV6EP3ZUVKB5HAHHFRB2H5NSJ3QH3RB3TKC43KTNOUIUEWSXPJWELN7LW2TY3T4FY7GLY3DVXYG7OZFICSOEQRJRXKR3R6ZYHJ5YLHQXZTFK3Z6QVIGZDY6SEGOMQLVVV4CQWWKQMRHQ4UN5SAB3QVO2PA263IAZVFUVRI3DU2XLX2XLXFMHZONGEP5HHUXWLHEHAAMP3654BCXETV4FA5XEUQ4FFTJ64SDEZHKL345MOZFPZCNJFIVN3SFFHAUEVHKDUDHXT3RGY7GKZTFUM33XHORQUVRWTFFUNRFUBJDCGNF4OO5MPSNTXSY7IZCKRXPHYUIIJ24KSPSG2YME3BPJPIE2SFLSIHSLU55EJ3ODOLBSH7FUHTIYDPD6GFRTXOWW54Z4XYPOMIQRASBD2I4TCDI2RHEE4OSPHAWSEHECYUEMTXKNQVWSWHS3JQXX635OPMFYKO2472YIWKTPHTMBLT6TYHRTT52M35TUOVJKSVSXY5YTKGYEPGNRUHDHK36EIMLVI7JXMNUK5MO6Z3TAA66UU5Y6G7V2KC36RVGTIIJ6VQ3A2TEJTP3SSQ5CJVVXMYWXIJP7V3S4JZLI2VP4BDTHGYLXVL3LLMI7RRTX4WK7Y3VJNUDDU2IKWIM4G2CV4PVWMLOT4TGWKDLTVLBL3S7B526PBTU6XOWRXMF46TZIVP434NRGZ4OJNT622IU3OPESHIJXQVJ57OARTSZBTWNNWMZZAQ4X7D43XTSFGJ7MF2JXJ7PKG6ZVSGQESMSH2ENGFR4SQLAAPPB5GQ5D2EOFG6PMDEKE3HFPY3FR33GOM7YC3YGAMJ47KPNQHVBSE3QPNKQ6VURFKM4LZPSWSTUX5GZEYKMGRGB37QWHU3WNEJHESVLD2DHKBHYPLBV5JKTMYKZMY5HNOC2ONRJYAO5X3JJONID4TC26VJXZXHK3BK2IKQKFGATUID2JORU65FILUOGGEKJXIKD2UT3ZZNOBQHBZLDVBECONTSWE263I6AX5L4PVTFLSVJ3MD6KLFIIIUGKBF5OPBA2GWOZR6UYHQMJXEJXG4BEZ5CZEIQ4ZDKMPGNNLENYW2XCD2Y63TEOXTXU4BR65JZOQRZUOGQIMUXV36YBWPTWU4PLCEUXUE4ZVAKPNFXJTSXJGRACVFKQ2VCZQZ4SROIFXOCZNQQBNLI2C26TJLUDH2NPORVFUNEZAUCOY6GCLHHKT333HV2NNDJL6RWCDICQKUREHTSRO3V2OPM5BUU4LUZZXO2WPB4OAU5JICTZDFB5X4XB3332CXEFODI4UDW75TSNT36X4JTYS32RPOOC7CHHAIEYX2IEMTA5YRI4KOKEP5XVD46SXZQ3NBIPZWJWC4WGOI6MFWADNMQIKL3EWWCDSUIG7DLASYHAMOXFRPAQHK4C7HJTNHRDCDF3OZVNBKGF4O2DXCHTVK4KICP2UEBHBZL5SPGTH3VZXQC37S4M5T2H3PCQXFGD3RZS3LKUQSP7VDO4MPFKVN4NAGMOYRYPMOKXAADLF4A6KWGALCCGAW2ES5GKH43NAOJK62BHX4SQQJOHY6PDM4RCWAQBD5M3GQBAYKN7HE6LKL4ZGA6IZQ6PTP3AHXOPHAR35NIO64F5ZDNKG5XYPTTFQLOFZWTEG3V3USTXX4CPSV3PSDI7SDYAEXBAV7UOAIQMUNHXHXP7RT2EIFSOUGP5YQCRLYAFBDWAL7SI2YRY7JZCSYEAWBTVXZJLGBJJHYM7A2KVD3IFKZMVU54J4O62T2FWM2Z6SEEA5FA6OMD3NKF54S5TI56YVGWLPJBNAVOTAI6I7YPHWDBL2EGW76LBBOIFEIJ5U4YUNXIPXTNTYA52MHOFHE5NEODLZDTZRYZ4WMCB3URRUN7NF6ZFSTMK4PJA4A4IYECDVDQEKQRVOL2FUP3T5XJ5CD4DKCTCAHUTXPGKAJHV2CVQEYJZCQNIAZEDI5SONZJ262KBSGAVCSXJDXKYLSMFLUAJFTUXDAWK7RZJQSL2IRSQOD7ON5LOBNETJH53IOGOXOE4ABMRY7B4R5JLWJDUXGL4LYDOFAO4YHOMXFGS43GTUMIKT5KNJHDNCN6DEDCKZ3E653DBH4TNCNSPYPROWEHPVETFR4TNOLWFESJ5EKIAG7X34H7TOSLR6MAE7IBUSDAPEA4V4U5G2CTF6GBELZA53A6QEXO
KX7WMVMVROWQ67HUI6GW3XLAKRZHPGM4TVV2ZROCEPHABQLCSTZMZDUIDPVWOJTGVUFAO2ZHZXAN7IZR3B57VXDF2FCKSGIXPDQKWPECFU6OCQXS46GTMFFQZ4HUENFSKTT4VRALF7LZYI5E2UY6BNO7OFXXUZ5IC4Q5EGD5NZVOZSXDN6QFJLZ3OXKLY5BOOCSCGNOBUGKILBT5PFTQTNIT4BEFVXZDGTDN7FZBBSS4NQYXBY3KHLPNYYZKRSHG47PGCNZIRXHW3UJVZUU56AAWEWIHFNZ7RU6KW3CVAIWS3J7LAJBZ23YXVQIEVGH52TEYTK6OU7E52NMTQOFNDCTL4L33GTO4ZIA62K2ZUL3THGETQOPGK2JWHGA3NL7ZZVQK3AYWUKNUW2LZLG4UZCG3TCDD2DZNSCDMQS6IATKAYAJHKZ4UKMJF3ZT3EIW44GHEXD2LS4KZ4CMU46XSI3ZK2XB6KZVTWSUFYTUNSZE6PLBNHCXHYIDTNLI7L3RAIORLYCZSNHJLEFNRPKXVXKANBG36OCB4WPZRSWO2BOY2ZQDBJMWA3N6FHANUV3H5VDGQZJH4XLMER4YDTSH2N6H3ZZMSYVBDTW65ZABDBJNC7A4QLDLWRGNGIEHJI6PSUG25CKADLAHN3QEVK6B6MFAO6FOE7GL6MR27BRITXLWO2ZBPJEP43PCYWIKTGYMNCXQIHCZP4WBICS4AUEDYPZYKIQSOOKPW4VFKML6J6VY3ZIJMKAWE6YXCJWOKYNNCOOJVFRQT3KVYYTBI4HYVMQWW3REA6G26BSUTZW3FOKUNOHHFF5QFHKMO7L5GQ5TUG3PF2EGMPLYDAUZTR4FMILHYRJPHOAO67GAFFNSYD6TAPTGBV5T6VG5PNA3KPUWN3IRTBKVIUSU6O7QPGAVFXQ6VCHEG4ADFWW4YYQ6KQRW7MYFEZ7PAVTIQXNZQBUUFEFCU3TIXSP6GCIB4MGJ3XDTZSV2QYNOVI22RBHTI57NBKQEX4AAWBBYSDIMKQXT37MXKPEHAOVOEGRM4HKHUM6RLWCHLI3VSOGO2F5WQ4DAV5NI5RLUGZADOFWZXTA6LZZFUOYTA462SNLYON2KLDAAGHWEDJQKAKNB32RETPWAAKQH2R2WBOSFIOF3YAH6F3WAPE5ENIY57TTGAVYPNB33YF6VXZCNS2SZX2EKHNVP75PKX35PR6HZ63L3D6ODJZ7PX63PH367H3PSJHFH577TXLPXZ675ZZXH5673QWIW4P6DZZPP76OTZ5P377O547RZKL577XRB7775JZPL67X5663Z5ZTB6R46LZZOX37H2EPT76N27XT7T7525XX6A3773YQ67747E7X56PSZ4775ZPYH56PPPZ2RXY34===")))) | 1,010.4 | 4,946 | 0.991884 | 24 | 5,052 | 208.791667 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.191903 | 0.002573 | 5,052 | 5 | 4,946 | 1,010.4 | 0.80254 | 0.01445 | 0 | 0 | 0 | 0 | 0.982118 | 0.982118 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 10 |
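A safer way to inspect a blob packed in this marshal/zlib/base32 style is to decode and disassemble it rather than exec() it; the decode chain below mirrors the one in the file, with the payload elided.

import base64
import dis
import marshal
import zlib

blob = "..."  # the base32 payload from the file above
# Note: marshal.loads can still misbehave on adversarial input; run in a sandbox.
code = marshal.loads(zlib.decompress(base64.b32decode(blob)))
dis.dis(code)  # inspect the bytecode without executing it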
1984b5af18543c4335539af782a71696dd12d5ef | 800 | py | Python | tests/fixtures/defxmlschema/chapter03/__init__.py | nimish/xsdata | 7afe2781b66982428cc1731f53c065086acd35c1 | [
"MIT"
] | null | null | null | tests/fixtures/defxmlschema/chapter03/__init__.py | nimish/xsdata | 7afe2781b66982428cc1731f53c065086acd35c1 | [
"MIT"
] | null | null | null | tests/fixtures/defxmlschema/chapter03/__init__.py | nimish/xsdata | 7afe2781b66982428cc1731f53c065086acd35c1 | [
"MIT"
] | null | null | null | from tests.fixtures.defxmlschema.chapter03.chapter03prod2 import ColorType
from tests.fixtures.defxmlschema.chapter03.chapter03 import EnvelopeType
from tests.fixtures.defxmlschema.chapter03.chapter03ord import ItemsType
from tests.fixtures.defxmlschema.chapter03.chapter03ord import OrderType
from tests.fixtures.defxmlschema.chapter03.chapter03prod import ProdNumType
from tests.fixtures.defxmlschema.chapter03.chapter03prod import ProductType
from tests.fixtures.defxmlschema.chapter03.chapter03prod import SizeType
from tests.fixtures.defxmlschema.chapter03.chapter03prod2 import Color
from tests.fixtures.defxmlschema.chapter03.chapter03 import Envelope
from tests.fixtures.defxmlschema.chapter03.chapter03ord import Order
from tests.fixtures.defxmlschema.chapter03.chapter03prod import Product
| 66.666667 | 75 | 0.89 | 88 | 800 | 8.090909 | 0.227273 | 0.139045 | 0.26264 | 0.448034 | 0.867978 | 0.867978 | 0.867978 | 0 | 0 | 0 | 0 | 0.060847 | 0.055 | 800 | 11 | 76 | 72.727273 | 0.880952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
199b0c6194c48bb463776419c6f03852b1121647 | 81,167 | py | Python | 7_stg/model/visualization/viz_samples.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
] | 3 | 2020-10-23T02:53:11.000Z | 2021-03-12T11:04:37.000Z | 7_stg/model/visualization/viz_samples.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
] | null | null | null | 7_stg/model/visualization/viz_samples.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
] | 1 | 2021-07-28T08:38:05.000Z | 2021-07-28T08:38:05.000Z | import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
from print_helper import conductance_to_value_exp, build_string, build_string_gen
import seaborn as sns
from matplotlib import lines
import matplotlib.gridspec as gridspec
import prinzdb
from print_helper import get_summ_stat_name, get_summ_stat_name_text, get_synapse_name, get_summ_stat_name_asterisk, scale_to_experimental
import sys
sys.path.append("../visualization")
import viz
from copy import deepcopy
import matplotlib.ticker
import matplotlib.patheffects as pe
def vis_sample(m, s, sample, hyperparams, t_on=None, t_off=None, with_ss=True, with_params=True,
               mem_dimensions=None, mode2=None,
voltage_trace=None, time_len=None, fontscale=1.0, linescale=1.0, offset=0.0,
test_idx=None, case=None, title=None, date_today=None, counter=0, offset_labels=0.0, legend=True,
multiplier_cond_shift = 0.0, vis_legend=True, scale_bar=True,
ss_names=True, param_names=True, save_fig=False):
"""
Function of Kaan, modified by Michael. Used for plotting fig 5b Prinz.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
    :param sample: membrane/synaptic conductances
    :param hyperparams: hyperparameter object; `include_plateau` and `use_membrane` are read
    :param t_on: optional time at which a dashed vertical marker line is drawn
    :param t_off: optional time at which a second dashed vertical marker line is drawn
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
font_size = 15.0
if voltage_trace is None:
data = m.gen_single(sample)
else:
data = voltage_trace
Vx = data['data']
params = data['params']
stats = s.calc([data])[0]
stats_nan = deepcopy(stats)
stats[np.isnan(stats)] = 0.0
if hyperparams.include_plateau:
stats = stats[:-4]
stats = scale_to_experimental(stats)
bar_scaling_factors = [1.0, 10, 100, 10, 100, 1, 10000, 10000]
bar_scaling_factors = np.reshape(np.tile(bar_scaling_factors, 3), (3, 8))
bar_vals = bar_scaling_factors[np.asarray(hyperparams.use_membrane)]
if mem_dimensions is not None:
params_trunc = params[mem_dimensions].tolist()
params_trunc += params[-7:].tolist()
bar_vals = bar_vals[mem_dimensions]
params = np.asarray(params_trunc)
if with_params and with_ss:
fig = plt.figure(figsize=(11.3, 6))
gs = gridspec.GridSpec(2, 3, width_ratios=[len(stats), len(params[:-7]), len(params[-7:])],
wspace=0.25, height_ratios=[0.7, 0.3])
axV = plt.subplot(gs[0, :])
axss = plt.subplot(gs[1, 0])
axmemparams = plt.subplot(gs[1, 1])
axsynparams = plt.subplot(gs[1, 2])
elif with_params:
fig = plt.figure(figsize=(6, 7.5))
gs = gridspec.GridSpec(2, 2, width_ratios=[len(params[:-7]), len(params[-7:])],
hspace=0.1, wspace=0.38, height_ratios=[0.65, 0.35])
axV = plt.subplot(gs[0, :])
axmemparams = plt.subplot(gs[1, 0])
axsynparams = plt.subplot(gs[1, 1])
elif with_ss:
fig, (axV, axss) = plt.subplots(2, figsize=(14, 6))
else:
fig, axV = plt.subplots(1, figsize=(14, 3))
cols = ['#034e7b', '#0570b0', '#3690c0']
#cols = ['k', 'k', 'k']
current_col = 0
scale_bar_breadth = 1000.0
scale_bar_voltage_breadth = 50.0
if time_len is not None:
m.t = m.t * len(m.t) / time_len
scale_bar_breadth = scale_bar_breadth * len(m.t) / time_len
for j in range(len(prinzdb.neutypes)):
if time_len is not None:
axV.plot(m.t[10000+offset:10000+offset+time_len], Vx[j, 10000+offset:10000+offset+time_len] + 120.0 * (2 - j),
label=prinzdb.neutypes[j], lw=0.75, c='k', rasterized=True)
else:
axV.plot(m.t, Vx[j] + 120.0 * (2 - j), label=prinzdb.neutypes[j], lw=0.75, c='k')
current_col += 1
if scale_bar:
if mode2 == 'small':
axV.plot(10860 + np.arange(scale_bar_breadth), 318 * np.ones_like(np.arange(scale_bar_breadth)), lw=1.0,
color='k', zorder=5, rasterized=True)
axV.text(10905, 324, '1 sec', fontsize=font_size)
import matplotlib.patches as patches
rect = patches.Rectangle((11890, 234), 2000, 100, linewidth=1, facecolor='w', zorder=3)
axV.add_patch(rect)
axV.plot(13490 * np.ones_like(np.arange(scale_bar_voltage_breadth)),
318 - scale_bar_voltage_breadth + np.arange(scale_bar_voltage_breadth), lw=1.0, color='k', zorder=6, rasterized=True)
axV.text(11770, 270, '50 mV', fontsize=font_size)
else:
axV.plot(10860 + np.arange(scale_bar_breadth), 318 * np.ones_like(np.arange(scale_bar_breadth)), lw=1.0,
color='k', rasterized=True)
axV.text(10905, 324, '1 sec', fontsize=font_size)
import matplotlib.patches as patches
rect = patches.Rectangle((10900, 264), 700, 50, linewidth=1, facecolor='w', zorder=3)
axV.add_patch(rect)
axV.plot(11860 * np.ones_like(np.arange(scale_bar_voltage_breadth)),
318 - scale_bar_voltage_breadth + np.arange(scale_bar_voltage_breadth), lw=1.0, color='k')
axV.text(10930, 270, '50 mV', fontsize=font_size)
if not legend and vis_legend:
if mode2=='small':
axV.text(-0.15, 0.75, 'AB/PD', fontsize=font_size, transform=axV.transAxes)
axV.text(-0.1, 0.45, 'LP', fontsize=font_size, transform=axV.transAxes)
axV.text(-0.1, 0.15, 'PY', fontsize=font_size, transform=axV.transAxes)
else:
axV.text(-1540+offset_labels, 220, 'AB/PD', fontsize=font_size)
axV.text(-1050+offset_labels, 95, 'LP', fontsize=font_size)
axV.text(-1080+offset_labels, -30, 'PY', fontsize=font_size)
box = axV.get_position()
if t_on is not None:
axV.axvline(t_on, c='r', ls='--')
    if t_off is not None:
axV.axvline(t_off, c='r', ls='--')
axV.set_position([box.x0, box.y0, box.width, box.height])
axV.axes.get_yaxis().set_ticks([])
axV.axes.get_xaxis().set_ticks([])
if legend: axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.18),
ncol=len(prinzdb.neutypes), fontsize=font_size*fontscale)
axV.xaxis.set_tick_params(labelsize=font_size*fontscale)
axV.yaxis.set_tick_params(labelsize=font_size*fontscale)
axV.spines['left'].set_linewidth(2.0 * linescale)
axV.spines['bottom'].set_linewidth(2.0 * linescale)
col1 = 'r'
col2 = 'r'
col3 = 'r'
if with_params:
lticks = np.arange(len(params[:-7]))
width = 0.35*linescale # the width of the bars
axmemparams.bar(lticks + width / 2, bar_vals * params[:-7] / 0.628e-3, width,
bottom=min(1e-8, np.min(params[:-7])), color='k')
names = viz.get_labels(hyperparams, mathmode=True, include_q10=False)[:-7]
if mem_dimensions is not None: names = names[mem_dimensions]
axmemparams.set_ylim((0, 700))
# axmemparams.set_ylabel('Membrane', fontsize=font_size)
axmemparams.set_xticks(lticks + width / 2)
if param_names:
axmemparams.set_xticklabels(names, rotation='vertical', fontsize=font_size*fontscale)
else:
axmemparams.axes.get_xaxis().set_visible(False)
axmemparams.axes.get_yaxis().set_visible(False)
axmemparams.xaxis.set_tick_params(labelsize=font_size*fontscale)
axmemparams.yaxis.set_tick_params(labelsize=font_size*fontscale)
small_offset = [0.00, -0.0, -0.0, 0.0, -0.00, 0.0]
font_decrease = 1.7
mode = '13D'
if mode == '13D':
if mode2 == 'small':
for i in range(6): # 520 or so
axmemparams.text(-0.0 + i * 1.03, -360, 'x', fontsize=font_size / 2)
axmemparams.set_ylim((0, 700)) # 850
small_offset = [0.15, -0.04, -0.1, 0.0, -0.02, 0.0]
for i in range(6):
if i == 2 or i == 3 or i == 4 or i == 5: # -620
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -410,
r'$%s$' % build_string(conductance_to_value_exp([bar_vals[i]]),
include_multiplier=False, negative_num=False),
fontsize=font_size / font_decrease)
else:
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -410, r'$%s$' % str(int(bar_vals[i])),
fontsize=font_size / font_decrease)
axmemparams.text(0.11, -0.73, r'Membrane $\mathregular{\bar g}$', fontsize=font_size,
transform=axmemparams.transAxes)
axmemparams.text(0.22, -0.85, '[mS/cm' + chr(0x00b0 + 2) + ']', fontsize=font_size,
transform=axmemparams.transAxes)
else:
for i in range(6): # 520 or so
axmemparams.text(-0.0 + i * 1.03, -390, 'x', fontsize=font_size / 2)
axmemparams.set_ylim((0, 600)) # 850
small_offset = [0.15, -0.04, -0.1, 0.0, -0.02, 0.0]
for i in range(6):
if i == 2 or i == 3 or i == 4 or i == 5: # -620
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -450,
r'$%s$' % build_string(conductance_to_value_exp([bar_vals[i]]),
include_multiplier=False, negative_num=False),
fontsize=font_size / font_decrease)
else:
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -450, r'$%s$' % str(int(bar_vals[i])),
fontsize=font_size / font_decrease)
axmemparams.text(0.11, -0.95, r'Membrane $\mathregular{\bar g}$', fontsize=font_size,
transform=axmemparams.transAxes)
axmemparams.text(0.22, -1.12, '[mS/cm' + chr(0x00b0 + 2) + ']', fontsize=font_size,
transform=axmemparams.transAxes)
else:
for i in range(6): # 520 or so
axmemparams.text(-0.0 + i * 1.03, -470, 'x', fontsize=font_size / 2)
for i in range(6):
if i == 0 or i == 1 or i == 2 or i == 3 or i == 4 or i == 5: # -620
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -530,
r'$%s$' % build_string(conductance_to_value_exp([bar_vals[i]]),
include_multiplier=False, negative_num=False),
fontsize=font_size / font_decrease)
else:
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -530, r'$%s$' % str(int(bar_vals[i])),
fontsize=font_size / font_decrease)
lticks = np.arange(len(params[-7:]))
axsynparams.bar(lticks + width / 2, params[-7:] * 1e-3, width,
bottom=min(1e-8 * 1e-3, np.min(params[-7:] * 1e-3)), color='k')
if mode2 == 'small':
axsynparams.text(0.22, -0.73, r'Synaptic $\mathregular{\bar g}$', fontsize=font_size,
transform=axsynparams.transAxes)
axsynparams.text(0.4, -0.85, '[nS]', fontsize=font_size, transform=axsynparams.transAxes)
else:
axsynparams.text(0.22, -0.95, r'Synaptic $\mathregular{\bar g}$', fontsize=font_size,
transform=axsynparams.transAxes)
axsynparams.text(0.4, -1.12, '[nS]', fontsize=font_size, transform=axsynparams.transAxes)
names = viz.get_labels(hyperparams, include_q10=False)[-7:]
# axsynparams.set_ylabel('Synapses', fontsize=font_size)
axsynparams.set_yscale('log')
axsynparams.set_ylim((1e-8 * 1e-3, 1e-3 * 1e-3))
axsynparams.set_xticks(lticks + width / 2)
axsynparams.set_yticks([1e-11, 1e-9, 1e-7])
if param_names:
axsynparams.set_xticklabels(names, rotation='vertical', fontsize=font_size*fontscale)
else:
axsynparams.axes.get_xaxis().set_visible(False)
axsynparams.axes.get_yaxis().set_visible(False)
axsynparams.xaxis.set_tick_params(labelsize=font_size*fontscale)
axsynparams.yaxis.set_tick_params(labelsize=font_size*fontscale)
axsynparams.spines['left'].set_linewidth(2.0 * linescale)
axsynparams.spines['bottom'].set_linewidth(2.0 * linescale)
axmemparams.spines['left'].set_linewidth(2.0 * linescale)
axmemparams.spines['bottom'].set_linewidth(2.0 * linescale)
if with_ss:
lticks = np.arange(len(stats))
width = 0.35 # the width of the bars
#stats[8:] *= 2000
axss.bar(lticks + width / 2, stats, width, color='k')
nan_pos = np.where(np.isnan(stats_nan))[0]
axss.scatter(nan_pos + width / 2, 50 * np.ones_like(nan_pos), c='b', s=70.0, zorder=2, marker='x')
# add some text for labels, title and axes ticks
names = []
for num in range(15):
names.append(get_summ_stat_name(num))
# axss.set_ylabel('Summary Statistics', fontsize=font_size)
axss.set_xticks(lticks + width / 2)
if ss_names:
axss.set_xticklabels(names, rotation='vertical', fontsize=font_size*fontscale)
else:
axss.axes.get_xaxis().set_visible(False)
axss.axes.get_yaxis().set_visible(False)
axss.xaxis.set_tick_params(labelsize=font_size*fontscale)
axss.yaxis.set_tick_params(labelsize=font_size*fontscale)
axss.set_ylim([-4, 4])
axss.set_yticks([-4, -2, 0, 2, 4])
axss.set_yticklabels([r'$-4 \sigma$', r'$-2 \sigma$', '0', r'$2 \sigma$', r'$4 \sigma$'])
axss.text(0.27, -0.95, 'Summary statistics', fontsize=font_size, transform=axss.transAxes)
axss.text(0.145, -1.12, '[st. dev. of samples]', fontsize=font_size, transform=axss.transAxes)
axss.spines['right'].set_visible(False)
axss.spines['top'].set_visible(False)
#axss.axes.get_yaxis().set_ticks([])
axV.spines['right'].set_visible(False)
axV.spines['top'].set_visible(False)
axsynparams.spines['right'].set_visible(False)
axsynparams.spines['top'].set_visible(False)
axmemparams.spines['right'].set_visible(False)
axmemparams.spines['top'].set_visible(False)
sns.set(style="ticks", font_scale=1)
sns.despine()
axV.set_title('')
if save_fig:
plt.savefig(
'png/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.png'.format(test_idx[0],
counter),
bbox_inches='tight', dpi=500)
plt.savefig(
'svg/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.svg'.format(test_idx[0],
counter),
bbox_inches='tight')
return fig
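# Note on the scale-bar idiom used throughout this file: slicing with `[::n - 1]`
# keeps only the first and last element of an n-point range, so plotting those two
# points draws a bar spanning exactly n units. A minimal sketch of the trick:
#
#   n = 500
#   xs = 100 + np.arange(n)[::n - 1]                  # -> array([100, 599])
#   ys = -90 * np.ones_like(np.arange(n))[::n - 1]    # -> array([-90, -90])
#   # ax.plot(xs, ys) then draws a horizontal 500-unit scale bar at y = -90.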
def vis_sample_plain(m, s, sample, axV=None, t_on=None, t_off=None, col=['k', 'k', 'k'], print_label=False,
voltage_trace=None, time_len=None, fontscale=1.0, linescale=1.0, offset=0, scale_bar=True,
test_idx=None, case=None, title=None, date_today=None, counter=0, legend=True,
save_fig=False):
"""
Function of Kaan, modified by Michael. Used for plotting fig 5b Prinz.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
:param sample: membrane/synaptic conductances
:param t_on:
:param t_off:
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
if axV is None:
_, ax = plt.subplots(1, len(sample), figsize=(6*len(sample),6))
font_size = 15.0
current_counter = 0
dt = m.t[1] - m.t[0]
scale_bar_breadth = 500
scale_bar_voltage_breadth = 50
offscale = 100
offvolt = -50
if scale_bar: scale_col = 'k'
else: scale_col = 'w'
for current_sample in sample:
if axV is None: axV = ax[current_counter]
if voltage_trace is None:
data = m.gen_single(current_sample)
else:
data = voltage_trace
Vx = data['data']
params = data['params']
current_col = 0
for j in range(len(prinzdb.neutypes)):
if time_len is not None:
axV.plot(m.t[10000+offset:10000+offset+time_len:5], Vx[j, 10000+offset:10000+offset+time_len:5] + 140.0 * (2 - j),
label=prinzdb.neutypes[j], lw=0.3, c=col[current_col])
else:
axV.plot(m.t, Vx[j] + 120.0 * (2 - j), label=prinzdb.neutypes[j], lw=0.3, c=col[current_col])
current_col += 1
if print_label:
axV.plot([1100.0 + (offset - 26500) * (m.t[1] - m.t[0])], [300], color=col[0], marker='o',
markeredgecolor='w', ms=8,
markeredgewidth=1.0, path_effects=[pe.Stroke(linewidth=1.3, foreground='k'), pe.Normal()])
if scale_bar:
# time bar
axV.plot((offset+5500)*dt+offscale + np.arange(scale_bar_breadth)[::scale_bar_breadth - 1],
(-40+offvolt) * np.ones_like(np.arange(scale_bar_breadth))[::scale_bar_breadth - 1],
lw=1.0, color='w')
# voltage bar
axV.plot(
(2850 + offset*dt + offscale) * np.ones_like(np.arange(scale_bar_voltage_breadth))[::scale_bar_voltage_breadth - 1],
275 + np.arange(scale_bar_voltage_breadth)[::scale_bar_voltage_breadth - 1],
lw=1.0, color=scale_col, zorder=10)
box = axV.get_position()
if t_on is not None:
axV.axvline(t_on, c='r', ls='--')
if t_off is not None:
axV.axvline(t_off, c='r', ls='--')
axV.set_position([box.x0, box.y0, box.width, box.height])
axV.axes.get_yaxis().set_ticks([])
axV.axes.get_xaxis().set_ticks([])
#if legend: axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.18),
# ncol=len(prinzdb.neutypes), fontsize=font_size*fontscale)
#axV.xaxis.set_tick_params(labelsize=font_size*fontscale)
#axV.yaxis.set_tick_params(labelsize=font_size*fontscale)
#axV.spines['left'].set_linewidth(2.0 * linescale)
#axV.spines['bottom'].set_linewidth(2.0 * linescale)
axV.spines['right'].set_visible(False)
axV.spines['top'].set_visible(False)
axV.spines['bottom'].set_visible(False)
axV.spines['left'].set_visible(False)
#sns.set(style="ticks", font_scale=1)
#sns.despine()
if save_fig:
plt.savefig(
'../../thesis_results/pdf/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.pdf'.format(test_idx[0],
counter),
bbox_inches='tight')
plt.savefig(
'../../thesis_results/png/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.png'.format(test_idx[0],
counter),
bbox_inches='tight')
plt.savefig(
'../../thesis_results/svg/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.svg'.format(test_idx[0],
counter),
bbox_inches='tight')
current_counter += 1
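# Hedged usage sketch for `vis_sample_plain`, following the construction given in
# the docstring; `params` and `posterior_sample` are placeholder names, not part
# of this module:
#
#   m = netio.create_simulators(params)[0]
#   s = netio.create_summstats(params)
#   fig, ax = plt.subplots(figsize=(6, 6))
#   vis_sample_plain(m, s, [posterior_sample], axV=ax, time_len=100000,
#                    scale_bar=True, save_fig=False)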
def vis_sample_plain_31DSynthetic(m, s, sample, axV=None, t_on=None, t_off=None, col=['k', 'k', 'k'], print_label=False,
voltage_trace=None, time_len=None, fontscale=1.0, linescale=1.0, offset=0, scale_bar=True,
test_idx=None, case=None, title=None, date_today=None, counter=0, legend=True, draw_patch=False,
save_fig=False):
"""
Function of Kaan, modified by Michael. Used for plotting fig 5b Prinz.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
:param sample: membrane/synaptic conductances
:param t_on:
:param t_off:
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
if axV is None:
_, ax = plt.subplots(1, len(sample), figsize=(6*len(sample),6))
font_size = 15.0
current_counter = 0
dt = m.t[1] - m.t[0]
scale_bar_breadth = 500
scale_bar_voltage_breadth = 50
offscale = 100
offvolt = -50
if scale_bar: scale_col = 'k'
else: scale_col = 'w'
for current_sample in sample:
if axV is None: axV = ax[current_counter]
if voltage_trace is None:
data = m.gen_single(current_sample)
else:
data = voltage_trace
Vx = data['data']
params = data['params']
current_col = 0
for j in range(len(prinzdb.neutypes)):
if time_len is not None:
axV.plot(m.t[10000+offset:10000+offset+time_len:5], Vx[j, 10000+offset:10000+offset+time_len:5] + 140.0 * (2 - j),
label=prinzdb.neutypes[j], lw=0.3, c=col[current_col])
else:
axV.plot(m.t, Vx[j] + 120.0 * (2 - j), label=prinzdb.neutypes[j], lw=0.3, c=col[current_col])
current_col += 1
if print_label:
axV.plot([1100.0 + (offset - 26500) * (m.t[1] - m.t[0])], [300], color=col[0], marker='o',
markeredgecolor='w', ms=8,
markeredgewidth=1.0, path_effects=[pe.Stroke(linewidth=1.3, foreground='k'), pe.Normal()])
if draw_patch:
import matplotlib.patches as patches
rect = patches.Rectangle((1650 + offscale, 266), 200, 65, linewidth=1, facecolor='w', zorder=3)
axV.add_patch(rect)
if scale_bar:
# time bar
axV.plot((offset+5500)*dt+offscale + np.arange(scale_bar_breadth)[::scale_bar_breadth - 1],
(-40+offvolt) * np.ones_like(np.arange(scale_bar_breadth))[::scale_bar_breadth - 1],
lw=1.0, color='w')
# voltage bar
axV.plot(
(2850 + offset*dt + offscale) * np.ones_like(np.arange(scale_bar_voltage_breadth))[::scale_bar_voltage_breadth - 1],
275 + np.arange(scale_bar_voltage_breadth)[::scale_bar_voltage_breadth - 1],
lw=1.0, color=scale_col, zorder=10)
box = axV.get_position()
if t_on is not None:
axV.axvline(t_on, c='r', ls='--')
if t_off is not None:
axV.axvline(t_off, c='r', ls='--')
axV.set_position([box.x0, box.y0, box.width, box.height])
axV.axes.get_yaxis().set_ticks([])
axV.axes.get_xaxis().set_ticks([])
#if legend: axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.18),
# ncol=len(prinzdb.neutypes), fontsize=font_size*fontscale)
#axV.xaxis.set_tick_params(labelsize=font_size*fontscale)
#axV.yaxis.set_tick_params(labelsize=font_size*fontscale)
#axV.spines['left'].set_linewidth(2.0 * linescale)
#axV.spines['bottom'].set_linewidth(2.0 * linescale)
axV.spines['right'].set_visible(False)
axV.spines['top'].set_visible(False)
axV.spines['bottom'].set_visible(False)
axV.spines['left'].set_visible(False)
#sns.set(style="ticks", font_scale=1)
#sns.despine()
if save_fig:
plt.savefig(
'../../thesis_results/pdf/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.pdf'.format(test_idx[0],
counter),
bbox_inches='tight')
plt.savefig(
'../../thesis_results/png/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.png'.format(test_idx[0],
counter),
bbox_inches='tight')
plt.savefig(
'../../thesis_results/svg/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.svg'.format(test_idx[0],
counter),
bbox_inches='tight')
current_counter += 1
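# The only change relative to `vis_sample_plain` is the optional `draw_patch`
# rectangle, which paints a white box over part of the trace. The underlying
# matplotlib pattern, as a standalone sketch (coordinates are illustrative):
#
#   import matplotlib.patches as patches
#   fig, ax = plt.subplots()
#   ax.add_patch(patches.Rectangle((1750, 266), 200, 65, linewidth=1,
#                                  facecolor='w', zorder=3))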
def vis_sample_plain_bit_more(m, s, sample, axV=None, t_on=None, t_off=None, col=['k', 'k', 'k'],
voltage_trace=None, time_len=None, fontscale=1.0, linescale=1.0, offset=0, scale_bar=False,
test_idx=None, case=None, title=None, date_today=None, counter=0, legend=True,
save_fig=False):
"""
Function of Kaan, modified by Michael. Used for plotting fig 5b Prinz.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
:param sample: membrane/synaptic conductances
:param t_on:
:param t_off:
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
if axV is None:
_, ax = plt.subplots(1, len(sample), figsize=(6*len(sample),6))
font_size = 8.0
current_counter = 0
dt = m.t[1] - m.t[0]
for current_sample in sample:
if axV is None: axV = ax[current_counter]
if voltage_trace is None:
data = m.gen_single(current_sample)
else:
data = voltage_trace
Vx = data['data']
params = data['params']
current_col = 0
for j in range(len(prinzdb.neutypes)):
if time_len is not None:
axV.plot(m.t[10000+offset:10000+offset+time_len], Vx[j, 10000+offset:10000+offset+time_len] + 140.0 * (2 - j),
label=prinzdb.neutypes[j], lw=0.3, c=col[current_col])
else:
axV.plot(m.t, Vx[j] + 120.0 * (2 - j), label=prinzdb.neutypes[j], lw=0.1, c=col[current_col], rasterized=True)
current_col += 1
label_col = 'w'
axV.text(-0.035, 0.75, 'AB/PD', fontsize=font_size, c=label_col, transform=axV.transAxes)
axV.text(-0.028, 0.45, 'LP', fontsize=font_size, c=label_col, transform=axV.transAxes)
axV.text(-0.03, 0.15, 'PY', fontsize=font_size, c=label_col, transform=axV.transAxes)
axV.plot([1000.0+(offset-26500)*(m.t[1]-m.t[0])], [314], color=col[0], marker='o', markeredgecolor='w', ms=8,
markeredgewidth=1.0, path_effects=[pe.Stroke(linewidth=1.3, foreground='k'), pe.Normal()])
scale_bar_breadth = 500
scale_bar_voltage_breadth = 50
offscale = 100
offvolt = -50
if scale_bar:
# time bar
axV.plot((offset+5500)*dt+offscale + np.arange(scale_bar_breadth)[::scale_bar_breadth - 1],
(-40+offvolt) * np.ones_like(np.arange(scale_bar_breadth))[::scale_bar_breadth - 1], lw=1.0, color='k')
axV.text((offset+5500)*dt+offscale, -125, '500 ms', c=label_col, fontsize=font_size)
import matplotlib.patches as patches
rect = patches.Rectangle((4400+offscale, 296+offvolt), 500, 50, linewidth=1, facecolor='w', zorder=3)
axV.add_patch(rect)
# voltage bar
axV.plot((4810+offscale) * np.ones_like(np.arange(scale_bar_voltage_breadth))[::scale_bar_voltage_breadth - 1],
-70 + np.arange(scale_bar_voltage_breadth)[::scale_bar_voltage_breadth - 1],
lw=1.0, color='w', zorder=10)
axV.plot((4710 + offscale) * np.ones_like(np.arange(scale_bar_voltage_breadth))[::scale_bar_voltage_breadth - 1],
275 + np.arange(scale_bar_voltage_breadth)[::scale_bar_voltage_breadth - 1],
lw=1.0, color='k', zorder=10)
axV.text(5000, -70, '50 mV', c=label_col, fontsize=8.0)
box = axV.get_position()
if t_on is not None:
axV.axvline(t_on, c='r', ls='--')
if t_off is not None:
axV.axvline(t_off, c='r', ls='--')
axV.set_position([box.x0, box.y0, box.width, box.height])
axV.axes.get_yaxis().set_ticks([])
axV.axes.get_xaxis().set_ticks([])
if legend: axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.18),
ncol=len(prinzdb.neutypes), fontsize=font_size*fontscale)
axV.xaxis.set_tick_params(labelsize=font_size*fontscale)
axV.yaxis.set_tick_params(labelsize=font_size*fontscale)
axV.spines['left'].set_linewidth(2.0 * linescale)
axV.spines['bottom'].set_linewidth(2.0 * linescale)
axV.spines['right'].set_visible(False)
axV.spines['top'].set_visible(False)
axV.spines['bottom'].set_visible(False)
axV.spines['left'].set_visible(False)
axV.set_title('')
if save_fig:
plt.savefig(
'../../thesis_results/pdf/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.pdf'.format(test_idx[0],
counter),
bbox_inches='tight')
plt.savefig(
'../../thesis_results/png/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.png'.format(test_idx[0],
counter),
bbox_inches='tight')
plt.savefig(
'../../thesis_results/svg/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.svg'.format(test_idx[0],
counter),
bbox_inches='tight')
current_counter += 1
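# Design note: the full-length traces above use `rasterized=True` with a very thin
# line (lw=0.1), so vector exports (PDF/SVG) embed the dense voltage traces as a
# bitmap while labels and scale bars stay as vector elements, keeping file sizes
# small.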
def vis_sample_plain_bit_more_31DSynthetic(m, s, sample, axV=None, t_on=None, t_off=None, col=['k', 'k', 'k'],
voltage_trace=None, time_len=None, fontscale=1.0, linescale=1.0, offset=0, scale_bar=False,
test_idx=None, case=None, title=None, date_today=None, counter=0, legend=True,
save_fig=False):
"""
Function of Kaan, modified by Michael. Used for plotting fig 5b Prinz.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
:param sample: membrane/synaptic conductances
:param t_on:
:param t_off:
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
if axV is None:
_, ax = plt.subplots(1, len(sample), figsize=(6*len(sample),6))
font_size = 8.0
current_counter = 0
dt = m.t[1] - m.t[0]
for current_sample in sample:
if axV is None: axV = ax[current_counter]
if voltage_trace is None:
data = m.gen_single(current_sample)
else:
data = voltage_trace
Vx = data['data']
params = data['params']
current_col = 0
for j in range(len(prinzdb.neutypes)):
if time_len is not None:
axV.plot(m.t[10000+offset:10000+offset+time_len], Vx[j, 10000+offset:10000+offset+time_len] + 140.0 * (2 - j),
label=prinzdb.neutypes[j], lw=0.3, c=col[current_col])
else:
axV.plot(m.t, Vx[j] + 120.0 * (2 - j), label=prinzdb.neutypes[j], lw=0.1, c=col[current_col], rasterized=True)
current_col += 1
label_col = 'w'
#axV.text(-0.035, 0.75, 'AB/PD', fontsize=font_size, c=label_col, transform=axV.transAxes)
#axV.text(-0.028, 0.45, 'LP', fontsize=font_size, c=label_col, transform=axV.transAxes)
#axV.text(-0.03, 0.15, 'PY', fontsize=font_size, c=label_col, transform=axV.transAxes)
#axV.plot([1000.0+(offset-26500)*(m.t[1]-m.t[0])], [314], color=col[0], marker='o', markeredgecolor='w', ms=8,
# markeredgewidth=1.0, path_effects=[pe.Stroke(linewidth=1.3, foreground='k'), pe.Normal()])
scale_bar_breadth = 500
scale_bar_voltage_breadth = 50
offscale = 100
offvolt = -50
if scale_bar:
draw_col = 'k'
else:
draw_col = 'w'
# time bar
axV.plot((offset+5500)*dt+offscale + np.arange(scale_bar_breadth)[::scale_bar_breadth - 1],
(-40+offvolt) * np.ones_like(np.arange(scale_bar_breadth))[::scale_bar_breadth - 1], lw=1.0, color=draw_col)
# voltage bar
axV.plot((offset*dt) + (3100+offscale) * np.ones_like(np.arange(scale_bar_voltage_breadth))[::scale_bar_voltage_breadth - 1],
-70 + np.arange(scale_bar_voltage_breadth)[::scale_bar_voltage_breadth - 1],
lw=1.0, color='w', zorder=10)
axV.plot((offset*dt) + (3100 + offscale) * np.ones_like(np.arange(scale_bar_voltage_breadth))[::scale_bar_voltage_breadth - 1],
275 + np.arange(scale_bar_voltage_breadth)[::scale_bar_voltage_breadth - 1],
lw=1.0, color=draw_col, zorder=10)
box = axV.get_position()
if t_on is not None:
axV.axvline(t_on, c='r', ls='--')
if t_off is not None:
axV.axvline(t_off, c='r', ls='--')
axV.set_position([box.x0, box.y0, box.width, box.height])
axV.axes.get_yaxis().set_ticks([])
axV.axes.get_xaxis().set_ticks([])
if legend: axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.18),
ncol=len(prinzdb.neutypes), fontsize=font_size*fontscale)
axV.xaxis.set_tick_params(labelsize=font_size*fontscale)
axV.yaxis.set_tick_params(labelsize=font_size*fontscale)
axV.spines['left'].set_linewidth(2.0 * linescale)
axV.spines['bottom'].set_linewidth(2.0 * linescale)
axV.spines['right'].set_visible(False)
axV.spines['top'].set_visible(False)
axV.spines['bottom'].set_visible(False)
axV.spines['left'].set_visible(False)
axV.set_title('')
if save_fig:
plt.savefig(
'../../thesis_results/pdf/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.pdf'.format(test_idx[0],
counter),
bbox_inches='tight')
plt.savefig(
'../../thesis_results/png/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.png'.format(test_idx[0],
counter),
bbox_inches='tight')
plt.savefig(
'../../thesis_results/svg/' + date_today + '_sample_prinz_plain_' + case + '_{}_{}.svg'.format(test_idx[0],
counter),
bbox_inches='tight')
current_counter += 1
def vis_sample_subfig(m, s, sample, hyperparams, stats=None, t_on=None, t_off=None, with_ss=True, with_params=True, voltage_trace=None,
test_idx=None, case=None, title=None, date_today=None, counter=0, save_fig=False, legend_offset=0.0,
axV=None, axss=None, axmemparams=None, axsynparams=None, max_stats=None, min_stats=None,
mem_dimensions=None, mode='13D', mode_for_membrane_height=None,
stat_mean=None, stat_std=None, scale_bar=True, stat_scale=None, current_col='g',
max_conds=None, min_conds=None, legend=True, ss_names=True, param_names=True):
"""
Based on vis_sample. Is called when the pdf should be shown next ot the sample.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
:param sample: membrane/synaptic conductances
:param t_on:
:param t_off:
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
# Hyperparameters for plotting
font_size=15.0 # fontsize of the labels
col_bar = 'k' # color of the bars for summstats and conductances
col_minmax = 'k' # color of the horizontal line indicating the max and min value of summstats and conds
col_shade = 'k' # color of the shade between the max and min values
values_each = 100 # number of x-positions tiled per bar when drawing the min/max indicators
indicator_fraction = 0.8 # breadth of the horizontal min/max bars, must lie within [0,1]
opacity = 0.5 # opacity of the shade
width = 0.35 # the width of the bars
neuron_labels = ['AB/PD', 'LP', 'PY'] # labels for the legends
scale_bar_breadth = 1000
scale_bar_voltage_breadth = 50
if voltage_trace is None: data = m.gen_single(sample)
else: data = voltage_trace
Vx = data['data']
params = data['params']
#stats = s.calc([data])[0]
stats_nan = deepcopy(stats)
#stats[np.isnan(stats)]=0.0
#stats = scale_to_experimental(stats)
# Per-neuron scaling factors that bring the conductance bars onto a common axis.
bar_scaling_factors = np.asarray([[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000],
[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000],
[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000]])
bar_vals = bar_scaling_factors[np.asarray(hyperparams.use_membrane)]
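# Interpretation (not verified against netio): `hyperparams.use_membrane` seems to
# act as a mask over the (3 neurons x 8 channels) scaling table, so the indexing
# above keeps one scaling factor per membrane conductance that is a free parameter.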
if mem_dimensions is not None:
params_trunc = params[mem_dimensions].tolist()
params_trunc += params[-7:].tolist()
bar_vals = bar_vals[mem_dimensions]
params = np.asarray(params_trunc)
step_Vtrace = 10
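# Plotting every `step_Vtrace`-th sample thins the trace for rendering speed
# without visibly changing the waveform at figure resolution.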
if legend:
for j in range(len(prinzdb.neutypes)):
axV.plot(m.t[25500:25500+200000:step_Vtrace], Vx[j,25500:25500+200000:step_Vtrace]+140.0*(2-j), label=neuron_labels[j])
else:
for j in range(len(prinzdb.neutypes)):
axV.plot(m.t[25500:25500+200000:step_Vtrace], Vx[j,25500:25500+200000:step_Vtrace]+140.0*(2-j), label=neuron_labels[j], c='k', lw=0.6)
if scale_bar:
axV.plot(4810+np.arange(scale_bar_breadth)[::scale_bar_breadth-1], 358*np.ones_like(np.arange(scale_bar_breadth))[::scale_bar_breadth-1], lw=1.0, color='k')
axV.text(4845, 364, '1 sec', fontsize=font_size)
import matplotlib.patches as patches
rect = patches.Rectangle((5400, 296), 500, 50, linewidth=1, facecolor='w', zorder=3)
axV.add_patch(rect)
axV.plot(5810*np.ones_like(np.arange(scale_bar_voltage_breadth))[::scale_bar_voltage_breadth-1],
358-scale_bar_voltage_breadth+np.arange(scale_bar_voltage_breadth)[::scale_bar_voltage_breadth-1]
, lw=1.0, color='k',zorder=10)
axV.text(5430, 310, '50 mV', fontsize=font_size)
axV.plot(m.t[22500]+20, 325, color=current_col, marker='o', markeredgecolor='w', ms=22,
markeredgewidth=0.5)
if not legend:
axV.text(-0.08, 0.75, 'AB/PD', fontsize=font_size, transform=axV.transAxes)
axV.text(-0.04, 0.45, 'LP', fontsize=font_size, transform=axV.transAxes)
axV.text(-0.045, 0.15, 'PY', fontsize=font_size, transform=axV.transAxes)
box = axV.get_position()
if t_on is not None:
axV.axvline(t_on, c='r', ls='--')
if t_off is not None:
axV.axvline(t_off, c='r', ls='--')
axV.set_position([box.x0, box.y0, box.width, box.height])
axV.axes.get_yaxis().set_ticks([])
axV.axes.get_xaxis().set_ticks([])
if legend:
if scale_bar:
axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.15),
ncol=len(prinzdb.neutypes), fontsize=font_size)
else:
axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.18),
ncol=len(prinzdb.neutypes), fontsize=font_size)
axV.xaxis.set_tick_params(labelsize=font_size)
axV.yaxis.set_tick_params(labelsize=font_size)
#axV.set_xlim((m.t[25500] - 400, 11500))
axV.set_xlim((m.t[25500] - 200, m.t[25500+200000]+200))
if scale_bar:
axV.set_ylim((-95, 360))
if with_params:
lticks = np.arange(len(params[:-7]))
end_of_time_axis = len(params[:-7]) - 1 + width
full_time = np.linspace(width/2-0.5, end_of_time_axis+0.5-width/2, values_each * len(params[:-7]))
full_min_conds = np.tile(bar_vals * min_conds[:-7] / 0.628e-3, (values_each, 1))
full_min_conds = full_min_conds.flatten(order='F')
full_max_conds = np.tile(bar_vals * max_conds[:-7] / 0.628e-3, (values_each, 1))
full_max_conds = full_max_conds.flatten(order='F')
axmemparams.bar(lticks + width / 2, bar_vals * params[:-7] / 0.628e-3, width,
bottom=min(1e-8, np.min(params[:-7])), color=col_bar)
#min_conds_scaled = bar_vals * deepcopy(min_conds[:-7]) / 0.628e-3
#max_conds_scaled = bar_vals * deepcopy(max_conds[:-7]) / 0.628e-3
#axmemparams.plot(width / 2+np.arange(len(min_conds_scaled)), min_conds_scaled, col1)
#axmemparams.plot(width / 2+np.arange(len(min_conds_scaled)), max_conds_scaled, col2)
#axmemparams.fill_between(width / 2+np.arange(len(max_conds_scaled)), min_conds_scaled, max_conds_scaled,
# facecolor=col3, alpha=0.5)
for k in range(len(params[:-7])):
start_t = int(values_each*k+(1-indicator_fraction)/2*values_each)
end_t = int(values_each*(k+1)-(1-indicator_fraction)/2*values_each)
time_diff = end_t - start_t
axmemparams.plot(full_time[start_t:end_t][::time_diff-1], full_min_conds[start_t:end_t][::time_diff-1], col_minmax)
axmemparams.plot(full_time[start_t:end_t][::time_diff-1], full_max_conds[start_t:end_t][::time_diff-1], col_minmax)
axmemparams.fill_between(full_time[start_t:end_t][::time_diff-1], full_min_conds[start_t:end_t][::time_diff-1],
full_max_conds[start_t:end_t][::time_diff-1], facecolor=col_shade, alpha=opacity)
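# The loop above tiles each parameter's prior min/max `values_each` times along a
# dense pseudo-time axis and keeps only the endpoints of the central
# `indicator_fraction` window of every bar, giving each bar a short horizontal
# min line, a max line, and a shaded band in between.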
names = viz.get_labels(hyperparams, mathmode=True, include_q10=False)[:-7]
if mem_dimensions is not None:
names = names[mem_dimensions]
axmemparams.set_ylim((0, 700)) # 850
axmemparams.set_xticks(lticks + width / 2)
if param_names: axmemparams.set_xticklabels(names, rotation='vertical', fontsize=font_size)
else:
axmemparams.axes.get_xaxis().set_visible(False)
axmemparams.axes.get_yaxis().set_visible(False)
axmemparams.xaxis.set_tick_params(labelsize=font_size)
axmemparams.yaxis.set_tick_params(labelsize=font_size)
small_offset = [0.00, -0.0, -0.0, 0.0, -0.00, 0.0]
font_decrease = 1.7
if mode == '13D':
for i in range(6): # 520 or so
axmemparams.text(-0.0 + i * 1.03, -390, 'x', fontsize=font_size / 2)
axmemparams.set_ylim((0, 600)) # 850
small_offset = [0.15, -0.04, -0.1, 0.0, -0.02, 0.0]
for i in range(6):
if i in (2, 3, 4, 5): # -620
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -450,
r'$%s$' % build_string(conductance_to_value_exp([bar_vals[i]]),
include_multiplier=False, negative_num=False),
fontsize=font_size / font_decrease)
else:
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -450, r'$%s$' % str(int(bar_vals[i])),
fontsize=font_size / font_decrease)
axmemparams.text(0.11, -0.95, r'Membrane $\mathregular{\bar g}$', fontsize=font_size, transform=axmemparams.transAxes)
axmemparams.text(0.22, -1.12, '[mS/cm\u00b2]', fontsize=font_size, transform=axmemparams.transAxes)
else:
if mode_for_membrane_height == 'high':
axmemparams.set_ylim((0, 1000))
for i in range(6): # 520 or so
axmemparams.text(-0.0 + i * 1.03, -650, 'x', fontsize=font_size / 2)
for i in range(6):
if bar_vals[i] == 1:
axmemparams.text(-0.2 + i * 1.02 + 0.18, -750,
r'$1$', fontsize=font_size / font_decrease)
elif bar_vals[i] == 10:
axmemparams.text(-0.2 + i * 1.0 + 0.05, -750,
r'$10$', fontsize=font_size / font_decrease)
else:
axmemparams.text(-0.2 + i * 1.0, -750,
r'$%s$' % build_string(conductance_to_value_exp([bar_vals[i]]),
include_multiplier=False, negative_num=False),
fontsize=font_size / font_decrease)
else:
for i in range(6): # 520 or so
axmemparams.text(-0.0 + i * 1.03, -470, 'x', fontsize=font_size / 2)
for i in range(6):
# The condition covered every index in range(6); the integer-label branch was
# unreachable and has been dropped.
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -530, r'$%s$' % build_string(conductance_to_value_exp([bar_vals[i]]), include_multiplier=False, negative_num=False), fontsize=font_size / font_decrease)
axmemparams.text(0.11, -0.95, r'Membrane $\mathregular{\bar g}$', fontsize=font_size, transform=axmemparams.transAxes)
axmemparams.text(0.22, -1.12, '[mS/cm\u00b2]', fontsize=font_size, transform=axmemparams.transAxes)
lticks = np.arange(len(params[-7:]))
end_of_time_axis = len(params[-7:])-1+width
full_time = np.linspace(width/2-0.5, end_of_time_axis+0.5-width/2, values_each * len(params[-7:]))
full_min_conds = np.tile(min_conds[-7:] * 1e-3, (values_each,1))
full_min_conds = full_min_conds.flatten(order='F')
full_max_conds = np.tile(max_conds[-7:] * 1e-3, (values_each, 1))
full_max_conds = full_max_conds.flatten(order='F')
axsynparams.bar(lticks + width / 2, params[-7:]*1e-3, width, color=col_bar)
#axsynparams.plot(width / 2+np.arange(len(min_conds[-7:])), min_conds[-7:]*0.628e-3, col1)
#axsynparams.plot(width / 2+np.arange(len(min_conds[-7:])), max_conds[-7:]*0.628e-3, col2)
#axsynparams.fill_between(width / 2 + np.arange(len(min_conds[-7:])), min_conds[-7:] * 0.628e-3,
# max_conds[-7:] * 0.628e-3, facecolor=col3, alpha=0.5)
for k in range(len(params[-7:])):
start_t = int(values_each * k + (1 - indicator_fraction) / 2 * values_each)
end_t = int(values_each * (k + 1) - (1 - indicator_fraction) / 2 * values_each)
time_diff = end_t - start_t
axsynparams.plot(full_time[start_t:end_t][::time_diff-1], full_min_conds[start_t:end_t][::time_diff-1], col_minmax)
axsynparams.plot(full_time[start_t:end_t][::time_diff-1], full_max_conds[start_t:end_t][::time_diff-1], col_minmax)
axsynparams.fill_between(full_time[start_t:end_t][::time_diff-1], full_min_conds[start_t:end_t][::time_diff-1],
full_max_conds[start_t:end_t][::time_diff-1], facecolor=col_shade, alpha=opacity)
axsynparams.text(0.22, -0.95, r'Synaptic $\mathregular{\bar g}$', fontsize=font_size, transform=axsynparams.transAxes)
axsynparams.text(0.4, -1.12, '[nS]', fontsize=font_size, transform=axsynparams.transAxes)
names = viz.get_labels(hyperparams, include_q10=False)[-7:]
#axsynparams.set_ylabel('Synapses', fontsize=font_size)
axsynparams.set_yscale('log')
axsynparams.set_ylim((1e-8*1e-3, 1.3*1e-3*1e-3))
axsynparams.set_xticks(lticks + width / 2)
axsynparams.set_yticks([1e-11, 1e-9, 1e-7])
#locmin = matplotlib.ticker.LogLocator(base=10.0, subs=(0.2, 0.4, 0.6, 0.8), numticks=12)
#axsynparams.yaxis.set_minor_locator(locmin)
#axsynparams.yaxis.set_minor_formatter(matplotlib.ticker.NullFormatter())
if param_names: axsynparams.set_xticklabels(names, rotation='vertical', fontsize=font_size)
else:
axsynparams.axes.get_xaxis().set_visible(False)
axsynparams.axes.get_yaxis().set_visible(False)
axsynparams.xaxis.set_tick_params(labelsize=font_size)
axsynparams.yaxis.set_tick_params(labelsize=font_size)
axsynparams.spines['right'].set_visible(False)
axsynparams.spines['top'].set_visible(False)
axmemparams.spines['right'].set_visible(False)
axmemparams.spines['top'].set_visible(False)
if with_ss:
lticks = np.arange(len(stats))
if stat_scale is None:
stats[8:] *= 2000
min_stats_scaled = deepcopy(min_stats)
max_stats_scaled = deepcopy(max_stats)
if stat_scale is None:
min_stats_scaled[8:] = min_stats_scaled[8:] * 2000
max_stats_scaled[8:] = max_stats_scaled[8:] * 2000
axss.bar(lticks + width / 2, stats, width, color=col_bar)
#axss.plot(width / 2+np.arange(len(min_stats_scaled)), min_stats_scaled, col1)
#axss.plot(width / 2+np.arange(len(min_stats_scaled)), max_stats_scaled, col2)
#axss.fill_between(width / 2+np.arange(len(min_stats_scaled)), min_stats_scaled, max_stats_scaled,
# facecolor=col3, alpha=0.5)
end_of_time_axis = len(stats) - 1 + width
full_time = np.linspace(width / 2 - 0.5, end_of_time_axis + 0.5 - width / 2, values_each * len(stats))
full_min_ss = np.tile(min_stats_scaled, (values_each, 1))
full_min_ss = full_min_ss.flatten(order='F')
full_max_ss = np.tile(max_stats_scaled, (values_each, 1))
full_max_ss = full_max_ss.flatten(order='F')
for k in range(len(stats)):
start_t = int(values_each * k + (1 - indicator_fraction) / 2 * values_each)
end_t = int(values_each * (k + 1) - (1 - indicator_fraction) / 2 * values_each)
time_diff = end_t - start_t
axss.plot(full_time[start_t:end_t][::time_diff-1], full_min_ss[start_t:end_t][::time_diff-1], col_minmax)
axss.plot(full_time[start_t:end_t][::time_diff-1], full_max_ss[start_t:end_t][::time_diff-1], col_minmax)
axss.fill_between(full_time[start_t:end_t][::time_diff-1], full_min_ss[start_t:end_t][::time_diff-1],
full_max_ss[start_t:end_t][::time_diff-1], facecolor=col_shade, alpha=opacity)
axss.text(0.27, -0.95, 'Summary statistics', fontsize=font_size, transform=axss.transAxes)
axss.text(0.145, -1.12, '[st. dev. of experimental data]', fontsize=font_size, transform=axss.transAxes)
nan_pos = np.where(np.isnan(stats_nan))[0]
if stat_scale is not None:
axss.scatter(nan_pos+width/2, 3.5*np.ones_like(nan_pos),
c='k', s=70.0, zorder=2, marker='x')
else:
axss.scatter(nan_pos + width / 2, 1900 * np.ones_like(nan_pos),
c='k', s=70.0, zorder=2, marker='x')
# add some text for labels, title and axes ticks
names = []
for num in range(15):
names.append(get_summ_stat_name(num))
#axss.set_ylabel('Summary Statistics', fontsize=font_size)
axss.set_yticks([-4, -2, 0, 2, 4])
axss.set_yticklabels([r'$-4 \sigma$', r'$-2 \sigma$', '0', r'$2 \sigma$', r'$4 \sigma$'])
#axss.axes.get_yaxis().set_ticks([])
axss.set_xticks(lticks + width / 2)
if ss_names: axss.set_xticklabels(names, rotation='vertical', fontsize=font_size)
else:
axss.axes.get_xaxis().set_visible(False)
axss.axes.get_yaxis().set_visible(False)
axss.xaxis.set_tick_params(labelsize=font_size)
axss.yaxis.set_tick_params(labelsize=font_size)
if stat_scale is not None:
axss.set_ylim([-4.0, 4.0])
else:
axss.set_ylim([-450, 2100])
axss.spines['right'].set_visible(False)
axss.spines['top'].set_visible(False)
axV.spines['right'].set_visible(False)
axV.spines['top'].set_visible(False)
sns.set(style="ticks", font_scale=1)
sns.despine()
axV.set_title('')
if save_fig:
plt.savefig('../../thesis_results/pdf/'+date_today+'_sample_prinz_'+case+'_{}_{}.pdf'.format(test_idx[0], counter),
bbox_inches='tight')
plt.savefig('../../thesis_results/png/'+date_today+'_sample_prinz_'+case+'_{}_{}.png'.format(test_idx[0], counter),
bbox_inches='tight')
plt.savefig('../../thesis_results/svg/'+date_today+'_sample_prinz_'+case+'_{}_{}.svg'.format(test_idx[0], counter),
bbox_inches='tight')
return axV, axss, axmemparams, axsynparams
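# Hedged sketch of how the four axes handed to `vis_sample_subfig` might be laid
# out; the GridSpec geometry below is an illustrative assumption, not taken from
# this repository:
#
#   import matplotlib.gridspec as gridspec
#   fig = plt.figure(figsize=(12, 8))
#   gs = gridspec.GridSpec(2, 3, height_ratios=[2, 1])
#   axV = fig.add_subplot(gs[0, :])          # voltage traces span the top row
#   axmemparams = fig.add_subplot(gs[1, 0])  # membrane conductance bars
#   axsynparams = fig.add_subplot(gs[1, 1])  # synaptic conductance bars
#   axss = fig.add_subplot(gs[1, 2])         # summary-statistic bars
#   vis_sample_subfig(m, s, sample, hyperparams, stats=stats, axV=axV, axss=axss,
#                     axmemparams=axmemparams, axsynparams=axsynparams,
#                     min_conds=prior_min, max_conds=prior_max,
#                     min_stats=stats_min, max_stats=stats_max)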
def vis_sample_subfig_twitter(m, s, sample, hyperparams, stats=None, t_on=None, t_off=None, with_ss=False, with_params=False, voltage_trace=None,
test_idx=None, case=None, title=None, date_today=None, counter=0, save_fig=False, legend_offset=0.0,
axV=None, axss=None, axmemparams=None, axsynparams=None, max_stats=None, min_stats=None,
mem_dimensions=None, mode='13D', mode_for_membrane_height=None, offset=0,
stat_mean=None, stat_std=None, scale_bar=True, stat_scale=None, current_col='g',
max_conds=None, min_conds=None, legend=True, ss_names=True, param_names=True):
"""
Based on vis_sample. Is called when the pdf should be shown next ot the sample.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
:param sample: membrane/synaptic conductances
:param t_on:
:param t_off:
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
# Hyperparameters for plotting
font_size=15.0 # fontsize of the labels
col_bar = 'k' # color of the bars for summstats and conductances
col_minmax = 'k' # color of the horizontal line indicating the max and min value of summstats and conds
col_shade = 'k' # color of the shade between the max and min values
values_each = 100 # number of x-positions tiled per bar when drawing the min/max indicators
indicator_fraction = 0.8 # breadth of the horizontal min/max bars, must lie within [0,1]
opacity = 0.5 # opacity of the shade
width = 0.35 # the width of the bars
neuron_labels = ['AB/PD', 'LP', 'PY'] # labels for the legends
scale_bar_breadth = 1000
scale_bar_voltage_breadth = 50
if voltage_trace is None: data = m.gen_single(sample)
else: data = voltage_trace
Vx = data['data']
params = data['params']
#stats = s.calc([data])[0]
stats_nan = deepcopy(stats)
#stats[np.isnan(stats)]=0.0
#stats = scale_to_experimental(stats)
# Per-neuron scaling factors that bring the conductance bars onto a common axis.
bar_scaling_factors = np.asarray([[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000],
[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000],
[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000]])
bar_vals = bar_scaling_factors[np.asarray(hyperparams.use_membrane)]
if mem_dimensions is not None:
params_trunc = params[mem_dimensions].tolist()
params_trunc += params[-7:].tolist()
bar_vals = bar_vals[mem_dimensions]
params = np.asarray(params_trunc)
step_Vtrace = 5
for j in range(len(prinzdb.neutypes)):
axV.plot(m.t[25500+offset:25500+115000+offset:step_Vtrace], Vx[j,25500+offset:25500+115000+offset:step_Vtrace]+140.0*(2-j), label=neuron_labels[j], c='k', lw=0.6)
box = axV.get_position()
if t_on is not None:
axV.axvline(t_on, c='r', ls='--')
if t_off is not None:
axV.axvline(t_off, c='r', ls='--')
axV.set_position([box.x0, box.y0, box.width, box.height])
axV.axes.get_yaxis().set_ticks([])
axV.axes.get_xaxis().set_ticks([])
if legend:
if scale_bar:
axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.15),
ncol=len(prinzdb.neutypes), fontsize=font_size)
else:
axV.legend(loc='upper center', bbox_to_anchor=(0.5, 1.18),
ncol=len(prinzdb.neutypes), fontsize=font_size)
axV.xaxis.set_tick_params(labelsize=font_size)
axV.yaxis.set_tick_params(labelsize=font_size)
axV.spines['right'].set_visible(False)
axV.spines['top'].set_visible(False)
axV.spines['bottom'].set_visible(False)
axV.spines['left'].set_visible(False)
sns.set(style="ticks", font_scale=1)
sns.despine()
axV.set_title('')
if save_fig:
plt.savefig('../../thesis_results/pdf/'+date_today+'_sample_prinz_'+case+'_{}_{}.pdf'.format(test_idx[0], counter),
bbox_inches='tight')
plt.savefig('../../thesis_results/png/'+date_today+'_sample_prinz_'+case+'_{}_{}.png'.format(test_idx[0], counter),
bbox_inches='tight')
plt.savefig('../../thesis_results/svg/'+date_today+'_sample_prinz_'+case+'_{}_{}.svg'.format(test_idx[0], counter),
bbox_inches='tight')
return axV
def vis_sample_subfig_no_voltage(m, s, sample, hyperparams, stats=None, t_on=None, t_off=None, with_ss=True, with_params=True, voltage_trace=None,
test_idx=None, case=None, title=None, date_today=None, counter=0, save_fig=False, legend_offset=0.0,
axss=None, axmemparams=None, axsynparams=None, max_stats=None, min_stats=None,
mem_dimensions=None, mode='13D', mode_for_membrane_height=None, labels_=True, color_input='k',
stat_mean=None, stat_std=None, scale_bar=True, stat_scale=None, current_col='g',
max_conds=None, min_conds=None, legend=True, ss_names=True, param_names=True):
"""
Based on vis_sample. Is called when the pdf should be shown next ot the sample.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
:param sample: membrane/synaptic conductances
:param t_on:
:param t_off:
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
# Hyperparameters for plotting
font_size=8.0 # fontsize of the labels
col_bar = color_input # color of the bars for summstats and conductances
col_minmax = color_input # color of the horizontal line indicating the max and min value of summstats and conds
col_shade = color_input # color of the shade between the max and min values
values_each = 100 # number of x-positions tiled per bar when drawing the min/max indicators
indicator_fraction = 0.8 # breadth of the horizontal min/max bars, must lie within [0,1]
opacity = 0.5 # opacity of the shade
width = 0.35 # the width of the bars
neuron_labels = ['AB/PD', 'LP', 'PY'] # labels for the legends
scale_bar_breadth = 1000
scale_bar_voltage_breadth = 50
plot_bars = False # draw only the min/max indicators, not the sample's own bars
if voltage_trace is None: data = m.gen_single(sample)
else: data = voltage_trace
params = sample
stats_nan = deepcopy(stats)
bar_scaling_factors = np.asarray([[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000],
[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000],
[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000]])
bar_vals = bar_scaling_factors[np.asarray(hyperparams.use_membrane)]
if mem_dimensions is not None:
params_trunc = params[mem_dimensions].tolist()
params_trunc += params[-7:].tolist()
bar_vals = bar_vals[mem_dimensions]
params = np.asarray(params_trunc)
if with_params:
lticks = np.arange(len(params[:-7]))
end_of_time_axis = len(params[:-7]) - 1 + width
full_time = np.linspace(width/2-0.5, end_of_time_axis+0.5-width/2, values_each * len(params[:-7]))
full_min_conds = np.tile(bar_vals * min_conds[:-7] / 0.628e-3, (values_each, 1))
full_min_conds = full_min_conds.flatten(order='F')
full_max_conds = np.tile(bar_vals * max_conds[:-7] / 0.628e-3, (values_each, 1))
full_max_conds = full_max_conds.flatten(order='F')
if plot_bars:
axmemparams.bar(lticks + width / 2, bar_vals * params[:-7] / 0.628e-3, width,
bottom=min(1e-8, np.min(params[:-7])), color=col_bar)
for k in range(len(params[:-7])):
start_t = int(values_each*k+(1-indicator_fraction)/2*values_each)
end_t = int(values_each*(k+1)-(1-indicator_fraction)/2*values_each)
time_diff = end_t - start_t
axmemparams.plot(full_time[start_t:end_t][::time_diff-1], full_min_conds[start_t:end_t][::time_diff-1], c=col_minmax)
axmemparams.plot(full_time[start_t:end_t][::time_diff-1], full_max_conds[start_t:end_t][::time_diff-1], c=col_minmax)
axmemparams.fill_between(full_time[start_t:end_t][::time_diff-1], full_min_conds[start_t:end_t][::time_diff-1],
full_max_conds[start_t:end_t][::time_diff-1], facecolor=col_shade, alpha=opacity)
names = viz.get_labels_8pt(hyperparams, include_q10=False)[:-7]
if mem_dimensions is not None:
names = names[mem_dimensions]
axmemparams.set_ylim((0, 1000)) # 850
axmemparams.set_xticks(lticks + width / 2)
new_names = []
# Prefix each tick label with its integer scaling factor, e.g. '100 x <name>'
# (both branches of the commented-out conditional were identical).
for count, n in enumerate(names):
new_names.append(str(int(bar_vals[count])) + ' x ' + n)
if param_names: axmemparams.set_xticklabels(new_names, rotation='vertical', fontsize=font_size)
else:
axmemparams.axes.get_xaxis().set_visible(False)
#axmemparams.axes.get_yaxis().set_visible(False)
axmemparams.xaxis.set_tick_params(labelsize=font_size)
axmemparams.yaxis.set_tick_params(labelsize=font_size)
small_offset = [0.00, -0.0, -0.0, 0.0, -0.00, 0.0]
font_decrease = 1.7
if labels_:
if mode == '13D':
axmemparams.set_ylim((0, 1000)) # 850
axmemparams.text(0.36, -1.04, 'Membrane conductances', fontsize=font_size,
transform=axmemparams.transAxes)
axmemparams.text(0.43, -1.20, r'$\mathdefault{[mS/cm}^2\mathdefault{]}$', fontsize=font_size, transform=axmemparams.transAxes)
else:
if mode_for_membrane_height == 'high':
axmemparams.set_ylim((0, 1000))
for i in range(6): # 520 or so
axmemparams.text(-0.0 + i * 1.03, -650, 'x', fontsize=font_size / 2)
for i in range(6):
if bar_vals[i] == 1:
axmemparams.text(-0.2 + i * 1.02 + 0.18, -750,
r'$1$', fontsize=font_size / font_decrease)
elif bar_vals[i] == 10:
axmemparams.text(-0.2 + i * 1.0 + 0.05, -750,
r'$10$', fontsize=font_size / font_decrease)
else:
axmemparams.text(-0.2 + i * 1.0, -750,
r'$%s$' % build_string(conductance_to_value_exp([bar_vals[i]]),
include_multiplier=False, negative_num=False),
fontsize=font_size / font_decrease)
else:
for i in range(6): # 520 or so
axmemparams.text(-0.0 + i * 1.03, -470, 'x', fontsize=font_size / 2)
for i in range(6):
# As above, the condition covered every index in range(6); the integer-label
# branch was unreachable and has been dropped.
axmemparams.text(-0.2 + i * 1.0 + small_offset[i], -530, r'$%s$' % build_string(conductance_to_value_exp([bar_vals[i]]), include_multiplier=False, negative_num=False), fontsize=font_size / font_decrease)
#axmemparams.text(0.11, -1.50, r'Membrane $\mathregular{\bar g}$', fontsize=font_size, transform=axmemparams.transAxes)
axmemparams.text(0.11, -1.60, r'Membrane $\mathregular{\bar g}$', fontsize=font_size,
transform=axmemparams.transAxes)
axmemparams.text(0.22, -1.77, '[mS/cm\u00b2]', fontsize=font_size, transform=axmemparams.transAxes)
lticks = np.arange(len(params[-7:]))
end_of_time_axis = len(params[-7:])-1+width
full_time = np.linspace(width/2-0.5, end_of_time_axis+0.5-width/2, values_each * len(params[-7:]))
full_min_conds = np.tile(min_conds[-7:] * 1e-3, (values_each,1))
full_min_conds = full_min_conds.flatten(order='F')
full_max_conds = np.tile(max_conds[-7:] * 1e-3, (values_each, 1))
full_max_conds = full_max_conds.flatten(order='F')
full_min_conds *= 1e9
full_max_conds *= 1e9
if plot_bars:
axsynparams.bar(lticks + width / 2, params[-7:]*1e-3, width, color=col_bar)
for k in range(len(params[-7:])):
start_t = int(values_each * k + (1 - indicator_fraction) / 2 * values_each)
end_t = int(values_each * (k + 1) - (1 - indicator_fraction) / 2 * values_each)
time_diff = end_t - start_t
axsynparams.plot(full_time[start_t:end_t][::time_diff-1], full_min_conds[start_t:end_t][::time_diff-1], c=col_minmax)
axsynparams.plot(full_time[start_t:end_t][::time_diff-1], full_max_conds[start_t:end_t][::time_diff-1], c=col_minmax)
axsynparams.fill_between(full_time[start_t:end_t][::time_diff-1], full_min_conds[start_t:end_t][::time_diff-1],
full_max_conds[start_t:end_t][::time_diff-1], facecolor=col_shade, alpha=opacity)
if labels_:
#axsynparams.text(0.27, -0.85, r'Synaptic $\mathregular{\bar g}$', fontsize=font_size, transform=axsynparams.transAxes)
axsynparams.text(0.09, -1.04, 'Synaptic conductances', fontsize=font_size,
transform=axsynparams.transAxes)
axsynparams.text(0.37, -1.19, '[nS]', fontsize=font_size, transform=axsynparams.transAxes)
names = viz.get_labels_8pt(hyperparams, mathmode=True, include_q10=False)[-7:]
axsynparams.set_yscale('log')
# axsynparams.set_ylim((1e-8*1e-3*1e9, 1.3*1e-3*1e-3*1e9))
axsynparams.set_xticks(lticks + width / 2)
#axsynparams.set_yticks([0.01, 1.0, 100])
axsynparams.set_ylim([0.01, 1000])
if param_names: axsynparams.set_xticklabels(names, rotation='vertical', fontsize=font_size)
else:
axsynparams.axes.get_xaxis().set_visible(False)
axsynparams.xaxis.set_tick_params(labelsize=font_size)
axsynparams.yaxis.set_tick_params(labelsize=font_size)
axsynparams.spines['right'].set_visible(False)
axsynparams.spines['top'].set_visible(False)
axmemparams.spines['right'].set_visible(False)
axmemparams.spines['top'].set_visible(False)
axmemparams.tick_params(width=2.0 * 0.666, length=5.0 * 0.666)
axsynparams.tick_params(width=2.0 * 0.666, length=5.0 * 0.666)
axmemparams.tick_params(width=2.0 * 0.4, length=5.0 * 0.4, which='minor')
axsynparams.tick_params(width=2.0 * 0.4, length=5.0 * 0.4, which='minor')
if with_ss:
lticks = np.arange(len(stats))
if stat_scale is None:
stats[8:] *= 2000
min_stats_scaled = deepcopy(min_stats)
max_stats_scaled = deepcopy(max_stats)
if stat_scale is None:
min_stats_scaled[8:] = min_stats_scaled[8:] * 2000
max_stats_scaled[8:] = max_stats_scaled[8:] * 2000
if plot_bars:
axss.bar(lticks + width / 2, stats, width, color=col_bar)
end_of_time_axis = len(stats) - 1 + width
full_time = np.linspace(width / 2 - 0.5, end_of_time_axis + 0.5 - width / 2, values_each * len(stats))
full_min_ss = np.tile(min_stats_scaled, (values_each, 1))
full_min_ss = full_min_ss.flatten(order='F')
full_max_ss = np.tile(max_stats_scaled, (values_each, 1))
full_max_ss = full_max_ss.flatten(order='F')
for k in range(len(stats)):
start_t = int(values_each * k + (1 - indicator_fraction) / 2 * values_each)
end_t = int(values_each * (k + 1) - (1 - indicator_fraction) / 2 * values_each)
time_diff = end_t - start_t
axss.plot(full_time[start_t:end_t][::time_diff-1], full_min_ss[start_t:end_t][::time_diff-1], c=col_minmax)
axss.plot(full_time[start_t:end_t][::time_diff-1], full_max_ss[start_t:end_t][::time_diff-1], c=col_minmax)
axss.fill_between(full_time[start_t:end_t][::time_diff-1], full_min_ss[start_t:end_t][::time_diff-1],
full_max_ss[start_t:end_t][::time_diff-1], facecolor=col_shade, alpha=opacity)
if labels_:
axss.text(0.33, -0.68, 'Summary statistics', fontsize=font_size, transform=axss.transAxes)
axss.text(0.322, -0.80, '[st. dev. of samples]', fontsize=font_size, transform=axss.transAxes)
nan_pos = np.where(np.isnan(stats_nan))[0]
if stat_scale is not None:
axss.scatter(nan_pos+width/2, 1.7*np.ones_like(nan_pos),
c=col_minmax, s=25.0, zorder=2, marker='x')
else:
axss.scatter(nan_pos + width / 2, 1900 * np.ones_like(nan_pos),
c=col_minmax, s=25.0, zorder=2, marker='x')
# add some text for labels, title and axes ticks
names = []
for num in range(15):
names.append(get_summ_stat_name_text(num))
#axss.set_yticks([-4, -2, 0, 2, 4])
axss.set_yticks([-2, -1, 0, 1, 2])
#axss.set_yticklabels([r'$-4 \sigma$', '$-2 \sigma$', '0', '$2 \sigma$', '$4 \sigma$'])
axss.set_yticklabels([r'$\mathdefault{-2} \sigma$', r'$\mathdefault{-}\sigma$', '0', r'$\sigma$', r'$\mathdefault{2} \sigma$'])
axss.set_xticks(lticks + width / 2)
if ss_names: axss.set_xticklabels(names, rotation='vertical', fontsize=font_size)
else:
axss.axes.get_xaxis().set_visible(False)
#axss.axes.get_yaxis().set_visible(False)
axss.xaxis.set_tick_params(labelsize=font_size)
axss.yaxis.set_tick_params(labelsize=font_size)
if stat_scale is not None:
axss.set_ylim([-2.0, 2.0])
else:
axss.set_ylim([-450, 2100])
axss.spines['right'].set_visible(False)
axss.spines['top'].set_visible(False)
axss.tick_params(width=2.0 * 0.666, length=5.0 * 0.666)
#axss.get_xaxis().set_tick_params(
# which='both', direction='out', labelsize=font_size*3)
sns.set(style="ticks", font_scale=1)
sns.despine()
if save_fig:
plt.savefig('../../thesis_results/pdf/'+date_today+'_sample_prinz_'+case+'_{}_{}.pdf'.format(test_idx[0], counter),
bbox_inches='tight')
plt.savefig('../../thesis_results/png/'+date_today+'_sample_prinz_'+case+'_{}_{}.png'.format(test_idx[0], counter),
bbox_inches='tight')
plt.savefig('../../thesis_results/svg/'+date_today+'_sample_prinz_'+case+'_{}_{}.svg'.format(test_idx[0], counter),
bbox_inches='tight')
if axmemparams is not None and axss is not None:
return axss, axmemparams, axsynparams
elif axss is not None:
return axss
elif axmemparams is not None:
return axmemparams, axsynparams
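# Design note: the conditional returns above mirror which axes the caller supplied,
# so a caller that only passes axss gets that single axis back, while a caller
# passing the parameter axes receives the (axmemparams, axsynparams) pair.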
def vis_ss_barplot(m, s, sample, hyperparams, stats=None, t_on=None, t_off=None, with_ss=True, with_params=True, voltage_trace=None,
test_idx=None, case=None, title=None, date_today=None, counter=0, save_fig=False, legend_offset=0.0,
axss=None, axmemparams=None, axsynparams=None, max_stats=None, min_stats=None,
mem_dimensions=None, mode='13D', mode_for_membrane_height=None, labels_=True, color_input='k',
stat_mean=None, stat_std=None, scale_bar=True, stat_scale=None, current_col='g',
max_conds=None, min_conds=None, legend=True, ss_names=True, param_names=True):
"""
Based on vis_sample. Is called when the pdf should be shown next ot the sample.
:param m: generator object, from m = netio.create_simulators(params)[0]
:param s: summstat object, from s = netio.create_summstats(params)
:param sample: membrane/synaptic conductances
:param t_on:
:param t_off:
:param with_ss: bool, True if bars for summary stats are wanted
:param with_params: bool, True if bars for parameters are wanted
:return: figure object
"""
# Hyperparameters for plotting
font_size=8.0 # fontsize of the labels
col_bar = color_input # color of the bars for summstats and conductances
col_minmax = color_input # color of the horizontal line indicating the max and min value of summstats and conds
col_shade = color_input # color of the shade between the max and min values
values_each = 100 # number of x-positions tiled per bar when drawing the min/max indicators
indicator_fraction = 0.8 # breadth of the horizontal min/max bars, must lie within [0,1]
opacity = 0.5 # opacity of the shade
width = 0.35 # the width of the bars
neuron_labels = ['AB/PD', 'LP', 'PY'] # labels for the legends
scale_bar_breadth = 1000
scale_bar_voltage_breadth = 50
plot_bars = False # draw only the min/max indicators, not the sample's own bars
params = sample
stats_nan = deepcopy(stats)
bar_scaling_factors = np.asarray([[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000],
[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000],
[1.0, 100.0, 100.0, 10.0, 100.0, 1.0, 10000, 10000]])
bar_vals = bar_scaling_factors[np.asarray(hyperparams.use_membrane)]
if mem_dimensions is not None:
params_trunc = params[mem_dimensions].tolist()
params_trunc += params[-7:].tolist()
bar_vals = bar_vals[mem_dimensions]
params = np.asarray(params_trunc)
if with_ss:
lticks = np.arange(len(stats))
if stat_scale is None:
stats[8:] *= 2000
min_stats_scaled = deepcopy(min_stats)
max_stats_scaled = deepcopy(max_stats)
if stat_scale is None:
min_stats_scaled[8:] = min_stats_scaled[8:] * 2000
max_stats_scaled[8:] = max_stats_scaled[8:] * 2000
if plot_bars:
axss.bar(lticks + width / 2, stats, width, color=col_bar)
end_of_time_axis = len(stats) - 1 + width
full_time = np.linspace(width / 2 - 0.5, end_of_time_axis + 0.5 - width / 2, values_each * len(stats))
full_min_ss = np.tile(min_stats_scaled, (values_each, 1))
full_min_ss = full_min_ss.flatten(order='F')
full_max_ss = np.tile(max_stats_scaled, (values_each, 1))
full_max_ss = full_max_ss.flatten(order='F')
for k in range(len(stats)):
start_t = int(values_each * k + (1 - indicator_fraction) / 2 * values_each)
end_t = int(values_each * (k + 1) - (1 - indicator_fraction) / 2 * values_each)
time_diff = end_t - start_t
axss.plot(full_time[start_t:end_t][::time_diff-1], full_min_ss[start_t:end_t][::time_diff-1], c=col_minmax)
axss.plot(full_time[start_t:end_t][::time_diff-1], full_max_ss[start_t:end_t][::time_diff-1], c=col_minmax)
axss.fill_between(full_time[start_t:end_t][::time_diff-1], full_min_ss[start_t:end_t][::time_diff-1],
full_max_ss[start_t:end_t][::time_diff-1], facecolor=col_shade, alpha=opacity)
if labels_:
axss.text(0.33, -0.68, 'Summary statistics', fontsize=font_size, transform=axss.transAxes)
axss.text(0.322, -0.80, '[st. dev. of samples]', fontsize=font_size, transform=axss.transAxes)
nan_pos = np.where(np.isnan(stats_nan))[0]
if stat_scale is not None:
axss.scatter(nan_pos+width/2, 1.7*np.ones_like(nan_pos),
c=col_minmax, s=25.0, zorder=2, marker='x')
else:
axss.scatter(nan_pos + width / 2, 1900 * np.ones_like(nan_pos),
c=col_minmax, s=25.0, zorder=2, marker='x')
# add some text for labels, title and axes ticks
names = []
for num in range(15):
names.append(get_summ_stat_name_text(num))
#axss.set_yticks([-4, -2, 0, 2, 4])
axss.set_yticks([-2, -1, 0, 1, 2])
#axss.set_yticklabels([r'$-4 \sigma$', '$-2 \sigma$', '0', '$2 \sigma$', '$4 \sigma$'])
axss.set_yticklabels([r'$\mathdefault{-2} \sigma$', r'$\mathdefault{-}\sigma$', '0', r'$\sigma$', r'$\mathdefault{2} \sigma$'])
axss.set_xticks(lticks + width / 2)
if ss_names: axss.set_xticklabels(names, rotation='vertical', fontsize=font_size)
else:
axss.axes.get_xaxis().set_visible(False)
#axss.axes.get_yaxis().set_visible(False)
axss.xaxis.set_tick_params(labelsize=font_size)
axss.yaxis.set_tick_params(labelsize=font_size)
if stat_scale is not None:
axss.set_ylim([-2.0, 2.0])
else:
axss.set_ylim([-450, 2100])
axss.spines['right'].set_visible(False)
axss.spines['top'].set_visible(False)
axss.tick_params(width=2.0 * 0.666, length=5.0 * 0.666)
#axss.get_xaxis().set_tick_params(
# which='both', direction='out', labelsize=font_size*3)
sns.set(style="ticks", font_scale=1)
sns.despine()
if save_fig:
plt.savefig('../../thesis_results/pdf/'+date_today+'_sample_prinz_'+case+'_{}_{}.pdf'.format(test_idx[0], counter),
bbox_inches='tight')
plt.savefig('../../thesis_results/png/'+date_today+'_sample_prinz_'+case+'_{}_{}.png'.format(test_idx[0], counter),
bbox_inches='tight')
plt.savefig('../../thesis_results/svg/'+date_today+'_sample_prinz_'+case+'_{}_{}.svg'.format(test_idx[0], counter),
bbox_inches='tight')
if axmemparams is not None and axss is not None:
return axss, axmemparams, axsynparams
elif axss is not None:
return axss
elif axmemparams is not None:
return axmemparams, axsynparams
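# Hedged usage sketch for `vis_ss_barplot`; the stat vectors and prior bounds are
# placeholder names for illustration (any non-None stat_scale keeps the
# sigma-scaled y-axis):
#
#   fig, ax = plt.subplots(figsize=(4, 2))
#   vis_ss_barplot(m, s, sample, hyperparams, stats=stats, axss=ax,
#                  min_stats=stats_min, max_stats=stats_max, stat_scale='sigma')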
| 49.073156 | 224 | 0.588293 | 11,289 | 81,167 | 4.023297 | 0.043937 | 0.025892 | 0.035932 | 0.010788 | 0.946388 | 0.937625 | 0.925802 | 0.914793 | 0.905876 | 0.897114 | 0 | 0.053277 | 0.27526 | 81,167 | 1,653 | 225 | 49.102843 | 0.718827 | 0.12765 | 0 | 0.830938 | 0 | 0 | 0.045176 | 0.009753 | 0 | 0 | 0.000427 | 0 | 0 | 1 | 0.007608 | false | 0 | 0.016061 | 0 | 0.031276 | 0.005072 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
19aba72551d2a2fafcdd1e66e724fbe8c7260961 | 28,563 | py | Python | data/dataloader.py | DLWK/EANet | 3680e099dd815117d4a54f928fb8247aa2f0b71a | ["MIT"] | 14 | 2021-04-17T14:28:58.000Z | 2022-03-28T09:38:24.000Z | data/dataloader.py | DLWK/EANet | 3680e099dd815117d4a54f928fb8247aa2f0b71a | ["MIT"] | 2 | 2021-12-13T14:12:45.000Z | 2022-03-31T14:37:33.000Z | data/dataloader.py | DLWK/EANet | 3680e099dd815117d4a54f928fb8247aa2f0b71a | ["MIT"] | 3 | 2021-07-14T14:15:53.000Z | 2022-03-31T12:14:34.000Z |
import torch
import cv2
import os
import glob
from torch.utils.data import Dataset
import random
import torch.nn.functional as F
# Earlier variant of ISBI_Loader that took a `transform` argument and read .jpg images:
# class ISBI_Loader(Dataset):
#     def __init__(self, data_path, transform):
#         # Initialise the dataset: collect all image paths under data_path.
#         self.data_path = data_path
#         self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.jpg'))
#         self.transform = transform
#     def augment(self, image, flipCode):
#         # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = both.
#         flip = cv2.flip(image, flipCode)
#         return flip
#     def __getitem__(self, index):
#         # Read the image for this index.
#         image_path = self.imgs_path[index]
#         # Derive the mask and edge paths from the image path.
#         label_path = image_path.replace('image', 'label').split('.')[0] + "_mask.png"
#         edge_path = image_path.replace('image', 'label').split('.')[0] + "_edge.jpg"
#         # Load image, segmentation mask, and edge map as grayscale.
#         image = cv2.imread(image_path, 0)
#         label = cv2.imread(label_path, 0)
#         edge = cv2.imread(edge_path, 0)
#         # Reshape to single-channel (1, H, W) arrays.
#         image = image.reshape(1, image.shape[0], image.shape[1])
#         label = label.reshape(1, label.shape[0], label.shape[1])
#         edge = edge.reshape(1, edge.shape[0], edge.shape[1])
#         # Normalise targets: map pixel value 255 to 1.
#         if label.max() > 1:
#             label = label / 255
#         if edge.max() > 1:
#             edge = edge / 255
#         # Random flip augmentation; flipCode 2 means no flip.
#         flipCode = random.choice([-1, 0, 1, 2])
#         if flipCode != 2:
#             image = self.augment(image, flipCode)
#             label = self.augment(label, flipCode)
#             edge = self.augment(edge, flipCode)
#         return image, label, edge
#     def __len__(self):
#         # Size of the training set.
#         return len(self.imgs_path)
class ISBI_Loader(Dataset):
    def __init__(self, data_path):
        # Initializer: collect all image paths under data_path
        self.data_path = data_path
        self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.png'))
    def augment(self, image, flipCode):
        # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = horizontal + vertical
        flip = cv2.flip(image, flipCode)
        return flip
    def __getitem__(self, index):
        # Read the image at this index
        image_path = self.imgs_path[index]
        # Derive the label and edge paths from image_path
        label_path = image_path.replace('image', 'label').split('.')[0]+"_mask.png"
        edge_path = image_path.replace('image', 'label').split('.')[0]+"_edge.png"
        # Read the training image and its label images
        image = cv2.imread(image_path, 0)
        label = cv2.imread(label_path, 0)
        edge = cv2.imread(edge_path, 0)
        # Reshape the arrays to single-channel (C, H, W) layout
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # label = cv2.cvtColor(label, cv2.COLOR_BGR2GRAY)
        image = image.reshape(1, image.shape[0], image.shape[1])
        label = label.reshape(1, label.shape[0], label.shape[1])
        edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
        if edge.max() > 1:
            edge = edge / 255
        # Random augmentation; flipCode 2 means no flip
        flipCode = random.choice([-1, 0, 1, 2])
        if flipCode != 2:
            image = self.augment(image, flipCode)
            label = self.augment(label, flipCode)
            edge = self.augment(edge, flipCode)
        return image, label, edge
    def __len__(self):
        # Return the size of the training set
        return len(self.imgs_path)
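# Directory layout assumed by the ISBI loaders above and below, shown with a
# hypothetical file name for illustration:
#   image: <data_path>/image/0001.png
#   label: <data_path>/label/0001_mask.png
#   edge:  <data_path>/label/0001_edge.png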
class ISBI_Loadertest(Dataset):
    def __init__(self, data_path):
        # Initializer: collect all image paths under data_path
        self.data_path = data_path
        self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.png'))
    def augment(self, image, flipCode):
        # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = horizontal + vertical
        flip = cv2.flip(image, flipCode)
        return flip
    def __getitem__(self, index):
        # Read the image at this index
        image_path = self.imgs_path[index]
        # Derive the label and edge paths from image_path
        label_path = image_path.replace('image', 'label').split('.')[0]+"_mask.png"
        edge_path = image_path.replace('image', 'label').split('.')[0]+"_edge.png"
        # Read the test image and its label images
        image = cv2.imread(image_path, 0)
        label = cv2.imread(label_path, 0)
        edge = cv2.imread(edge_path, 0)
        # Reshape the arrays to single-channel (C, H, W) layout
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # label = cv2.cvtColor(label, cv2.COLOR_BGR2GRAY)
        image = image.reshape(1, image.shape[0], image.shape[1])
        label = label.reshape(1, label.shape[0], label.shape[1])
        edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
        if edge.max() > 1:
            edge = edge / 255
        # No random augmentation at test time
        # flipCode = random.choice([-1, 0, 1, 2])
        # if flipCode != 2:
        #     image = self.augment(image, flipCode)
        #     label = self.augment(label, flipCode)
        #     edge = self.augment(edge, flipCode)
        return image, label, edge
    def __len__(self):
        # Return the size of the test set
        return len(self.imgs_path)
class liver_Loader(Dataset):
    def __init__(self, data_path):
        # Initializer: collect all image paths under data_path
        self.data_path = data_path
        self.imgs_path = glob.glob(os.path.join(data_path, '*.png'))
    def augment(self, image, flipCode):
        # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = horizontal + vertical
        flip = cv2.flip(image, flipCode)
        return flip
    def __getitem__(self, index):
        # Read the image at this index
        image_path = self.imgs_path[index]
        path = os.path.dirname(image_path)
        name = image_path.split('/')[-1][0:3]
        image_path = path + '/' + name + '.png'
        # Derive the label and edge paths from image_path
        label_path = path + '/' + name + "_mask.png"
        edge_path = path + '/' + name + "_edge.png"
        # Read the training image and its label images
        image = cv2.imread(image_path, 0)
        label = cv2.imread(label_path, 0)
        edge = cv2.imread(edge_path, 0)
        # Reshape the arrays to single-channel (C, H, W) layout
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # label = cv2.cvtColor(label, cv2.COLOR_BGR2GRAY)
        image = image.reshape(1, image.shape[0], image.shape[1])
        label = label.reshape(1, label.shape[0], label.shape[1])
        edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
        if edge.max() > 1:
            edge = edge / 255
        # Random augmentation; flipCode 2 means no flip
        flipCode = random.choice([-1, 0, 1, 2])
        if flipCode != 2:
            image = self.augment(image, flipCode)
            label = self.augment(label, flipCode)
            edge = self.augment(edge, flipCode)
        return image, label, edge
    def __len__(self):
        # Return the size of the training set
        return len(self.imgs_path)
class liver_Loadertest(Dataset):
    def __init__(self, data_path):
        # Initializer: collect all image paths under data_path
        self.data_path = data_path
        self.imgs_path = glob.glob(os.path.join(data_path, '*.png'))
    def augment(self, image, flipCode):
        # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = horizontal + vertical
        flip = cv2.flip(image, flipCode)
        return flip
    def __getitem__(self, index):
        image_path = self.imgs_path[index]
        path = os.path.dirname(image_path)
        name = image_path.split('/')[-1][0:3]
        image_path = path + '/' + name + '.png'
        # Derive the label path from image_path
        label_path = path + '/' + name + "_mask.png"
        # Read the test image and its label image
        image = cv2.imread(image_path, 0)
        label = cv2.imread(label_path, 0)
        # Reshape the arrays to single-channel (C, H, W) layout
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # label = cv2.cvtColor(label, cv2.COLOR_BGR2GRAY)
        image = image.reshape(1, image.shape[0], image.shape[1])
        label = label.reshape(1, label.shape[0], label.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
        # No random augmentation at test time
        # flipCode = random.choice([-1, 0, 1, 2])
        # if flipCode != 2:
        #     image = self.augment(image, flipCode)
        #     label = self.augment(label, flipCode)
        #     edge = self.augment(edge, flipCode)
        return image, label
    def __len__(self):
        # Return the size of the test set
        return len(self.imgs_path)
class ISIC_Loader(Dataset):
def __init__(self, data_path):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.jpg'))
def augment(self, image, flipCode):
flip = cv2.flip(image, flipCode)
return flip
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('image', 'label').split('.')[0]+"_Segmentation.png"
edge_path = image_path.replace('image', 'label').split('.')[0]+"_Segmentation_edge.png"
image = cv2.imread(image_path)
label = cv2.imread(label_path, 0)
edge = cv2.imread(edge_path, 0)
        image = image.transpose(2, 0, 1)  # (H, W, 3) -> (3, H, W); a bare reshape here would scramble pixel data
label = label.reshape(1, label.shape[0], label.shape[1])
edge = edge.reshape(1, edge.shape[0], edge.shape[1])
label = label / 255
edge = edge / 255
flipCode = random.choice([-1, 0, 1, 2])
if flipCode != 2:
image = self.augment(image, flipCode)
label = self.augment(label, flipCode)
edge = self.augment(edge, flipCode)
return image, label, edge
def __len__(self):
return len(self.imgs_path)
class ISIC_Loadertest(Dataset):
def __init__(self, data_path):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.jpg'))
def augment(self, image, flipCode):
flip = cv2.flip(image, flipCode)
return flip
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('image', 'label').split('.')[0]+"_Segmentation.png"
edge_path = image_path.replace('image', 'label').split('.')[0]+"_Segmentation_edge.png"
image = cv2.imread(image_path)
label = cv2.imread(label_path, 0)
edge = cv2.imread(edge_path, 0)
        image = image.transpose(2, 0, 1)  # (H, W, 3) -> (3, H, W); a bare reshape here would scramble pixel data
label = label.reshape(1, label.shape[0], label.shape[1])
label = label / 255
return image, label
def __len__(self):
return len(self.imgs_path)
class Lung_Loader(Dataset):
def __init__(self, data_path):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'images/*.tif'))
def augment(self, image, flipCode):
flip = cv2.flip(image, flipCode)
return flip
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('images', 'masks').split('.')[0]+".tif"
edge_path = image_path.replace('images', 'edge').split('.')[0]+"_edge.png"
image = cv2.imread(image_path,0)
label = cv2.imread(label_path, 0)
edge = cv2.imread(edge_path, 0)
image = image.reshape(1, image.shape[0], image.shape[1])
label = label.reshape(1, label.shape[0], label.shape[1])
edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
        if edge.max() > 1:
            edge = edge / 255
        # Random augmentation; flipCode 2 means no flip
flipCode = random.choice([-1, 0, 1, 2])
if flipCode != 2:
image = self.augment(image, flipCode)
label = self.augment(label, flipCode)
edge = self.augment(edge, flipCode)
return image, label, edge
def __len__(self):
return len(self.imgs_path)
class Lung_Loadertest(Dataset):
def __init__(self, data_path):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'images/*.tif'))
def augment(self, image, flipCode):
flip = cv2.flip(image, flipCode)
return flip
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('images', 'masks').split('.')[0]+".tif"
image = cv2.imread(image_path,0)
label = cv2.imread(label_path, 0)
image = image.reshape(1, image.shape[0], image.shape[1])
label = label.reshape(1, label.shape[0], label.shape[1])
if label.max() > 1:
label = label / 255
return image, label
def __len__(self):
return len(self.imgs_path)
class CXR(Dataset):
def __init__(self, data_path):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.png'))
def augment(self, image, flipCode):
flip = cv2.flip(image, flipCode)
return flip
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('image', 'mask').split('.')[0]+".png"
edge_path = image_path.replace('image', 'Edge').split('.')[0]+".png"
image = cv2.imread(image_path,0)
label = cv2.imread(label_path, 0)
edge = cv2.imread(edge_path, 0)
image = image.reshape(1, image.shape[0], image.shape[1])
label = label.reshape(1, label.shape[0], label.shape[1])
edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
        if edge.max() > 1:
            edge = edge / 255
        # Random augmentation; flipCode 2 means no flip
flipCode = random.choice([-1, 0, 1, 2])
if flipCode != 2:
image = self.augment(image, flipCode)
label = self.augment(label, flipCode)
edge = self.augment(edge, flipCode)
return image, label, edge
def __len__(self):
return len(self.imgs_path)
class CXRtest(Dataset):
def __init__(self, data_path):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.png'))
def augment(self, image, flipCode):
flip = cv2.flip(image, flipCode)
return flip
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('image', 'mask').split('.')[0]+".png"
image = cv2.imread(image_path,0)
label = cv2.imread(label_path, 0)
image = image.reshape(1, image.shape[0], image.shape[1])
label = label.reshape(1, label.shape[0], label.shape[1])
if label.max() > 1:
label = label / 255
return image, label
def __len__(self):
return len(self.imgs_path)
class JZX_Loader(Dataset):
def __init__(self, data_path):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.bmp'))
def augment(self, image, flipCode):
flip = cv2.flip(image, flipCode)
return flip
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('image', 'GT').split('.')[0]+".bmp"
edge_path = image_path.replace('image', 'Edge').split('.')[0]+"_edge.png"
image = cv2.imread(image_path,0)
label = cv2.imread(label_path, 0)
edge = cv2.imread(edge_path, 0)
image = image.reshape(1, image.shape[0], image.shape[1])
label = label.reshape(1, label.shape[0], label.shape[1])
edge = edge.reshape(1, edge.shape[0], edge.shape[1])
label = label / 255
edge = edge / 255
flipCode = random.choice([-1, 0, 1, 2])
if flipCode != 2:
image = self.augment(image, flipCode)
label = self.augment(label, flipCode)
edge = self.augment(edge, flipCode)
return image, label, edge
def __len__(self):
return len(self.imgs_path)
class JZX_Loaderval(Dataset):
def __init__(self, data_path):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.bmp'))
def augment(self, image, flipCode):
flip = cv2.flip(image, flipCode)
return flip
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('image', 'GT').split('.')[0]+".bmp"
image = cv2.imread(image_path,0)
label = cv2.imread(label_path, 0)
image = image.reshape(1, image.shape[0], image.shape[1])
label = label.reshape(1, label.shape[0], label.shape[1])
label = label / 255
return image, label
def __len__(self):
return len(self.imgs_path)
class COVD(Dataset):
    def __init__(self, data_path):
        # Initializer: collect all image paths under data_path
        self.data_path = data_path
        self.imgs_path = glob.glob(os.path.join(data_path, 'Imgs/*.jpg'))
    def augment(self, image, flipCode):
        # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = horizontal + vertical
        flip = cv2.flip(image, flipCode)
        return flip
    def __getitem__(self, index):
        # Read the image at this index
        image_path = self.imgs_path[index]
        # Derive the label and edge paths from image_path
        label_path = image_path.replace('Imgs', 'GT').split('.')[0]+".png"
        edge_path = image_path.replace('Imgs', 'Edge').split('.')[0]+".png"
        # Read the training image and its label images
        image = cv2.imread(image_path, 0)
        label = cv2.imread(label_path, 0)
        edge = cv2.imread(edge_path, 0)
        # Reshape the arrays to single-channel (C, H, W) layout
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # label = cv2.cvtColor(label, cv2.COLOR_BGR2GRAY)
        image = image.reshape(1, image.shape[0], image.shape[1])
        label = label.reshape(1, label.shape[0], label.shape[1])
        edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
        if edge.max() > 1:
            edge = edge / 255
        # Random augmentation; flipCode 2 means no flip
        flipCode = random.choice([-1, 0, 1, 2])
        if flipCode != 2:
            image = self.augment(image, flipCode)
            label = self.augment(label, flipCode)
            edge = self.augment(edge, flipCode)
        return image, label, edge
    def __len__(self):
        # Return the size of the training set
        return len(self.imgs_path)
class COVDtest(Dataset):
    def __init__(self, data_path):
        # Initializer: collect all image paths under data_path
        self.data_path = data_path
        self.imgs_path = glob.glob(os.path.join(data_path, 'Imgs/*.jpg'))
    def augment(self, image, flipCode):
        # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = horizontal + vertical
        flip = cv2.flip(image, flipCode)
        return flip
    def __getitem__(self, index):
        # Read the image at this index
        image_path = self.imgs_path[index]
        # Derive the label and edge paths from image_path
        label_path = image_path.replace('Imgs', 'GT').split('.')[0]+".png"
        edge_path = image_path.replace('Imgs', 'Edge').split('.')[0]+".png"
        # Read the test image and its label image
        image = cv2.imread(image_path, 0)
        label = cv2.imread(label_path, 0)
        # edge = cv2.imread(edge_path, 0)
        # Reshape the arrays to single-channel (C, H, W) layout
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # label = cv2.cvtColor(label, cv2.COLOR_BGR2GRAY)
        image = image.reshape(1, image.shape[0], image.shape[1])
        label = label.reshape(1, label.shape[0], label.shape[1])
        # edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
        # if edge.max() > 1:
        #     edge = edge / 255
        # No random augmentation at test time
        # flipCode = random.choice([-1, 0, 1, 2])
        # if flipCode != 2:
        #     image = self.augment(image, flipCode)
        #     label = self.augment(label, flipCode)
        #     edge = self.augment(edge, flipCode)
        return image, label
    def __len__(self):
        # Return the size of the test set
        return len(self.imgs_path)
########################### A standard reference implementation #############
import torch.utils.data as data
import PIL.Image as Image
import os
def make_dataset(root):
imgs = []
    n = len(os.listdir(root)) // 2  # each training sample pairs an image with a mask, hence divide by 2
for i in range(n):
img = os.path.join(root, "%03d.png" % i)
mask = os.path.join(root, "%03d_mask.png" % i)
imgs.append((img, mask))
return imgs
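# For example, a root holding 000.png/000_mask.png and 001.png/001_mask.png
# would yield [(root + "/000.png", root + "/000_mask.png"),
#              (root + "/001.png", root + "/001_mask.png")].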
class LiverDataset(data.Dataset):
def __init__(self, root, transform=None, target_transform=None):
imgs = make_dataset(root)
self.imgs = imgs
self.transform = transform
self.target_transform = target_transform
def __getitem__(self, index):
x_path, y_path = self.imgs[index]
origin_x = Image.open(x_path)
origin_y = Image.open(y_path)
if self.transform is not None:
img_x = self.transform(origin_x)
if self.target_transform is not None:
img_y = self.target_transform(origin_y)
return img_x, img_y
def __len__(self):
return len(self.imgs)
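# A minimal usage sketch for LiverDataset; the transform objects below are
# assumptions (any torchvision-style callables work):
#   from torchvision import transforms
#   liver_ds = LiverDataset("/path/to/train",
#                           transform=transforms.ToTensor(),
#                           target_transform=transforms.ToTensor())
#   liver_loader = torch.utils.data.DataLoader(liver_ds, batch_size=4, shuffle=True)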
class Lung1_Loader(Dataset):
def __init__(self, data_path, transform=None, target_transform=None):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'images/*.tif'))
        self.transform = transform
        self.target_transform = target_transform
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('images', 'masks').split('.')[0]+".tif"
edge_path = image_path.replace('images', 'edge').split('.')[0]+"_edge.png"
image = Image.open(image_path)
label = Image.open(label_path)
        edge = Image.open(edge_path)
if self.transform is not None:
image = self.transform(image)
if self.target_transform is not None:
label = self.target_transform(label)
edge = self.target_transform(edge)
return image, label, edge
def __len__(self):
return len(self.imgs_path)
class Lung1_Loadertest(Dataset):
def __init__(self, data_path, transform=None, target_transform=None):
self.data_path = data_path
self.imgs_path = glob.glob(os.path.join(data_path, 'images/*.tif'))
        self.transform = transform
        self.target_transform = target_transform
def __getitem__(self, index):
image_path = self.imgs_path[index]
label_path = image_path.replace('images', 'masks').split('.')[0]+".tif"
image = Image.open(image_path)
label = Image.open(label_path)
if self.transform is not None:
image = self.transform(image)
if self.target_transform is not None:
label = self.target_transform(label)
return image, label
def __len__(self):
return len(self.imgs_path)
class FJJ_Loader(Dataset):
    def __init__(self, data_path):
        # Initializer: collect all image paths under data_path
        self.data_path = data_path
        self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.png'))
    def augment(self, image, flipCode):
        # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = horizontal + vertical
        flip = cv2.flip(image, flipCode)
        return flip
    def __getitem__(self, index):
        # Read the image at this index
        image_path = self.imgs_path[index]
        # Derive the label paths from image_path
        label_path = image_path.replace('image', 'label').split('.')[0]+"_mask.png"
        label_path1 = image_path.replace('image', 'body-origin').split('.')[0]+"_mask.png"
        label_path2 = image_path.replace('image', 'detail-origin').split('.')[0]+"_mask.png"
        # edge_path = image_path.replace('image', 'label').split('.')[0]+"_edge.png"
        # Read the training image and its label images
        image = cv2.imread(image_path, 0)
        label = cv2.imread(label_path, 0)
        label1 = cv2.imread(label_path1, 0)
        label2 = cv2.imread(label_path2, 0)
        # edge = cv2.imread(edge_path, 0)
        # Reshape the arrays to single-channel (C, H, W) layout
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # label = cv2.cvtColor(label, cv2.COLOR_BGR2GRAY)
        image = image.reshape(1, image.shape[0], image.shape[1])
        label = label.reshape(1, label.shape[0], label.shape[1])
        label1 = label1.reshape(1, label1.shape[0], label1.shape[1])
        label2 = label2.reshape(1, label2.shape[0], label2.shape[1])
        # edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
            label1 = label1 / 255
            label2 = label2 / 255
        # if edge.max() > 1:
        #     edge = edge / 255
        # Random augmentation; flipCode 2 means no flip
        flipCode = random.choice([-1, 0, 1, 2])
        if flipCode != 2:
            image = self.augment(image, flipCode)
            label = self.augment(label, flipCode)
            label1 = self.augment(label1, flipCode)
            label2 = self.augment(label2, flipCode)
            # edge = self.augment(edge, flipCode)
        return image, label, label1, label2
    def __len__(self):
        # Return the size of the training set
        return len(self.imgs_path)
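# FJJ_Loader yields the full mask plus two auxiliary masks taken from the
# body-origin and detail-origin directories (presumably a body/detail
# decomposition of the mask). Illustrative path convention:
#   label:  <data_path>/label/xxx_mask.png
#   label1: <data_path>/body-origin/xxx_mask.png
#   label2: <data_path>/detail-origin/xxx_mask.png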
class FJJ_Loadertest(Dataset):
    def __init__(self, data_path):
        # Initializer: collect all image paths under data_path
        self.data_path = data_path
        self.imgs_path = glob.glob(os.path.join(data_path, 'image/*.png'))
    def augment(self, image, flipCode):
        # Augment with cv2.flip: flipCode 1 = horizontal, 0 = vertical, -1 = horizontal + vertical
        flip = cv2.flip(image, flipCode)
        return flip
    def __getitem__(self, index):
        # Read the image at this index
        image_path = self.imgs_path[index]
        # Derive the label path from image_path
        label_path = image_path.replace('image', 'label').split('.')[0]+"_mask.png"
        # label_path1 = image_path.replace('image', 'body-origin').split('.')[0]+"_mask.png"
        # label_path2 = image_path.replace('image', 'detail-origin').split('.')[0]+"_mask.png"
        # edge_path = image_path.replace('image', 'label').split('.')[0]+"_edge.png"
        # Read the test image and its label image
        image = cv2.imread(image_path, 0)
        label = cv2.imread(label_path, 0)
        # label1 = cv2.imread(label_path1, 0)
        # label2 = cv2.imread(label_path2, 0)
        # edge = cv2.imread(edge_path, 0)
        # Reshape the arrays to single-channel (C, H, W) layout
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # label = cv2.cvtColor(label, cv2.COLOR_BGR2GRAY)
        image = image.reshape(1, image.shape[0], image.shape[1])
        label = label.reshape(1, label.shape[0], label.shape[1])
        # label1 = label1.reshape(1, label1.shape[0], label1.shape[1])
        # label2 = label2.reshape(1, label2.shape[0], label2.shape[1])
        # edge = edge.reshape(1, edge.shape[0], edge.shape[1])
        # Normalize labels: rescale pixel values of 255 to 1
        if label.max() > 1:
            label = label / 255
            # label1 = label1 / 255
            # label2 = label2 / 255
        # if edge.max() > 1:
        #     edge = edge / 255
        # No random augmentation at test time
        # flipCode = random.choice([-1, 0, 1, 2])
        # if flipCode != 2:
        #     image = self.augment(image, flipCode)
        #     label = self.augment(label, flipCode)
        #     label1 = self.augment(label1, flipCode)
        #     label2 = self.augment(label2, flipCode)
        #     edge = self.augment(edge, flipCode)
        return image, label
    def __len__(self):
        # Return the size of the test set
        return len(self.imgs_path)
if __name__ == "__main__":
isbi_dataset = ISBI_Loader("/home/wangkun/data/train_96")
print("数据个数:", len(isbi_dataset))
train_loader = torch.utils.data.DataLoader(dataset=isbi_dataset,
batch_size=4,
shuffle=True)
for image, label, edge in train_loader:
print(image.shape)
| 32.0213 | 95 | 0.57648 | 3,489 | 28,563 | 4.530811 | 0.038693 | 0.044408 | 0.043269 | 0.038462 | 0.947368 | 0.942814 | 0.942814 | 0.935919 | 0.93497 | 0.928454 | 0 | 0.034423 | 0.291111 | 28,563 | 892 | 96 | 32.0213 | 0.746296 | 0.201064 | 0 | 0.855649 | 0 | 0 | 0.036476 | 0.003143 | 0 | 0 | 0 | 0 | 0 | 1 | 0.154812 | false | 0 | 0.020921 | 0.039749 | 0.330544 | 0.004184 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
273cdaee85fd54b52f7398082c212eaa97d51248 | 1,133 | py | Python | extra_credit/test_parenthetics.py | dave5801/data-structures | b23b7d6e2201325fe94c6fa5d6c0a33ea53be3cc | [
"MIT"
] | null | null | null | extra_credit/test_parenthetics.py | dave5801/data-structures | b23b7d6e2201325fe94c6fa5d6c0a33ea53be3cc | [
"MIT"
] | null | null | null | extra_credit/test_parenthetics.py | dave5801/data-structures | b23b7d6e2201325fe94c6fa5d6c0a33ea53be3cc | [
"MIT"
] | null | null | null | """Test valid parenthesis"""
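# The tests below pin down the contract of balanced_parens.is_balanced. A
# counter-based sketch consistent with them (the real module may differ):
#
#   def is_balanced(s):
#       depth = 0
#       for ch in s:
#           if ch == '(':
#               depth += 1
#           elif ch == ')':
#               if depth == 0:
#                   return "broken"
#               depth -= 1
#       return "open" if depth else "balanced"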
def test_is_balanced():
"""Test is Balanced."""
from balanced_parens import is_balanced
assert is_balanced("()") == "balanced"
def test_broken():
"""Test if broken."""
from balanced_parens import is_balanced
assert is_balanced("))") == "broken"
def test_is_open():
"""Test is open."""
from balanced_parens import is_balanced
assert is_balanced("((") == "open"
def test_string_1():
"""Test from CodeWars."""
from balanced_parens import is_balanced
assert is_balanced(" (") == "open"
def test_string_2():
"""Test from CodeWars."""
from balanced_parens import is_balanced
assert is_balanced(")test") == "broken"
def test_string_3():
"""Test from CodeWars."""
from balanced_parens import is_balanced
assert is_balanced("") == "balanced"
def test_string_4():
"""Test from CodeWars."""
from balanced_parens import is_balanced
assert is_balanced("hi())(") == "broken"
def test_string_5():
"""Test from CodeWars."""
from balanced_parens import is_balanced
assert is_balanced("hi(hi)()") == "balanced" | 23.122449 | 48 | 0.660194 | 142 | 1,133 | 4.992958 | 0.147887 | 0.253879 | 0.203103 | 0.270804 | 0.77292 | 0.77292 | 0.77292 | 0.77292 | 0.77292 | 0.702398 | 0 | 0.00547 | 0.193292 | 1,133 | 49 | 48 | 23.122449 | 0.770241 | 0.150044 | 0 | 0.333333 | 0 | 0 | 0.083878 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 12 |
27778576b5961ea5513004a86ed0929f0b0c65fc | 115 | py | Python | class1-intro-to-python/my_module.py | spu-bigdataanalytics/materials | d95be1cf1d656a49527823df9c4d1fa302a95cc1 | [
"MIT"
] | null | null | null | class1-intro-to-python/my_module.py | spu-bigdataanalytics/materials | d95be1cf1d656a49527823df9c4d1fa302a95cc1 | [
"MIT"
] | 4 | 2020-03-24T18:03:37.000Z | 2021-08-23T20:31:59.000Z | class1-intro-to-python/my_module.py | spu-bigdataanalytics-193/materials | d95be1cf1d656a49527823df9c4d1fa302a95cc1 | [
"MIT"
] | null | null | null | def multiply(a, b):
return a * b
def divide(a, b, rounding_points=0):
return round(a / b, rounding_points) | 19.166667 | 40 | 0.652174 | 20 | 115 | 3.65 | 0.5 | 0.109589 | 0.273973 | 0.438356 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011111 | 0.217391 | 115 | 6 | 40 | 19.166667 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
27a4d5e3f736a39204ceb8674631c1b592891076 | 219 | py | Python | controller/beverage_controller.py | WestHamster/VendingMachine | 3aa4b38af2a16810b8cabd6b1187498b92fbbc67 | [
"MIT"
] | null | null | null | controller/beverage_controller.py | WestHamster/VendingMachine | 3aa4b38af2a16810b8cabd6b1187498b92fbbc67 | [
"MIT"
] | null | null | null | controller/beverage_controller.py | WestHamster/VendingMachine | 3aa4b38af2a16810b8cabd6b1187498b92fbbc67 | [
"MIT"
] | null | null | null | class BeverageController(object):
def __init__(self,BeverageService):
self.BeverageService = BeverageService
def addBeverage(self,id,name,ingredients):
return self.BeverageService.addBeverage(id,name,ingredients) | 36.5 | 62 | 0.826484 | 23 | 219 | 7.695652 | 0.521739 | 0.322034 | 0.19209 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.077626 | 219 | 6 | 62 | 36.5 | 0.876238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0.2 | 0.8 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
fdc66d37d5e74b978e92e618d36a9306a2c6d59c | 20,677 | py | Python | src/stackuchin/__init__.py | Rungutan/stackuchin | 03f5cef9f0fe383341f6a59ce7d14ce2e05b28eb | [
"MIT"
] | 2 | 2020-10-06T13:30:40.000Z | 2020-10-11T22:48:21.000Z | src/stackuchin/__init__.py | Rungutan/stackuchin | 03f5cef9f0fe383341f6a59ce7d14ce2e05b28eb | [
"MIT"
] | null | null | null | src/stackuchin/__init__.py | Rungutan/stackuchin | 03f5cef9f0fe383341f6a59ce7d14ce2e05b28eb | [
"MIT"
] | 1 | 2020-06-07T02:16:26.000Z | 2020-06-07T02:16:26.000Z | import argparse
from argparse import RawTextHelpFormatter
import sys
import os
import yaml
PACKAGE_PARENT = '..'
SCRIPT_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser(__file__))))
sys.path.append(os.path.normpath(os.path.join(SCRIPT_DIR, PACKAGE_PARENT)))
from stackuchin.create import create
from stackuchin.delete import delete
from stackuchin.update import update
from stackuchin.start_pipeline import start_pipeline
class StackuchinCLI(object):
def __init__(self):
parser = argparse.ArgumentParser(
description='CLI tool to automatically create, update and delete AWS CloudFormation '
'stacks in multiple AWS accounts and regions at the same time',
usage='''stackuchin <command> [<args>]
To see help text, you can run:
stackuchin help
stackuchin version
stackuchin create --help
stackuchin delete --help
stackuchin update --help
stackuchin pipeline --help
''')
parser.add_argument('command', help='Command to run')
# parse_args defaults to [1:] for args, but you need to
# exclude the rest of the args too, or validation will fail
args = parser.parse_args(sys.argv[1:2])
if not hasattr(self, args.command):
parser.print_help()
exit(1)
# use dispatch pattern to invoke method with same name
getattr(self, args.command)()
# noinspection PyMethodMayBeStatic
def version(self):
print("1.6.0")
# noinspection PyMethodMayBeStatic
def create(self):
parser = argparse.ArgumentParser(
description='Create command system',
formatter_class=RawTextHelpFormatter
)
parser.add_argument('--stack_file', dest="stack_file"
, default='./cloudformation-stacks.yaml'
, help="The YAML file which contains your stack definitions.\n"
"Defaults to \"./cloudformation-stacks.yaml\" if not specified.")
parser.add_argument('--stack_name', dest="stack_name", required=True
, help="The stack that you wish to create")
parser.add_argument('--secret', dest="secret", required=False, default=None
, action='append', metavar='Parameter=Value'
, help='Argument used to specify values for NoEcho parameters in your stack')
parser.add_argument('--slack_webhook', dest="slack_webhook", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_SLACK.\n'
'If argument is specified, any notifications will be sent to this URL.\n'
'If not specified, the script will check for env var STACKUCHIN_SLACK.\n'
'If neither argument nor environment variable is specified, then no notifications '
'will be sent.')
parser.add_argument('--s3_bucket', dest="s3_bucket", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_BUCKET_NAME.\n'
'If argument is specified, then the template is first uploaded here before '
'used in the stack.\n'
'If not specified, the script will check for env var STACKUCHIN_BUCKET_NAME.\n'
'If neither argument nor environment variable is specified, then the script will '
'attempt to feed the template directly to the AWS API call, however, due to '
'AWS CloudFormation API call limitations, you might end up with a bigger template '
'in byte size than the max value allowed by AWS.\n'
'Details here -> https://docs.aws.amazon.com/AWSCloudFormation/latest/'
'UserGuide/cloudformation-limits.html')
parser.add_argument('--s3_prefix', dest="s3_prefix", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_BUCKET_PREFIX.\n'
'The bucket prefix path to be used when the S3 bucket is defined.')
parser.add_argument('--only_errors', dest="only_errors", required=False, default=False, action="store_true"
, help='By default, all notifications are sent to Slack if slack_webhook is defined.\n'
'By running this command you ensure that only errors are getting pushed.\n'
'This is useful in case you don\'t want to see COMPLETE and START notifications.')
parser.add_argument('-p', '--profile', dest='profile', default=None
, help='The AWS profile you\'ll be using.\n'
'If not specified, the "default" profile will be used. \n'
'If no profiles are defined, then the default AWS credential mechanism starts.\n')
args = parser.parse_args(sys.argv[2:])
slack_webhook_url = None
if args.slack_webhook is not None:
slack_webhook_url = args.slack_webhook
else:
if "STACKUCHIN_SLACK" in os.environ:
slack_webhook_url = os.environ.get('STACKUCHIN_SLACK')
s3_bucket = None
if args.s3_bucket is not None:
s3_bucket = args.s3_bucket
else:
if "STACKUCHIN_BUCKET_NAME" in os.environ:
s3_bucket = os.environ.get('STACKUCHIN_BUCKET_NAME')
s3_prefix = None
if args.s3_prefix is not None:
s3_prefix = args.s3_prefix
else:
if "STACKUCHIN_BUCKET_PREFIX" in os.environ:
s3_prefix = os.environ.get('STACKUCHIN_BUCKET_PREFIX')
create(args.profile, args.stack_file, args.stack_name,
args.secret, slack_webhook_url, s3_bucket, s3_prefix, args.only_errors)
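    # Example invocation of the create command; the stack name, parameter
    # name, and webhook URL below are hypothetical:
    #   stackuchin create --stack_name my-stack \
    #       --secret DBPassword=s3cret \
    #       --slack_webhook https://hooks.slack.com/services/T000/B000/XXXX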
# noinspection PyMethodMayBeStatic
def delete(self):
parser = argparse.ArgumentParser(
description='Delete command system',
formatter_class=RawTextHelpFormatter
)
parser.add_argument('--stack_file', dest="stack_file"
, default='./cloudformation-stacks.yaml'
, help="The YAML file which contains your stack definitions.\n"
"Defaults to \"./cloudformation-stacks.yaml\" if not specified.")
parser.add_argument('--stack_name', dest="stack_name", required=True
, help="The stack that you wish to create")
parser.add_argument('--slack_webhook', dest="slack_webhook", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_SLACK.\n'
'If argument is specified, any notifications will be sent to this URL.\n'
'If not specified, the script will check for env var STACKUCHIN_SLACK.\n'
'If neither argument nor environment variable is specified, then no notifications '
'will be sent.')
parser.add_argument('--only_errors', dest="only_errors", required=False, default=False, action="store_true"
, help='By default, all notifications are sent to Slack if slack_webhook is defined.\n'
'By running this command you ensure that only errors are getting pushed.\n'
'This is useful in case you don\'t want to see COMPLETE and START notifications.')
parser.add_argument('-p', '--profile', dest='profile', default=None
, help='The AWS profile you\'ll be using.\n'
'If not specified, the "default" profile will be used. \n'
'If no profiles are defined, then the default AWS credential mechanism starts.\n')
args = parser.parse_args(sys.argv[2:])
slack_webhook_url = None
if args.slack_webhook is not None:
slack_webhook_url = args.slack_webhook
else:
if "STACKUCHIN_SLACK" in os.environ:
slack_webhook_url = os.environ.get('STACKUCHIN_SLACK')
delete(args.profile, args.stack_file, args.stack_name, slack_webhook_url, args.only_errors)
# noinspection PyMethodMayBeStatic
def update(self):
parser = argparse.ArgumentParser(
description='Update command system',
formatter_class=RawTextHelpFormatter
)
parser.add_argument('--stack_file', dest="stack_file"
, default='./cloudformation-stacks.yaml'
, help="The YAML file which contains your stack definitions.\n"
"Defaults to \"./cloudformation-stacks.yaml\" if not specified.")
parser.add_argument('--stack_name', dest="stack_name", required=True
, help="The stack that you wish to update")
parser.add_argument('--secret', dest="secret", required=False, default=None
, action='append', metavar='Parameter=Value'
, help='Argument used to specify values for NoEcho parameters in your stack')
parser.add_argument('--slack_webhook', dest="slack_webhook", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_SLACK.\n'
'If argument is specified, any notifications will be sent to this URL.\n'
'If not specified, the script will check for env var STACKUCHIN_SLACK.\n'
'If neither argument nor environment variable is specified, then no notifications '
'will be sent.')
parser.add_argument('--s3_bucket', dest="s3_bucket", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_BUCKET_NAME.\n'
'If argument is specified, then the template is first uploaded here before '
'used in the stack.\n'
'If not specified, the script will check for env var STACKUCHIN_BUCKET_NAME.\n'
'If neither argument nor environment variable is specified, then the script will '
'attempt to feed the template directly to the AWS API call, however, due to '
'AWS CloudFormation API call limitations, you might end up with a bigger template '
'in byte size than the max value allowed by AWS.\n'
'Details here -> https://docs.aws.amazon.com/AWSCloudFormation/latest/'
'UserGuide/cloudformation-limits.html')
parser.add_argument('--s3_prefix', dest="s3_prefix", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_BUCKET_PREFIX.\n'
'The bucket prefix path to be used when the S3 bucket is defined.')
parser.add_argument('--only_errors', dest="only_errors", required=False, default=False, action="store_true"
, help='By default, all notifications are sent to Slack if slack_webhook is defined.\n'
'By running this command you ensure that only errors are getting pushed.\n'
'This is useful in case you don\'t want to see COMPLETE and START notifications.')
parser.add_argument('-p', '--profile', dest='profile', default=None
, help='The AWS profile you\'ll be using.\n'
'If not specified, the "default" profile will be used. \n'
'If no profiles are defined, then the default AWS credential mechanism starts.\n')
args = parser.parse_args(sys.argv[2:])
slack_webhook_url = None
if args.slack_webhook is not None:
slack_webhook_url = args.slack_webhook
else:
if "STACKUCHIN_SLACK" in os.environ:
slack_webhook_url = os.environ.get('STACKUCHIN_SLACK')
s3_bucket = None
if args.s3_bucket is not None:
s3_bucket = args.s3_bucket
else:
if "STACKUCHIN_BUCKET_NAME" in os.environ:
s3_bucket = os.environ.get('STACKUCHIN_BUCKET_NAME')
s3_prefix = None
if args.s3_prefix is not None:
s3_prefix = args.s3_prefix
else:
if "STACKUCHIN_BUCKET_PREFIX" in os.environ:
s3_prefix = os.environ.get('STACKUCHIN_BUCKET_PREFIX')
update(args.profile, args.stack_file, args.stack_name, args.secret,
slack_webhook_url, s3_bucket, s3_prefix, args.only_errors)
# noinspection PyMethodMayBeStatic
def pipeline(self):
parser = argparse.ArgumentParser(
            description='Pipeline command system',
formatter_class=RawTextHelpFormatter
)
parser.add_argument('--stack_file', dest="stack_file"
, default='./cloudformation-stacks.yaml'
, help="The YAML file which contains your stack definitions.\n"
"Defaults to \"./cloudformation-stacks.yaml\" if not specified.")
parser.add_argument('--pipeline_file', dest="pipeline_file", required=True
, help="The pipeline definition file to run your deployments.")
parser.add_argument('--slack_webhook', dest="slack_webhook", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_SLACK.\n'
'If argument is specified, any notifications will be sent to this URL.\n'
'If not specified, the script will check for env var STACKUCHIN_SLACK.\n'
'If neither argument nor environment variable is specified, then no notifications '
'will be sent.')
parser.add_argument('--s3_bucket', dest="s3_bucket", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_BUCKET_NAME.\n'
'If argument is specified, then the template is first uploaded here before '
'used in the stack.\n'
'If not specified, the script will check for env var STACKUCHIN_BUCKET_NAME.\n'
'If neither argument nor environment variable is specified, then the script will '
'attempt to feed the template directly to the AWS API call, however, due to '
'AWS CloudFormation API call limitations, you might end up with a bigger template '
'in byte size than the max value allowed by AWS.\n'
'Details here -> https://docs.aws.amazon.com/AWSCloudFormation/latest/'
'UserGuide/cloudformation-limits.html')
parser.add_argument('--s3_prefix', dest="s3_prefix", required=False, default=None
, help='Argument used to overwrite environment variable STACKUCHIN_BUCKET_PREFIX.\n'
'The bucket prefix path to be used when the S3 bucket is defined.')
parser.add_argument('--only_errors', dest="only_errors", required=False, default=False, action="store_true"
, help='By default, all notifications are sent to Slack if slack_webhook is defined.\n'
'By running this command you ensure that only errors are getting pushed.\n'
'This is useful in case you don\'t want to see COMPLETE and START notifications.')
parser.add_argument('-p', '--profile', dest='profile', default=None
, help='The AWS profile you\'ll be using.\n'
'If not specified, the "default" profile will be used. \n'
'If no profiles are defined, then the default AWS credential mechanism starts.\n')
args = parser.parse_args(sys.argv[2:])
stacks = None
try:
with open(args.stack_file, 'r') as stack_stream:
stacks = yaml.safe_load(stack_stream)
except yaml.YAMLError as exc:
print(exc)
exit(1)
pipeline = None
try:
with open(args.pipeline_file, 'r') as pipeline_stream:
pipeline = yaml.safe_load(pipeline_stream)
except yaml.YAMLError as exc:
print(exc)
exit(1)
if 'pipeline' not in pipeline:
print("The pipeline_file {} must contain a top-level object called \"pipeline\".".format(
args.pipeline_file))
exit(1)
if 'pipeline_type' in pipeline['pipeline']:
if str(pipeline['pipeline']['pipeline_type']).lower() not in ['parallel', 'sequential']:
print("The value for \"pipeline_type\" can be either \"parallel\" or \"sequential\".")
print("If not specified, the default value is \"sequential\".")
exit(1)
if 'update' not in pipeline['pipeline'] and \
'delete' not in pipeline['pipeline'] and \
'create' not in pipeline['pipeline']:
print("An action type of either \"update\", \"create\", or \"delete\" must be defined "
"in the \"pipeline\" object definition.")
exit(1)
for action in ["update", "create", "delete"]:
if action in pipeline["pipeline"]:
if type(pipeline["pipeline"][action]) is not list:
print("Expected a list of inputs for the command {}.".format(action))
exit(1)
for item in pipeline["pipeline"][action]:
if "stack_name" not in item:
print("A property with key \"stack_name\" must be present in each item "
"for the {} command.".format(action))
exit(1)
if "no_echo" in item:
if type(item["no_echo"]) is not list:
print("If you want to specify \"secrets\", make sure they "
"are a list of Name/Value objects.")
exit(1)
for secret in item["no_echo"]:
if "Name" not in secret or "Value" not in secret:
print("You must specify a combination of Name/Value objects "
"for each item in your secrets")
exit(1)
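        # For reference, a minimal pipeline_file that satisfies the checks
        # above might look like this (stack names and secret values are
        # hypothetical):
        #   pipeline:
        #     pipeline_type: sequential
        #     create:
        #       - stack_name: my-network-stack
        #     update:
        #       - stack_name: my-app-stack
        #         no_echo:
        #           - Name: DBPassword
        #             Value: s3cret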
slack_webhook_url = None
if args.slack_webhook is not None:
slack_webhook_url = args.slack_webhook
else:
if "STACKUCHIN_SLACK" in os.environ:
slack_webhook_url = os.environ.get('STACKUCHIN_SLACK')
s3_bucket = None
if args.s3_bucket is not None:
s3_bucket = args.s3_bucket
else:
if "STACKUCHIN_BUCKET_NAME" in os.environ:
s3_bucket = os.environ.get('STACKUCHIN_BUCKET_NAME')
s3_prefix = None
if args.s3_prefix is not None:
s3_prefix = args.s3_prefix
else:
if "STACKUCHIN_BUCKET_PREFIX" in os.environ:
s3_prefix = os.environ.get('STACKUCHIN_BUCKET_PREFIX')
start_pipeline(args.profile, args.stack_file, args.pipeline_file,
slack_webhook_url, s3_bucket, s3_prefix, args.only_errors)
def main():
StackuchinCLI()
if __name__ == '__main__':
StackuchinCLI()
| 58.908832 | 118 | 0.569715 | 2,316 | 20,677 | 4.965889 | 0.107081 | 0.037562 | 0.042866 | 0.025041 | 0.805495 | 0.784714 | 0.775846 | 0.770629 | 0.767412 | 0.767412 | 0 | 0.005269 | 0.34831 | 20,677 | 350 | 119 | 59.077143 | 0.848237 | 0.015911 | 0 | 0.718954 | 0 | 0 | 0.386991 | 0.035744 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022876 | false | 0 | 0.029412 | 0 | 0.055556 | 0.039216 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fddfb4376fee417763810ae32b3a4e06e66b1b2c | 70,761 | py | Python | ximpia/xpsite/migrations/0001_initial.py | Ximpia/ximpia | ed2ad22faa42ceca2bde782a47624e5a6ef60e3b | [
"Apache-2.0"
] | 1 | 2020-09-11T01:54:24.000Z | 2020-09-11T01:54:24.000Z | ximpia/xpsite/migrations/0001_initial.py | Ximpia/ximpia | ed2ad22faa42ceca2bde782a47624e5a6ef60e3b | [
"Apache-2.0"
] | null | null | null | ximpia/xpsite/migrations/0001_initial.py | Ximpia/ximpia | ed2ad22faa42ceca2bde782a47624e5a6ef60e3b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Param'
db.create_table('SITE_PARAMETER', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_PARAMETER')),
('mode', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, db_column='MODE', blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=20, db_column='NAME')),
('value', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, db_column='VALUE', blank=True)),
('paramType', self.gf('django.db.models.fields.CharField')(default='string', max_length=10, db_column='PARAM_TYPE')),
))
db.send_create_signal(u'xpsite', ['Param'])
# Adding model 'MetaKey'
db.create_table('SITE_META_KEY', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_META_KEY')),
('name', self.gf('django.db.models.fields.CharField')(max_length=100, db_column='NAME')),
('keyType', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Param'], db_column='ID_SITE_PARAMETER')),
))
db.send_create_signal(u'xpsite', ['MetaKey'])
# Adding model 'TagMode'
db.create_table('SITE_TAG_MODE', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_TAG_MODE')),
('mode', self.gf('django.db.models.fields.CharField')(max_length=30, db_column='MODE')),
('isPublic', self.gf('django.db.models.fields.BooleanField')(default=True, db_column='IS_PUBLIC')),
))
db.send_create_signal(u'xpsite', ['TagMode'])
# Adding model 'Tag'
db.create_table('SITE_TAG', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_TAG')),
('name', self.gf('django.db.models.fields.CharField')(max_length=30, db_column='NAME')),
('mode', self.gf('django.db.models.fields.related.ForeignKey')(related_name='tag_mode', db_column='ID_MODE', to=orm['xpsite.TagMode'])),
('popularity', self.gf('django.db.models.fields.IntegerField')(default=1, null=True, db_column='POPULARITY', blank=True)),
('isPublic', self.gf('django.db.models.fields.BooleanField')(default=True, db_column='IS_PUBLIC')),
))
db.send_create_signal(u'xpsite', ['Tag'])
# Adding model 'Address'
db.create_table('SITE_ADDRESS', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_ADDRESS')),
('street', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, db_column='STREET', blank=True)),
('city', self.gf('django.db.models.fields.CharField')(max_length=20, db_column='CITY')),
('region', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, db_column='REGION', blank=True)),
('zipCode', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, db_column='ZIP_CODE', blank=True)),
('country', self.gf('django.db.models.fields.CharField')(max_length=2, db_column='COUNTRY')),
('long', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=18, decimal_places=12, blank=True)),
('lat', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=18, decimal_places=12, blank=True)),
))
db.send_create_signal(u'xpsite', ['Address'])
# Adding model 'UserChannel'
db.create_table('SITE_USER', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_USER')),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], db_column='ID_USER')),
('title', self.gf('django.db.models.fields.CharField')(max_length=20, db_column='TITLE')),
('name', self.gf('django.db.models.fields.CharField')(default='user', max_length=20, db_column='NAME')),
('tag', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Tag'], null=True, db_column='ID_TAG', blank=True)),
))
db.send_create_signal(u'xpsite', ['UserChannel'])
# Adding unique constraint on 'UserChannel', fields ['user', 'name']
db.create_unique('SITE_USER', ['ID_USER', 'NAME'])
# Adding model 'Category'
db.create_table('SITE_CATEGORY', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_CATEGORY')),
('name', self.gf('django.db.models.fields.CharField')(max_length=55, db_column='NAME')),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=200, db_column='SLUG')),
('description', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, db_column='DESCRIPTION', blank=True)),
('parent', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='category_parent', null=True, db_column='ID_PARENT', to=orm['xpsite.Category'])),
('image', self.gf('filebrowser.fields.FileBrowseField')(max_length=200, null=True, db_column='IMAGE', blank=True)),
('type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Param'], db_column='ID_SITE_PARAMETER')),
('isPublished', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_PUBLISHED')),
('isPublic', self.gf('django.db.models.fields.BooleanField')(default=True, db_column='IS_PUBLIC')),
('popularity', self.gf('django.db.models.fields.IntegerField')(default=1, null=True, db_column='POPULARITY', blank=True)),
('menuOrder', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=1, db_column='MENU_ORDER')),
))
db.send_create_signal(u'xpsite', ['Category'])
# Adding model 'SocialNetworkUser'
db.create_table('SITE_SOCIAL_NETWORK_USER', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_SOCIAL_NETWORK_USER')),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], db_column='ID_USER')),
('socialNetwork', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpcore.CoreParam'], db_column='ID_CORE_PARAMETER')),
('socialId', self.gf('django.db.models.fields.BigIntegerField')(db_column='SOCIAL_ID')),
('token', self.gf('django.db.models.fields.CharField')(max_length=255, db_column='TOKEN')),
('tokenSecret', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, db_column='TOKEN_SECRET', blank=True)),
))
db.send_create_signal(u'xpsite', ['SocialNetworkUser'])
# Adding unique constraint on 'SocialNetworkUser', fields ['user', 'socialNetwork']
db.create_unique('SITE_SOCIAL_NETWORK_USER', ['ID_USER', 'ID_CORE_PARAMETER'])
# Adding model 'Setting'
db.create_table('SITE_SETTING', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_SETTING')),
('application', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='site_setting_app', null=True, db_column='ID_CORE_APPLICATION', to=orm['xpcore.Application'])),
('name', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.MetaKey'], db_column='ID_META')),
('value', self.gf('django.db.models.fields.TextField')(db_column='VALUE')),
('description', self.gf('django.db.models.fields.CharField')(max_length=255, db_column='DESCRIPTION')),
('mustAutoload', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='MUST_AUTOLOAD')),
))
db.send_create_signal(u'xpsite', ['Setting'])
# Adding model 'UserMeta'
db.create_table('SITE_USER_META', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_USER_PROFILE')),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], db_column='ID_USER')),
('meta', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.MetaKey'], db_column='ID_META')),
('value', self.gf('django.db.models.fields.TextField')(db_column='VALUE')),
))
db.send_create_signal(u'xpsite', ['UserMeta'])
# Adding model 'UserProfile'
db.create_table('SITE_USER_PROFILE', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_USER_PROFILE')),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], db_column='ID_USER')),
('image', self.gf('filebrowser.fields.FileBrowseField')(max_length=200, null=True, db_column='IMAGE', blank=True)),
('status', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Param'], db_column='ID_SITE_PARAMETER')),
))
db.send_create_signal(u'xpsite', ['UserProfile'])
# Adding model 'UserAddress'
db.create_table('SITE_USER_ADDRESS', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_USER_ADDRESS')),
('userProfile', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.UserProfile'], db_column='ID_SITE_USER_PROFILE')),
('address', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Address'], db_column='ID_ADDRESS')),
('type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Param'], db_column='ID_SITE_PARAMETER')),
))
db.send_create_signal(u'xpsite', ['UserAddress'])
# Adding model 'Group'
db.create_table('SITE_GROUP', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_GROUP')),
('group', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.Group'], unique=True, db_column='ID_GROUP_SYS')),
('parent', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='groupchannel_parent', null=True, db_column='ID_PARENT', to=orm['xpsite.Group'])),
('groupNameId', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, db_column='GROUP_NAME_ID', blank=True)),
('isPublic', self.gf('django.db.models.fields.BooleanField')(default=True, db_column='IS_PUBLIC')),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Category'], db_column='ID_CATEGORY')),
))
db.send_create_signal(u'xpsite', ['Group'])
# Adding model 'GroupAccess'
db.create_table('SITE_GROUP_ACCESS', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_GROUP_ACCESS')),
('groupFrom', self.gf('django.db.models.fields.related.ForeignKey')(related_name='groupaccess_from', db_column='ID_GROUP_FROM', to=orm['xpsite.Group'])),
('groupTo', self.gf('django.db.models.fields.related.ForeignKey')(related_name='groupaccess_to', db_column='ID_GROUP_TO', to=orm['xpsite.Group'])),
))
db.send_create_signal(u'xpsite', ['GroupAccess'])
# Adding model 'UserChannelGroup'
db.create_table('SITE_USER_GROUP', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_USER_GROUP')),
('group', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Group'], db_column='ID_GROUP')),
('userChannel', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.UserChannel'], db_column='ID_USER_CHANNEL')),
))
db.send_create_signal(u'xpsite', ['UserChannelGroup'])
# Adding model 'GroupTag'
db.create_table('SITE_GROUP_TAG', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_GROUP_TAG')),
('group', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Group'], db_column='ID_GROUP')),
('tag', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Tag'], db_column='ID_TAG')),
))
db.send_create_signal(u'xpsite', ['GroupTag'])
# Adding model 'SignupData'
db.create_table('SITE_SIGNUP_DATA', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_SIGNUP_DATA')),
('user', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30, db_column='USER')),
('activationCode', self.gf('django.db.models.fields.PositiveSmallIntegerField')(db_column='ACTIVATION_CODE')),
('data', self.gf('django.db.models.fields.TextField')(db_column='DATA')),
))
db.send_create_signal(u'xpsite', ['SignupData'])
# Adding model 'Invitation'
db.create_table('SITE_INVITATION', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_INVITATION')),
('fromUser', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], db_column='ID_USER')),
('invitationCode', self.gf('django.db.models.fields.CharField')(unique=True, max_length=10, db_column='INVITATION_CODE')),
('email', self.gf('django.db.models.fields.EmailField')(unique=True, max_length=75, db_column='EMAIL')),
('status', self.gf('django.db.models.fields.CharField')(default='pending', max_length=10, db_column='STATUS')),
('number', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=1, db_column='NUMBER')),
('message', self.gf('django.db.models.fields.TextField')(null=True, db_column='MESSAGE', blank=True)),
))
db.send_create_signal(u'xpsite', ['Invitation'])
# Adding model 'InvitationMeta'
db.create_table('SITE_INVITATION_META', (
('dateCreate', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, db_column='DATE_CREATE', blank=True)),
('dateModify', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, db_column='DATE_MODIFY', blank=True)),
('userCreateId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_CREATE_ID', blank=True)),
('userModifyId', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='USER_MODIFY_ID', blank=True)),
('isDeleted', self.gf('django.db.models.fields.BooleanField')(default=False, db_column='IS_DELETED')),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True, db_column='ID_SITE_USER_PROFILE')),
('invitation', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.Invitation'], db_column='ID_INVITATION')),
('meta', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['xpsite.MetaKey'], db_column='ID_META')),
('value', self.gf('django.db.models.fields.TextField')(db_column='VALUE')),
))
db.send_create_signal(u'xpsite', ['InvitationMeta'])
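    # Note (standard South behavior, not project-specific documentation):
    # applying this migration runs forwards() above; migrating back to the
    # previous migration number runs backwards() below, which drops the unique
    # constraints before the tables that carry them.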
def backwards(self, orm):
# Removing unique constraint on 'SocialNetworkUser', fields ['user', 'socialNetwork']
db.delete_unique('SITE_SOCIAL_NETWORK_USER', ['ID_USER', 'ID_CORE_PARAMETER'])
# Removing unique constraint on 'UserChannel', fields ['user', 'name']
db.delete_unique('SITE_USER', ['ID_USER', 'NAME'])
# Deleting model 'Param'
db.delete_table('SITE_PARAMETER')
# Deleting model 'MetaKey'
db.delete_table('SITE_META_KEY')
# Deleting model 'TagMode'
db.delete_table('SITE_TAG_MODE')
# Deleting model 'Tag'
db.delete_table('SITE_TAG')
# Deleting model 'Address'
db.delete_table('SITE_ADDRESS')
# Deleting model 'UserChannel'
db.delete_table('SITE_USER')
# Deleting model 'Category'
db.delete_table('SITE_CATEGORY')
# Deleting model 'SocialNetworkUser'
db.delete_table('SITE_SOCIAL_NETWORK_USER')
# Deleting model 'Setting'
db.delete_table('SITE_SETTING')
# Deleting model 'UserMeta'
db.delete_table('SITE_USER_META')
# Deleting model 'UserProfile'
db.delete_table('SITE_USER_PROFILE')
# Deleting model 'UserAddress'
db.delete_table('SITE_USER_ADDRESS')
# Deleting model 'Group'
db.delete_table('SITE_GROUP')
# Deleting model 'GroupAccess'
db.delete_table('SITE_GROUP_ACCESS')
# Deleting model 'UserChannelGroup'
db.delete_table('SITE_USER_GROUP')
# Deleting model 'GroupTag'
db.delete_table('SITE_GROUP_TAG')
# Deleting model 'SignupData'
db.delete_table('SITE_SIGNUP_DATA')
# Deleting model 'Invitation'
db.delete_table('SITE_INVITATION')
# Deleting model 'InvitationMeta'
db.delete_table('SITE_INVITATION_META')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'xpcore.application': {
'Meta': {'object_name': 'Application', 'db_table': "'CORE_APPLICATION'"},
'accessGroup': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'app_access'", 'db_column': "'ID_GROUP'", 'to': u"orm['xpsite.Group']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Category']", 'null': 'True', 'db_column': "'ID_CATEGORY'", 'blank': 'True'}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'developer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'db_column': "'ID_DEVELOPER'", 'blank': 'True'}),
'developerOrg': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'app_dev_org'", 'null': 'True', 'db_column': "'ID_DEVELOPER_ORG'", 'to': u"orm['xpsite.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_CORE_APPLICATION'"}),
'isAdmin': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_ADMIN'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'isPrivate': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_PRIVATE'"}),
'isSubscription': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_SUBSCRIPTION'"}),
'meta': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'app_meta'", 'symmetrical': 'False', 'through': u"orm['xpcore.ApplicationMeta']", 'to': u"orm['xpcore.MetaKey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpcore.Application']", 'null': 'True', 'db_column': "'ID_PARENT'", 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '30', 'db_column': "'SLUG'"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'application_tags'", 'to': u"orm['xpsite.Tag']", 'through': u"orm['xpcore.ApplicationTag']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_column': "'TITLE'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpcore.applicationmeta': {
'Meta': {'object_name': 'ApplicationMeta', 'db_table': "'CORE_APPLICATION_META'"},
'application': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpcore.Application']", 'db_column': "'ID_APPLICATION'"}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_CORE_APPLICATION_META'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'meta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpcore.MetaKey']", 'db_column': "'ID_META'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'db_column': "'VALUE'"})
},
u'xpcore.applicationtag': {
'Meta': {'object_name': 'ApplicationTag', 'db_table': "'CORE_APPLICATION_TAG'"},
'application': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpcore.Application']", 'db_column': "'ID_VIEW'"}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_CORE_APPLICATION_TAG'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Tag']", 'db_column': "'ID_TAG'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpcore.coreparam': {
'Meta': {'object_name': 'CoreParam', 'db_table': "'CORE_PARAMETER'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_CORE_PARAMETER'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'mode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'db_column': "'MODE'", 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_column': "'NAME'"}),
'paramType': ('django.db.models.fields.CharField', [], {'default': "'string'", 'max_length': '10', 'db_column': "'PARAM_TYPE'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'db_column': "'VALUE'", 'blank': 'True'})
},
u'xpcore.metakey': {
'Meta': {'ordering': "['name']", 'object_name': 'MetaKey', 'db_table': "'CORE_META_KEY'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_CORE_META_KEY'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'keyType': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpcore.CoreParam']", 'db_column': "'ID_META_TYPE'"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_column': "'NAME'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.address': {
'Meta': {'object_name': 'Address', 'db_table': "'SITE_ADDRESS'"},
'city': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_column': "'CITY'"}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_column': "'COUNTRY'"}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_ADDRESS'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'lat': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '18', 'decimal_places': '12', 'blank': 'True'}),
'long': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '18', 'decimal_places': '12', 'blank': 'True'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'db_column': "'REGION'", 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_column': "'STREET'", 'blank': 'True'}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'}),
'zipCode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'db_column': "'ZIP_CODE'", 'blank': 'True'})
},
u'xpsite.category': {
'Meta': {'object_name': 'Category', 'db_table': "'SITE_CATEGORY'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_column': "'DESCRIPTION'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_CATEGORY'"}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '200', 'null': 'True', 'db_column': "'IMAGE'", 'blank': 'True'}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'isPublic': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_column': "'IS_PUBLIC'"}),
'isPublished': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_PUBLISHED'"}),
'menuOrder': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1', 'db_column': "'MENU_ORDER'"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '55', 'db_column': "'NAME'"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'category_parent'", 'null': 'True', 'db_column': "'ID_PARENT'", 'to': u"orm['xpsite.Category']"}),
'popularity': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'db_column': "'POPULARITY'", 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '200', 'db_column': "'SLUG'"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Param']", 'db_column': "'ID_SITE_PARAMETER'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.group': {
'Meta': {'object_name': 'Group', 'db_table': "'SITE_GROUP'"},
'accessGroups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_access'", 'symmetrical': 'False', 'through': u"orm['xpsite.GroupAccess']", 'to': u"orm['xpsite.Group']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Category']", 'db_column': "'ID_CATEGORY'"}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'db_column': "'ID_GROUP_SYS'"}),
'groupNameId': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'db_column': "'GROUP_NAME_ID'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_GROUP'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'isPublic': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_column': "'IS_PUBLIC'"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'groupchannel_parent'", 'null': 'True', 'db_column': "'ID_PARENT'", 'to': u"orm['xpsite.Group']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'groupchannel_tags'", 'to': u"orm['xpsite.Tag']", 'through': u"orm['xpsite.GroupTag']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.groupaccess': {
'Meta': {'object_name': 'GroupAccess', 'db_table': "'SITE_GROUP_ACCESS'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'groupFrom': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groupaccess_from'", 'db_column': "'ID_GROUP_FROM'", 'to': u"orm['xpsite.Group']"}),
'groupTo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'groupaccess_to'", 'db_column': "'ID_GROUP_TO'", 'to': u"orm['xpsite.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_GROUP_ACCESS'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.grouptag': {
'Meta': {'object_name': 'GroupTag', 'db_table': "'SITE_GROUP_TAG'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Group']", 'db_column': "'ID_GROUP'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_GROUP_TAG'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Tag']", 'db_column': "'ID_TAG'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.invitation': {
'Meta': {'object_name': 'Invitation', 'db_table': "'SITE_INVITATION'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75', 'db_column': "'EMAIL'"}),
'fromUser': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'db_column': "'ID_USER'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_INVITATION'"}),
'invitationCode': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10', 'db_column': "'INVITATION_CODE'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "'MESSAGE'", 'blank': 'True'}),
'meta': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'invitation_meta'", 'symmetrical': 'False', 'through': u"orm['xpsite.InvitationMeta']", 'to': u"orm['xpsite.MetaKey']"}),
'number': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1', 'db_column': "'NUMBER'"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '10', 'db_column': "'STATUS'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.invitationmeta': {
'Meta': {'object_name': 'InvitationMeta', 'db_table': "'SITE_INVITATION_META'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_USER_PROFILE'"}),
'invitation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Invitation']", 'db_column': "'ID_INVITATION'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'meta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.MetaKey']", 'db_column': "'ID_META'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'db_column': "'VALUE'"})
},
u'xpsite.metakey': {
'Meta': {'ordering': "['name']", 'object_name': 'MetaKey', 'db_table': "'SITE_META_KEY'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_META_KEY'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'keyType': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Param']", 'db_column': "'ID_SITE_PARAMETER'"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_column': "'NAME'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.param': {
'Meta': {'object_name': 'Param', 'db_table': "'SITE_PARAMETER'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_PARAMETER'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'mode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'db_column': "'MODE'", 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_column': "'NAME'"}),
'paramType': ('django.db.models.fields.CharField', [], {'default': "'string'", 'max_length': '10', 'db_column': "'PARAM_TYPE'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'db_column': "'VALUE'", 'blank': 'True'})
},
u'xpsite.setting': {
'Meta': {'object_name': 'Setting', 'db_table': "'SITE_SETTING'"},
'application': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'site_setting_app'", 'null': 'True', 'db_column': "'ID_CORE_APPLICATION'", 'to': u"orm['xpcore.Application']"}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "'DESCRIPTION'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_SETTING'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'mustAutoload': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'MUST_AUTOLOAD'"}),
'name': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.MetaKey']", 'db_column': "'ID_META'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'db_column': "'VALUE'"})
},
u'xpsite.signupdata': {
'Meta': {'object_name': 'SignupData', 'db_table': "'SITE_SIGNUP_DATA'"},
'activationCode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'db_column': "'ACTIVATION_CODE'"}),
'data': ('django.db.models.fields.TextField', [], {'db_column': "'DATA'"}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_SIGNUP_DATA'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'user': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30', 'db_column': "'USER'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.socialnetworkuser': {
'Meta': {'unique_together': "(('user', 'socialNetwork'),)", 'object_name': 'SocialNetworkUser', 'db_table': "'SITE_SOCIAL_NETWORK_USER'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_SOCIAL_NETWORK_USER'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'socialId': ('django.db.models.fields.BigIntegerField', [], {'db_column': "'SOCIAL_ID'"}),
'socialNetwork': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpcore.CoreParam']", 'db_column': "'ID_CORE_PARAMETER'"}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "'TOKEN'"}),
'tokenSecret': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_column': "'TOKEN_SECRET'", 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'db_column': "'ID_USER'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.tag': {
'Meta': {'ordering': "['-popularity']", 'object_name': 'Tag', 'db_table': "'SITE_TAG'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_TAG'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'isPublic': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_column': "'IS_PUBLIC'"}),
'mode': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_mode'", 'db_column': "'ID_MODE'", 'to': u"orm['xpsite.TagMode']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_column': "'NAME'"}),
'popularity': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'db_column': "'POPULARITY'", 'blank': 'True'}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.tagmode': {
'Meta': {'object_name': 'TagMode', 'db_table': "'SITE_TAG_MODE'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_TAG_MODE'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'isPublic': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_column': "'IS_PUBLIC'"}),
'mode': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_column': "'MODE'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.useraddress': {
'Meta': {'object_name': 'UserAddress', 'db_table': "'SITE_USER_ADDRESS'"},
'address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Address']", 'db_column': "'ID_ADDRESS'"}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_USER_ADDRESS'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Param']", 'db_column': "'ID_SITE_PARAMETER'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'}),
'userProfile': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.UserProfile']", 'db_column': "'ID_SITE_USER_PROFILE'"})
},
u'xpsite.userchannel': {
'Meta': {'unique_together': "(('user', 'name'),)", 'object_name': 'UserChannel', 'db_table': "'SITE_USER'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'user_groups'", 'symmetrical': 'False', 'through': u"orm['xpsite.UserChannelGroup']", 'to': u"orm['xpsite.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_USER'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'name': ('django.db.models.fields.CharField', [], {'default': "'user'", 'max_length': '20', 'db_column': "'NAME'"}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Tag']", 'null': 'True', 'db_column': "'ID_TAG'", 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '20', 'db_column': "'TITLE'"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'db_column': "'ID_USER'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.userchannelgroup': {
'Meta': {'object_name': 'UserChannelGroup', 'db_table': "'SITE_USER_GROUP'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Group']", 'db_column': "'ID_GROUP'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_USER_GROUP'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'userChannel': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.UserChannel']", 'db_column': "'ID_USER_CHANNEL'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
},
u'xpsite.usermeta': {
'Meta': {'object_name': 'UserMeta', 'db_table': "'SITE_USER_META'"},
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_USER_PROFILE'"}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'meta': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.MetaKey']", 'db_column': "'ID_META'"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'db_column': "'ID_USER'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'}),
'value': ('django.db.models.fields.TextField', [], {'db_column': "'VALUE'"})
},
u'xpsite.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'SITE_USER_PROFILE'"},
'addresses': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'userprofile_addresses'", 'symmetrical': 'False', 'through': u"orm['xpsite.UserAddress']", 'to': u"orm['xpsite.Address']"}),
'dateCreate': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_column': "'DATE_CREATE'", 'blank': 'True'}),
'dateModify': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'db_column': "'DATE_MODIFY'", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ID_SITE_USER_PROFILE'"}),
'image': ('filebrowser.fields.FileBrowseField', [], {'max_length': '200', 'null': 'True', 'db_column': "'IMAGE'", 'blank': 'True'}),
'isDeleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'IS_DELETED'"}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['xpsite.Param']", 'db_column': "'ID_SITE_PARAMETER'"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'db_column': "'ID_USER'"}),
'userCreateId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_CREATE_ID'", 'blank': 'True'}),
'userModifyId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'USER_MODIFY_ID'", 'blank': 'True'})
}
}
complete_apps = ['xpsite'] | 97.466942 | 245 | 0.616964 | 8,225 | 70,761 | 5.12 | 0.026869 | 0.087006 | 0.151928 | 0.21704 | 0.905229 | 0.883667 | 0.868066 | 0.858425 | 0.839642 | 0.816822 | 0 | 0.003279 | 0.15534 | 70,761 | 726 | 246 | 97.466942 | 0.7013 | 0.018668 | 0 | 0.470866 | 0 | 0 | 0.547733 | 0.26553 | 0 | 0 | 0 | 0 | 0 | 1 | 0.00315 | false | 0.001575 | 0.006299 | 0 | 0.014173 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a906ffa136322d08731cd3a5538fffeeb7adcea8 | 6,487 | py | Python | point/point/tests/trainer/test_model.py | RobertDurfee/VehicleSimulation | 567025e195539094393342a266973c148767330e | ["MIT"] | null | null | null | point/point/tests/trainer/test_model.py | RobertDurfee/VehicleSimulation | 567025e195539094393342a266973c148767330e | ["MIT"] | null | null | null | point/point/tests/trainer/test_model.py | RobertDurfee/VehicleSimulation | 567025e195539094393342a266973c148767330e | ["MIT"] | null | null | null | from unittest import TestCase
import numpy as np
from point.trainer.model import batch_generator, create_model, compile_model
import sys
from keras.models import Sequential
from keras.layers import InputLayer, Dense, LSTM, TimeDistributed
from keras.optimizers import Adam
class TestBatchGeneratorDivisible(TestCase):
def setUp(self):
self.n_samples = 6
self.in_features = 3
self.out_features = 2
self.X = np.array([[-3., 0., -8.],
[ 0., -2., 3.],
[ 3., -3., -1.],
[-1., 6., 1.],
[-7., 8., -9.],
[ 2., 5., 7.]])
self.Y = np.array([[-3., -8.],
[ 0., 1.],
[ 3., 2.],
[-1., 0.],
[-7., -9.],
[ 2., 9.]])
self.batch_size = 2
def test_shapes(self):
generator = batch_generator(self.X, self.Y, self.batch_size)
for _ in range(3):
X_batch, Y_batch = next(generator)
self.assertEqual(X_batch.shape, (self.batch_size, self.in_features))
self.assertEqual(Y_batch.shape, (self.batch_size, self.out_features))
def test_values(self):
generator = batch_generator(self.X, self.Y, self.batch_size)
for i in range(3):
X_batch, Y_batch = next(generator)
self.assertTrue(np.allclose(X_batch, self.X[i*self.batch_size:(i+1)*self.batch_size, :]))
self.assertTrue(np.allclose(Y_batch, self.Y[i*self.batch_size:(i+1)*self.batch_size, :]))
def test_shapes_repeat_after_epoch(self):
generator = batch_generator(self.X, self.Y, self.batch_size)
for _ in range(3):
next(generator)
for _ in range(3):
X_batch, Y_batch = next(generator)
self.assertEqual(X_batch.shape, (self.batch_size, self.in_features))
self.assertEqual(Y_batch.shape, (self.batch_size, self.out_features))
def test_values_repeat_after_epoch(self):
generator = batch_generator(self.X, self.Y, self.batch_size)
for _ in range(3):
next(generator)
for i in range(3):
X_batch, Y_batch = next(generator)
self.assertTrue(np.allclose(X_batch, self.X[i*self.batch_size:(i+1)*self.batch_size, :]))
self.assertTrue(np.allclose(Y_batch, self.Y[i*self.batch_size:(i+1)*self.batch_size, :]))
class TestBatchGeneratorNotDivisible(TestCase):
def setUp(self):
self.n_samples = 6
self.in_features = 3
self.out_features = 2
self.X = np.array([[-3., 0., -8.],
[ 0., -2., 3.],
[ 3., -3., -1.],
[-1., 6., 1.],
[-7., 8., -9.],
[ 2., 5., 7.]])
self.Y = np.array([[-3., -8.],
[ 0., 1.],
[ 3., 2.],
[-1., 0.],
[-7., -9.],
[ 2., 9.]])
self.batch_size = 4
def test_shapes(self):
generator = batch_generator(self.X, self.Y, self.batch_size)
# Complete batch
X_batch, Y_batch = next(generator)
self.assertEqual(X_batch.shape, (self.batch_size, self.in_features))
self.assertEqual(Y_batch.shape, (self.batch_size, self.out_features))
# Incomplete batch
X_batch, Y_batch = next(generator)
self.assertEqual(X_batch.shape, (2, self.in_features))
self.assertEqual(Y_batch.shape, (2, self.out_features))
def test_values(self):
generator = batch_generator(self.X, self.Y, self.batch_size)
# Complete batch
X_batch, Y_batch = next(generator)
self.assertTrue(np.allclose(X_batch, self.X[:self.batch_size, :]))
self.assertTrue(np.allclose(Y_batch, self.Y[:self.batch_size, :]))
# Incomplete batch
X_batch, Y_batch = next(generator)
self.assertTrue(np.allclose(X_batch, self.X[self.batch_size:, :]))
self.assertTrue(np.allclose(Y_batch, self.Y[self.batch_size:, :]))
def test_shapes_repeat_after_epoch(self):
generator = batch_generator(self.X, self.Y, self.batch_size)
# First epoch
for _ in range(2):
next(generator)
# Second epoch
# Complete batch
X_batch, Y_batch = next(generator)
self.assertEqual(X_batch.shape, (self.batch_size, self.in_features))
self.assertEqual(Y_batch.shape, (self.batch_size, self.out_features))
# Incomplete batch
X_batch, Y_batch = next(generator)
self.assertEqual(X_batch.shape, (2, self.in_features))
self.assertEqual(Y_batch.shape, (2, self.out_features))
def test_values_repeat_after_epoch(self):
generator = batch_generator(self.X, self.Y, self.batch_size)
# First epoch
for _ in range(2):
next(generator)
# Second epoch
# Complete batch
X_batch, Y_batch = next(generator)
self.assertTrue(np.allclose(X_batch, self.X[:self.batch_size, :]))
self.assertTrue(np.allclose(Y_batch, self.Y[:self.batch_size, :]))
# Incomplete batch
X_batch, Y_batch = next(generator)
self.assertTrue(np.allclose(X_batch, self.X[self.batch_size:, :]))
self.assertTrue(np.allclose(Y_batch, self.Y[self.batch_size:, :]))
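# For reference, a minimal sketch of the generator contract the tests above pin
# down; the real implementation lives in point.trainer.model, so this is an
# assumption-labelled illustration rather than the project's code:
#
#     def batch_generator(X, Y, batch_size):
#         while True:  # cycle indefinitely, epoch after epoch
#             for i in range(0, len(X), batch_size):
#                 # the final slice is shorter when len(X) is not divisible
#                 # by batch_size, as TestBatchGeneratorNotDivisible checks
#                 yield X[i:i + batch_size], Y[i:i + batch_size]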
class TestCreateModel(TestCase):
def setUp(self):
self.in_features = 2
self.hidden_units = [256, 64, 16, 4]
self.out_features = 1
def test_no_exception(self):
try:
create_model(self.in_features, self.hidden_units, self.out_features)
        except Exception:  # narrower than a bare except; unexpected failures still reach fail()
self.fail('Unexpected error: ' + str(sys.exc_info()))
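# create_model is only smoke-tested above; judging from its arguments it
# presumably stacks Dense layers between input and output. A hypothetical
# equivalent (an assumption, not the project's actual implementation):
#
#     model = Sequential([InputLayer(input_shape=(in_features,))]
#                        + [Dense(units) for units in hidden_units]
#                        + [Dense(out_features)])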
class TestCompileModel(TestCase):
def setUp(self):
self.model = Sequential([
InputLayer(input_shape=(10,)),
Dense(256),
Dense(10)
])
self.optimizer = 'Adam'
self.learning_rate = 0.001
self.loss = 'mean_squared_error'
def test_no_exception(self):
try:
compile_model(self.model, self.optimizer, self.learning_rate, self.loss)
        except Exception:  # narrower than a bare except; unexpected failures still reach fail()
self.fail('Unexpected error: ' + str(sys.exc_info()))
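# compile_model is likewise smoke-tested only; a plausible body, assuming the
# optimizer name is resolved to a Keras optimizer with the given learning rate
# (hypothetical, though it would explain the Adam import at the top of the file):
#
#     model.compile(optimizer=Adam(lr=learning_rate), loss=loss)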
| 29.621005 | 101 | 0.55696 | 804 | 6,487 | 4.299751 | 0.116915 | 0.088516 | 0.127857 | 0.068846 | 0.8221 | 0.808215 | 0.793752 | 0.793752 | 0.793752 | 0.769453 | 0 | 0.023548 | 0.312625 | 6,487 | 218 | 102 | 29.756881 | 0.751738 | 0.027285 | 0 | 0.80303 | 0 | 0 | 0.009211 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 1 | 0.106061 | false | 0 | 0.05303 | 0 | 0.189394 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e33ced9a783f745326969d3b24742fc594648ffb | 1,597 | py | Python | python/testData/inspections/PyArgumentListInspection/collectionsNamedTupleReplace.py | fduminy/intellij-community | fe13dc9ddb7f0f65397325ded25ecb239675eb59 | ["Apache-2.0"] | 2 | 2018-12-29T09:53:39.000Z | 2018-12-29T09:53:42.000Z | python/testData/inspections/PyArgumentListInspection/collectionsNamedTupleReplace.py | tnorbye/intellij-community | f01cf262fc196bf4dbb99e20cd937dee3705a7b6 | ["Apache-2.0"] | null | null | null | python/testData/inspections/PyArgumentListInspection/collectionsNamedTupleReplace.py | tnorbye/intellij-community | f01cf262fc196bf4dbb99e20cd937dee3705a7b6 | ["Apache-2.0"] | 1 | 2018-10-03T12:35:06.000Z | 2018-10-03T12:35:06.000Z | from collections import namedtuple
MyTup1 = namedtuple("MyTup1", "bar baz")
mt1 = MyTup1(1, 2)
# empty
mt1._replace()
# one
mt1._replace(bar=2)
mt1._replace(baz=1)
mt1._replace(<warning descr="Unexpected argument">foo=1</warning>)
mt1._replace(<warning descr="Unexpected argument">1</warning>)
# two
mt1._replace(bar=1, baz=2)
mt1._replace(baz=2, bar=1)
mt1._replace(baz=2, <warning descr="Unexpected argument">foo=1</warning>)
mt1._replace(<warning descr="Unexpected argument">2</warning>, <warning descr="Unexpected argument">1</warning>)
# three
mt1._replace(bar=1, baz=2, <warning descr="Unexpected argument">foo=3</warning>)
mt1._replace(<warning descr="Unexpected argument">1</warning>, <warning descr="Unexpected argument">2</warning>, <warning descr="Unexpected argument">3</warning>)
class MyTup2(namedtuple("MyTup2", "bar baz")):
    pass
mt2 = MyTup2(1, 2)
# empty
mt2._replace()
# one
mt2._replace(bar=2)
mt2._replace(baz=1)
mt2._replace(<warning descr="Unexpected argument">foo=1</warning>)
mt2._replace(<warning descr="Unexpected argument">1</warning>)
# two
mt2._replace(bar=1, baz=2)
mt2._replace(baz=2, bar=1)
mt2._replace(baz=2, <warning descr="Unexpected argument">foo=1</warning>)
mt2._replace(<warning descr="Unexpected argument">2</warning>, <warning descr="Unexpected argument">1</warning>)
# three
mt2._replace(bar=1, baz=2, <warning descr="Unexpected argument">foo=3</warning>)
mt2._replace(<warning descr="Unexpected argument">1</warning>, <warning descr="Unexpected argument">2</warning>, <warning descr="Unexpected argument">3</warning>) | 33.270833 | 164 | 0.727614 | 226 | 1,597 | 5.044248 | 0.106195 | 0.189474 | 0.347368 | 0.473684 | 0.805263 | 0.778947 | 0.773684 | 0.773684 | 0.761404 | 0.75614 | 0 | 0.048409 | 0.094552 | 1,597 | 48 | 165 | 33.270833 | 0.739972 | 0.021916 | 0 | 0 | 0 | 0 | 0.236808 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.035714 | 0.035714 | null | null | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
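This fixture drives PyCharm's argument-list inspection for namedtuple._replace, which takes replacement values by keyword only; CPython enforces the same contract at runtime. A quick standalone illustration (plain Python, independent of the IDE test data):

from collections import namedtuple

MyTup = namedtuple("MyTup", "bar baz")
t = MyTup(1, 2)

print(t._replace(bar=3))   # MyTup(bar=3, baz=2) -- keywords map onto field names
try:
    t._replace(3)          # positional values are rejected, as the inspection warns
except TypeError as err:
    print(err)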
e35e7c61f1cce1428e31d341d2652fe88733e5a6 | 167 | py | Python | 1-100/29/29.py | Thomaw/Project-Euler | bcad5d8a1fd3ebaa06fa52d92d286607e9372a8d | [
"MIT"
] | null | null | null | 1-100/29/29.py | Thomaw/Project-Euler | bcad5d8a1fd3ebaa06fa52d92d286607e9372a8d | [
"MIT"
] | null | null | null | 1-100/29/29.py | Thomaw/Project-Euler | bcad5d8a1fd3ebaa06fa52d92d286607e9372a8d | [
"MIT"
] | null | null | null | l = []
for a in range(2, 101):
    for b in range(2, 101):
        c = a ** b
        if c not in l:
            l.append(c)
print(len(l))
| 20.875 | 35 | 0.359281 | 27 | 167 | 2.222222 | 0.518519 | 0.233333 | 0.266667 | 0.366667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 0.520958 | 167 | 7 | 36 | 23.857143 | 0.65 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.142857 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
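Project Euler 29 counts the distinct values of a**b for 2 <= a, b <= 100; the list-based membership test above costs O(n) per lookup. A set gives the same count with O(1) membership in one line:

print(len({a ** b for a in range(2, 101) for b in range(2, 101)}))  # 9183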
e36a75e4d73a8df0eb908f466a697ab447447fde | 140 | py | Python | reqlog/dbschema/shared.py | JFF-Bohdan/reqlog | a7ba7b6e12609d736b3cd8cd8bc2913d511848ee | [
"MIT"
] | null | null | null | reqlog/dbschema/shared.py | JFF-Bohdan/reqlog | a7ba7b6e12609d736b3cd8cd8bc2913d511848ee | [
"MIT"
] | null | null | null | reqlog/dbschema/shared.py | JFF-Bohdan/reqlog | a7ba7b6e12609d736b3cd8cd8bc2913d511848ee | [
"MIT"
] | null | null | null | import ksuid
def get_string_ksuid():
    return str(ksuid.ksuid())


def get_base62_ksuid():
    return ksuid.ksuid().toBase62()
| 14 | 36 | 0.657143 | 18 | 140 | 4.888889 | 0.5 | 0.181818 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036697 | 0.221429 | 140 | 9 | 37 | 15.555556 | 0.770642 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | true | 0 | 0.2 | 0.4 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
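Both helpers wrap the same constructor and differ only in how the identifier is encoded. A usage sketch, assuming only the ksuid package API the module above already relies on (its ksuid() constructor and toBase62() method):

import ksuid

uid = ksuid.ksuid()
print(str(uid))        # canonical string form; KSUIDs are time-prefixed, so they sort by creation
print(uid.toBase62())  # compact base62 encoding of the same identifier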
e36b00f522d3fc1f91ad397215fe2c5d1e58c15e | 128 | py | Python | tests/test_pool_tr.py | ffreemt/pool-translate-free | b3554f78edab1a733a975c712cae6e78aa3a365b | [
"MIT"
] | null | null | null | tests/test_pool_tr.py | ffreemt/pool-translate-free | b3554f78edab1a733a975c712cae6e78aa3a365b | [
"MIT"
] | null | null | null | tests/test_pool_tr.py | ffreemt/pool-translate-free | b3554f78edab1a733a975c712cae6e78aa3a365b | [
"MIT"
] | null | null | null | from pool_tr import __version__
from pool_tr.pool_tr import pool_tr


def test_version():
    assert __version__[:3] in "0.1.0"
| 18.285714 | 37 | 0.757813 | 23 | 128 | 3.652174 | 0.521739 | 0.285714 | 0.238095 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.037037 | 0.15625 | 128 | 6 | 38 | 21.333333 | 0.740741 | 0 | 0 | 0 | 0 | 0 | 0.039063 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.25 | true | 0 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
e38998e033de8db9f720b66e6107874f13b38fd2 | 4,442 | py | Python | app/tests/api/api_v1/test_currency.py | germainlefebvre4/cryptobot-api | 6b8f10554bbb50ac669c8f8a87414c9292fc9d7b | [
"MIT"
] | null | null | null | app/tests/api/api_v1/test_currency.py | germainlefebvre4/cryptobot-api | 6b8f10554bbb50ac669c8f8a87414c9292fc9d7b | [
"MIT"
] | 8 | 2021-09-28T12:55:38.000Z | 2022-01-05T22:45:20.000Z | app/tests/api/api_v1/test_currency.py | germainlefebvre4/cryptobot-api | 6b8f10554bbb50ac669c8f8a87414c9292fc9d7b | [
"MIT"
] | null | null | null | # from datetime import date, datetime
# from dateutil.relativedelta import relativedelta
# # from fastapi import status
# from fastapi.testclient import TestClient
# from sqlalchemy.orm import Session
# from app.core.config import settings
# from app.tests.utils.utils import random_lower_string, random_weekdays
# from app.tests.utils.user import create_random_user
# from app.tests.utils.currency import get_random_exchange_currency
# def test_create_currency(
# client: TestClient, normal_user_token_headers: dict,
# ) -> None:
# r = client.get(f"{settings.API_V1_STR}/users/me", headers=normal_user_token_headers)
# user_id = r.json()["id"]
# base_currency, quote_currency = get_random_exchange_currency()
# data = {
# "base_currency": base_currency,
# "quote_currency": quote_currency,
# }
# response = client.post(
# f"{settings.API_V1_STR}/margin/currencies/?" + \
# f"&user_id={user_id}",
# headers=normal_user_token_headers,
# json=data,
# )
# content = response.json()
# assert response.status_code == 200
# assert "id" in content
# assert content["base_currency"] == base_currency
# assert content["quote_currency"] == quote_currency
# def test_read_currencies_by_user(
# client: TestClient, normal_user_token_headers: dict,
# ) -> None:
# r = client.get(f"{settings.API_V1_STR}/users/me", headers=normal_user_token_headers)
# user_id = r.json()["id"]
# response = client.get(
# f"{settings.API_V1_STR}/margin/currencies/?" + \
# f"&user_id={user_id}",
# headers=normal_user_token_headers,
# )
# content = response.json()
# assert response.status_code == 200
# assert isinstance(content, list)
# def test_read_currency_by_id_by_user(
# client: TestClient, normal_user_token_headers: dict,
# ) -> None:
# r = client.get(f"{settings.API_V1_STR}/users/me", headers=normal_user_token_headers)
# user_id = r.json()["id"]
# base_currency, quote_currency = get_random_exchange_currency()
# data = {
# "base_currency": base_currency,
# "quote_currency": quote_currency,
# }
# response = client.post(
# f"{settings.API_V1_STR}/margin/currencies/?" + \
# f"&user_id={user_id}",
# headers=normal_user_token_headers,
# json=data,
# )
# content = response.json()
# assert response.status_code == 200
# currency_id = content['id']
# response = client.get(
# f"{settings.API_V1_STR}/margin/currencies/{currency_id}?" + \
# f"&user_id={user_id}",
# headers=normal_user_token_headers,
# )
# content = response.json()
# assert response.status_code == 200
# assert "id" in content
# assert content["base_currency"] == base_currency
# assert content["quote_currency"] == quote_currency
# def test_delete_currency_by_id_by_user(
# client: TestClient, normal_user_token_headers: dict,
# ) -> None:
# r = client.get(f"{settings.API_V1_STR}/users/me", headers=normal_user_token_headers)
# user_id = r.json()["id"]
# base_currency, quote_currency = get_random_exchange_currency()
# data = {
# "base_currency": base_currency,
# "quote_currency": quote_currency,
# }
# response = client.post(
# f"{settings.API_V1_STR}/margin/currencies/?" + \
# f"&user_id={user_id}",
# headers=normal_user_token_headers,
# json=data,
# )
# content = response.json()
# assert response.status_code == 200
# currency_id = content['id']
# response = client.get(
# f"{settings.API_V1_STR}/margin/currencies/{currency_id}?" + \
# f"&user_id={user_id}",
# headers=normal_user_token_headers,
# )
# content = response.json()
# assert response.status_code == 200
# response = client.delete(
# f"{settings.API_V1_STR}/margin/currencies/{currency_id}?" + \
# f"&user_id={user_id}",
# headers=normal_user_token_headers,
# )
# content = response.json()
# assert response.status_code == 200
# currency_id = content['id']
# response = client.get(
# f"{settings.API_V1_STR}/margin/currencies/{currency_id}?" + \
# f"&user_id={user_id}",
# headers=normal_user_token_headers,
# )
# content = response.json()
# assert response.status_code == 404
| 31.503546 | 90 | 0.646781 | 535 | 4,442 | 5.056075 | 0.115888 | 0.044362 | 0.088725 | 0.130129 | 0.834011 | 0.834011 | 0.834011 | 0.834011 | 0.834011 | 0.834011 | 0 | 0.010357 | 0.21747 | 4,442 | 140 | 91 | 31.728571 | 0.767837 | 0.941693 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
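If these commented-out tests are ever re-enabled, note that every request rebuilds the endpoint with f"...currencies/?" + f"&user_id=...", which produces a stray ?& in the query string. A small helper would centralize the construction; this is a sketch only, assuming the same settings object and fixtures as the commented code:

def currencies_url(user_id, currency_id=""):
    # Hypothetical helper, not part of the original module.
    return f"{settings.API_V1_STR}/margin/currencies/{currency_id}?user_id={user_id}"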
8b6ed41f900a7002b2f19561cffe2f4610bbb025 | 113 | py | Python | First Steps/test_HelloWorld.py | zeltyr/learning_python | 785262786c6c7a7879aeec8dfde071e77774a0d4 | [
"MIT"
] | null | null | null | First Steps/test_HelloWorld.py | zeltyr/learning_python | 785262786c6c7a7879aeec8dfde071e77774a0d4 | [
"MIT"
] | null | null | null | First Steps/test_HelloWorld.py | zeltyr/learning_python | 785262786c6c7a7879aeec8dfde071e77774a0d4 | [
"MIT"
] | null | null | null | import HelloWorld
def test_get_text_hello_world():
    assert HelloWorld.get_text_hello_world() == "Hello world" | 28.25 | 61 | 0.79646 | 16 | 113 | 5.1875 | 0.5625 | 0.361446 | 0.289157 | 0.409639 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115044 | 113 | 4 | 61 | 28.25 | 0.83 | 0 | 0 | 0 | 0 | 0 | 0.096491 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9
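The HelloWorld module under test is not included in this file; the single assertion implies a module shaped like the following (hypothetical sketch of HelloWorld.py):

# HelloWorld.py -- assumed counterpart to the test above
def get_text_hello_world():
    return "Hello world"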
8b91915f11171cc19d4a187a59270d952c7ff875 | 3,937 | py | Python | tests/test_models.py | fabianmoss/pkspell | f61f468b512210e5a78cc4c1e30617847e2cf6ef | [
"MIT"
] | 7 | 2021-08-06T08:00:11.000Z | 2022-03-10T01:56:27.000Z | tests/test_models.py | fabianmoss/pkspell | f61f468b512210e5a78cc4c1e30617847e2cf6ef | [
"MIT"
] | null | null | null | tests/test_models.py | fabianmoss/pkspell | f61f468b512210e5a78cc4c1e30617847e2cf6ef | [
"MIT"
] | 1 | 2021-08-30T09:38:36.000Z | 2021-08-30T09:38:36.000Z | import pytest
import numpy as np
import torch

from src.models.models import PKSpell, PKSpell_single
from src.data.pytorch_datasets import pitch_to_ix, ks_to_ix
from pathlib import Path


def test_PKSpell_single():
    n_features = 12
    piece_len = [20, 9, 8, 5]
    batch_size = 4
    model = PKSpell_single(
        n_features,
        12,
        pitch_to_ix,
        ks_to_ix,
        rnn_depth=1,
        cell_type="GRU",
        dropout=None,
        bidirectional=True,
        mode="both",
    )
    dummy_input_midi = np.random.randint(
        0, 12, size=(max(piece_len), batch_size, n_features)
    )
    dummy_pitch = np.random.randint(
        0, len(pitch_to_ix), size=(max(piece_len), batch_size)
    )
    dummy_ks = np.random.randint(0, len(ks_to_ix), size=(max(piece_len), batch_size))

    # try a training step
    loss = model(
        torch.Tensor(dummy_input_midi),
        torch.Tensor(dummy_pitch).long(),
        torch.Tensor(dummy_ks).long(),
        torch.Tensor(piece_len),
    )
    assert loss.shape == torch.Size([])

    # try a prediction
    prediction = model.predict(
        torch.Tensor(dummy_input_midi), torch.Tensor([20, 9, 8, 5])
    )
    assert type(prediction) == tuple
    assert len(prediction[0]) == batch_size
    assert len(prediction[1]) == batch_size
    for i, l in enumerate(piece_len):
        assert len(prediction[0][i]) == l
        assert len(prediction[1][i]) == l


def test_PKSpell():
    n_features = 12
    piece_len = [20, 9, 8, 5]
    batch_size = 4
    model = PKSpell(
        n_features,
        12,
        pitch_to_ix,
        ks_to_ix,
        rnn_depth=1,
        cell_type="GRU",
        dropout=None,
        bidirectional=True,
        mode="both",
    )
    dummy_input_midi = np.random.randint(
        0, 12, size=(max(piece_len), batch_size, n_features)
    )
    dummy_pitch = np.random.randint(
        0, len(pitch_to_ix), size=(max(piece_len), batch_size)
    )
    dummy_ks = np.random.randint(0, len(ks_to_ix), size=(max(piece_len), batch_size))

    # try a training step
    loss = model(
        torch.Tensor(dummy_input_midi),
        torch.Tensor(dummy_pitch).long(),
        torch.Tensor(dummy_ks).long(),
        torch.Tensor(piece_len),
    )
    assert loss.shape == torch.Size([])

    # try a prediction
    prediction = model.predict(
        torch.Tensor(dummy_input_midi), torch.Tensor([20, 9, 8, 5])
    )
    assert type(prediction) == tuple
    assert len(prediction[0]) == batch_size
    assert len(prediction[1]) == batch_size
    for i, l in enumerate(piece_len):
        assert len(prediction[0][i]) == l
        assert len(prediction[1][i]) == l


def test_PKSpell_odd_hidden_dim():
    hidden_dim = 11
    hidden_dim2 = 7
    with pytest.raises(ValueError):
        model = PKSpell(
            5,
            hidden_dim,
            pitch_to_ix,
            ks_to_ix,
            rnn_depth=1,
            cell_type="GRU",
            dropout=None,
            bidirectional=True,
            mode="both",
        )
    with pytest.raises(ValueError):
        model = PKSpell(
            5,
            10,
            pitch_to_ix,
            ks_to_ix,
            rnn_depth=1,
            cell_type="GRU",
            dropout=None,
            bidirectional=True,
            mode="both",
            hidden_dim2=hidden_dim2,
        )


def test_import_model():
    # import pkspell
    model = torch.load(Path("models/pkspell.pt"))
    # import pkspell_single
    model = torch.load(Path("models/pkspell_single.pt"))
    assert True


def test_import_pretrained_state_dict():
    # import pkspell
    model = PKSpell(17, 300, pitch_to_ix, ks_to_ix, hidden_dim2=24)
    model.load_state_dict(torch.load(Path("models/pkspell_statedict.pt")))
    # import pkspell_single
    model = PKSpell_single(17, 300, pitch_to_ix, ks_to_ix)
    model.load_state_dict(torch.load(Path("models/pkspell_single_statedict.pt")))
    assert True
| 26.601351 | 85 | 0.602235 | 523 | 3,937 | 4.290631 | 0.170172 | 0.032086 | 0.036096 | 0.034314 | 0.838235 | 0.808824 | 0.769162 | 0.734403 | 0.716578 | 0.677362 | 0 | 0.026521 | 0.281687 | 3,937 | 147 | 86 | 26.782313 | 0.766973 | 0.034798 | 0 | 0.708333 | 0 | 0 | 0.034292 | 0.022422 | 0 | 0 | 0 | 0 | 0.116667 | 1 | 0.041667 | false | 0 | 0.066667 | 0 | 0.108333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
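The dummy tensors above use the (max_sequence_length, batch_size, n_features) layout that PyTorch RNNs expect with batch_first=False, and piece_len carries each padded sequence's true length. A minimal sketch of how such lengths are normally consumed (standard torch.nn.utils.rnn API, not PKSpell internals):

import torch
from torch.nn.utils.rnn import pack_padded_sequence

seq = torch.randn(20, 4, 12)           # (max_len, batch, features), as in the tests
lengths = torch.tensor([20, 9, 8, 5])  # true length of each piece, sorted descending
packed = pack_padded_sequence(seq, lengths, enforce_sorted=True)
# `packed` can be fed to an nn.GRU so that padding never influences the hidden state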
8bb9d8bf6abc609e430393ecbb34c8daa6941a53 | 99 | py | Python | vibora/request/__init__.py | mnxzyw/vibora | 445bf5bf50dcb27f1415a874fe53d67a8004a2b9 | [
"MIT"
] | 6,238 | 2018-06-14T19:29:47.000Z | 2022-03-29T21:42:03.000Z | vibora/request/__init__.py | LL816/vibora | 4cda888f89aec6bfb2541ee53548ae1bf50fbf1b | [
"MIT"
] | 213 | 2018-06-13T20:13:59.000Z | 2022-03-26T07:46:49.000Z | vibora/request/__init__.py | LL816/vibora | 4cda888f89aec6bfb2541ee53548ae1bf50fbf1b | [
"MIT"
] | 422 | 2018-06-20T01:29:41.000Z | 2022-02-27T16:45:29.000Z | from typing import TYPE_CHECKING
from .request import *
if TYPE_CHECKING:
    from .hints import *
| 19.8 | 32 | 0.767677 | 14 | 99 | 5.285714 | 0.571429 | 0.324324 | 0.432432 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 99 | 4 | 33 | 24.75 | 0.91358 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
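The TYPE_CHECKING guard above is the standard idiom for checker-only imports: the constant is False at runtime, so .hints is never imported when the package loads (avoiding import cost or circular imports), while static analyzers still resolve the names. A generic illustration of the same pattern:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by static type checkers, never at runtime.
    from expensive_module import BigClass  # hypothetical module

def handle(obj: "BigClass") -> None:
    ...  # the string annotation keeps the runtime free of the import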
8bcec850a38824a38caad1b88a7683554af31a36 | 1,619 | py | Python | tools/dataset_converters/icdar2013_bbox_2_segmentation.py | vansin/tabnet | 2403c8134c23a704940522ace92a75b0fc6f5d99 | [
"Apache-2.0"
] | 2 | 2021-10-18T02:52:18.000Z | 2022-01-21T08:54:18.000Z | tools/dataset_converters/icdar2013_bbox_2_segmentation.py | vansin/tabnet | 2403c8134c23a704940522ace92a75b0fc6f5d99 | [
"Apache-2.0"
] | null | null | null | tools/dataset_converters/icdar2013_bbox_2_segmentation.py | vansin/tabnet | 2403c8134c23a704940522ace92a75b0fc6f5d99 | [
"Apache-2.0"
] | null | null | null | import json
train_in = open('data/icdar2013/annotations/table_ICDAR2013_train.json')
test_in = open('data/icdar2013/annotations/table_ICDAR2013_test.json')
data_train = json.load(train_in)
data_test = json.load(test_in)

for annotation in data_train['annotations']:
    bbox = annotation['bbox']
    segmentation = []
    # left_top
    segmentation.append(int(bbox[0]))
    segmentation.append(int(bbox[1]))
    # left_bottom
    segmentation.append(int(bbox[0]))
    segmentation.append(int(bbox[1] + bbox[3]))
    # right_bottom
    segmentation.append(int(bbox[0] + bbox[2]))
    segmentation.append(int(bbox[1] + bbox[3]))
    # right_top
    segmentation.append(int(bbox[0] + bbox[2]))
    segmentation.append(int(bbox[1]))
    annotation['segmentation'].append(segmentation)

for annotation in data_test['annotations']:
    bbox = annotation['bbox']
    segmentation = []
    # left_top
    segmentation.append(int(bbox[0]))
    segmentation.append(int(bbox[1]))
    # left_bottom
    segmentation.append(int(bbox[0]))
    segmentation.append(int(bbox[1] + bbox[3]))
    # right_bottom
    segmentation.append(int(bbox[0] + bbox[2]))
    segmentation.append(int(bbox[1] + bbox[3]))
    # right_top
    segmentation.append(int(bbox[0] + bbox[2]))
    segmentation.append(int(bbox[1]))
    annotation['segmentation'].append(segmentation)

with open('data/icdar2013/annotations/table_ICDAR2013_segm_train.json',
          'w') as outfile:
    json.dump(data_train, outfile)

with open('data/icdar2013/annotations/table_ICDAR2013_segm_test.json',
          'w') as outfile1:
    json.dump(data_test, outfile1)
| 29.981481 | 72 | 0.692403 | 210 | 1,619 | 5.204762 | 0.152381 | 0.296432 | 0.307411 | 0.365965 | 0.803294 | 0.803294 | 0.803294 | 0.722781 | 0.63129 | 0.63129 | 0 | 0.042553 | 0.158122 | 1,619 | 53 | 73 | 30.54717 | 0.759354 | 0.053737 | 0 | 0.628571 | 0 | 0 | 0.181221 | 0.144452 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.028571 | 0 | 0.028571 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
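The two loops above are line-for-line identical: each turns a COCO-style [x, y, w, h] bbox into a flattened four-corner polygon (top-left, bottom-left, bottom-right, top-right). The same conversion factored into one helper; the sketch preserves the original's int() truncation of each summed coordinate:

def bbox_to_segmentation(bbox):
    # [x, y, w, h] -> [x1, y1, x2, y2, x3, y3, x4, y4], corners in the order above.
    x, y, w, h = bbox
    return [int(x), int(y),
            int(x), int(y + h),
            int(x + w), int(y + h),
            int(x + w), int(y)]

for data in (data_train, data_test):
    for annotation in data['annotations']:
        annotation['segmentation'].append(bbox_to_segmentation(annotation['bbox']))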
473e6fdc7dec5d3c88b1c975d3122b2a63e99166 | 40,645 | py | Python | mmseg/datasets/voc.py | giladcohen/mmsegmentation | 04eb42b06628cb96a028801c981918741d192529 | [
"Apache-2.0"
] | null | null | null | mmseg/datasets/voc.py | giladcohen/mmsegmentation | 04eb42b06628cb96a028801c981918741d192529 | [
"Apache-2.0"
] | null | null | null | mmseg/datasets/voc.py | giladcohen/mmsegmentation | 04eb42b06628cb96a028801c981918741d192529 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) OpenMMLab. All rights reserved.
import os.path as osp
import numpy as np
import os

from .builder import DATASETS
from .custom import CustomDataset
from research.utils import generate_farthest_vecs


@DATASETS.register_module()
class PascalVOCDataset(CustomDataset):
    """Pascal VOC dataset.

    Args:
        split (str): Split txt file for Pascal VOC.
    """

    EMB_DIM = 200

    CLASSES = ('background', 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle',
               'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog',
               'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa',
               'train', 'tvmonitor')

    PALETTE = [[0, 0, 0], [128, 0, 0], [0, 128, 0], [128, 128, 0], [0, 0, 128],
               [128, 0, 128], [0, 128, 128], [128, 128, 128], [64, 0, 0],
               [192, 0, 0], [64, 128, 0], [192, 128, 0], [64, 0, 128],
               [192, 0, 128], [64, 128, 128], [192, 128, 128], [0, 64, 0],
               [128, 64, 0], [0, 192, 0], [128, 192, 0], [0, 64, 128]]

    def __init__(self, split, **kwargs):
        self.emb = kwargs.pop('emb', None)
        super(PascalVOCDataset, self).__init__(
            img_suffix='.jpg', seg_map_suffix='.png', split=split, **kwargs)
        assert osp.exists(self.img_dir) and self.split is not None
        if self.emb is not None:
            if os.path.exists(self.emb['emb_path']):
                print('emb path {} exists; reading content into self.idx_to_class_emb_vec'.format(self.emb['emb_path']))
                self.idx_to_class_emb_vec = np.load(self.emb['emb_path'])
            else:
                print('Generating emb map for {} and dumping it to {}'.format(self.emb['emb_selection'], self.emb['emb_path']))
                self.idx_to_class_emb_vec = self.set_emb_vecs(self.emb['emb_selection'])
                os.makedirs(os.path.dirname(self.emb['emb_path']), exist_ok=True)
                np.save(self.emb['emb_path'], self.idx_to_class_emb_vec)

    @staticmethod
    def parse_vec(s: str):
        return np.asarray(list(map(float, s.split())))

    def set_glove(self):
        gloves = np.zeros((len(self.CLASSES), self.EMB_DIM), dtype=np.float32)
# background
gloves[0] = self.parse_vec('0.18118 -0.40912 -0.62699 0.66349 0.52609 0.22725 1.0126 -0.59382 0.10862 -0.57365 -0.37711 -0.13031 -0.26549 -0.60595 -0.30474 0.37288 0.59625 -0.42375 0.37377 -0.25376 -0.50042 0.22405 0.060972 0.4461 -0.43079 0.82193 0.32821 0.074003 -0.38279 0.34623 -0.018477 0.12097 -0.30397 -0.2223 0.34332 -0.53648 0.4261 -0.46458 0.26739 -0.43006 0.36895 0.085638 -0.24012 0.23693 -0.32983 0.03293 -0.18042 -0.48865 -0.46444 -0.33425 0.12641 0.38959 0.27181 0.41119 -0.21471 -0.17354 -0.26007 -0.19764 0.35169 -0.2575 0.30232 -0.54958 0.84861 -0.035018 0.19377 -0.30884 -0.4549 0.27329 0.24261 0.064083 -0.043068 -0.097231 -0.4867 -0.16006 -0.44754 0.16983 -0.071483 0.23399 -0.071736 -0.10345 0.091824 0.37589 -0.11695 -0.69112 0.0019069 -1.0029 0.76611 0.53357 -0.16726 0.045585 -0.35476 0.42482 -0.42188 -0.90294 0.35648 -0.053211 0.20893 0.24375 -0.044005 0.25183 -0.14815 -0.45803 0.0016516 0.14754 -0.84007 -0.075739 -0.25731 0.07955 -0.29573 -0.033395 -0.052914 0.21006 -0.78917 -0.10985 0.19122 0.35997 0.45986 0.46184 -0.20671 -0.34638 0.23701 -0.17631 0.68373 0.19996 0.44723 0.46754 -0.011539 -0.50926 -0.092837 0.62416 0.50671 0.30946 0.080026 0.68015 -0.38421 0.19975 0.21836 0.26643 -0.36207 0.37803 0.039925 -0.29145 0.33588 0.0147 -0.50419 -0.014059 -0.3277 -0.18551 0.11956 0.24942 0.45746 -0.33681 -3.1694 -0.60703 -0.37267 0.7583 0.7298 -0.26974 -0.42411 0.30439 0.10229 0.034068 -0.2953 0.45362 -0.29974 0.19263 -0.053222 -0.57162 0.20615 0.22706 0.21614 -0.25294 0.26947 -0.13109 -0.13436 0.19612 0.019767 0.23358 0.089522 0.12016 -0.20462 0.18511 -0.014861 0.036399 0.063346 0.31004 -0.50888 0.19682 0.3836 -0.62704 0.18202 -0.0037371 0.36821 -0.46103 -0.4889 0.021669 0.25197 0.11028 -0.54431 0.09691')
# airplane (instead of aeroplane)
gloves[1] = self.parse_vec('-0.42252 -0.72499 0.3823 -0.28675 -0.070732 1.082 0.61925 -0.51744 -0.24192 0.36525 -0.10519 0.68813 -0.82839 0.0121 -0.30335 0.057322 0.077832 0.11161 0.46033 -0.21916 -0.049768 -0.24293 0.12415 -0.40696 0.32383 1.0217 0.62564 -0.75066 -0.41027 -0.0758 -0.1808 -0.027986 0.21466 -1.1386 0.20759 0.67844 -0.60843 0.28039 1.0015 0.014468 0.2675 -0.10874 -0.23052 -0.83247 0.2413 -0.11418 -0.31517 -0.28662 0.067465 0.17122 0.16358 -0.38727 -0.33752 0.15207 0.071406 -0.23285 -0.39292 0.79661 -0.01181 -0.61611 0.42596 -0.024823 0.51229 -0.1942 -0.31514 -0.9923 0.26809 -0.16498 0.20328 -0.21459 -0.70433 -0.0017985 -0.65342 -0.85474 0.161 -0.71959 -0.50075 -0.18926 0.31129 0.90581 0.58413 0.87044 -0.056666 -0.26441 0.29036 0.07847 0.026343 0.3536 -1.1024 0.4081 0.26188 -0.20925 -0.728 -0.04421 -0.21305 -0.2336 -0.33843 -0.27006 -0.81843 -0.19834 0.58124 0.039614 -0.90533 0.39462 -0.35865 -0.47045 0.22981 -0.044953 0.28625 -0.14308 -0.31557 -0.015199 -0.28968 -0.28257 -0.72873 -0.13707 -0.0014256 -0.44722 -0.14099 -0.062103 0.53414 -0.18197 -0.13406 -0.41105 -0.39153 0.73264 0.031486 0.3796 0.40439 0.37544 0.49086 0.38665 0.095826 0.2573 -0.47709 -0.5425 0.19142 0.66534 -0.26036 0.044465 -0.1965 0.21443 0.090587 0.48187 0.063059 0.10099 0.23694 -0.16066 -0.39295 -0.62392 1.2988 -0.2949 -1.8037 0.32934 -0.11134 0.0236 0.29623 -0.39351 0.058452 -0.37467 -0.029277 0.073365 0.3801 0.67572 0.10034 -0.27386 -0.58898 0.18683 0.029444 0.20757 0.01653 -0.4761 0.15124 -0.24604 0.064738 0.22999 -0.80299 0.20186 -0.012943 0.80957 0.25185 -0.28367 -0.0093086 0.2747 -0.91049 0.24138 0.31127 -0.084327 0.15578 -0.23792 0.74639 -0.24335 -0.084517 -0.072658 0.027183 0.083656 0.10962 0.025677 0.26856 0.049582')
# bicycle
gloves[2] = self.parse_vec('-0.20953 0.71027 0.20456 0.030102 -0.15586 -0.0017965 0.64207 -0.7232 0.6517 -0.50303 0.46756 0.38291 -1.0521 -0.35768 -0.093636 -0.026939 -0.15788 -0.079467 0.56652 0.34809 0.92798 -0.26454 0.89587 -0.18168 -0.2479 1.0151 0.1562 0.29677 -0.24759 -0.32084 0.18955 -0.15548 0.54911 -0.67806 0.47885 0.027665 -0.26085 0.22484 0.48099 0.058068 0.55766 0.084012 -0.051574 0.40012 0.31149 -0.34196 0.091012 0.32463 0.48642 0.49414 0.16418 -0.62328 -0.41107 -0.56187 0.57129 0.045219 0.095414 0.12472 0.19763 0.10691 -0.12217 0.10911 0.094954 -0.20152 -0.17483 -0.42543 0.16613 0.58936 -0.095105 -0.44676 -0.36252 0.10529 -0.36694 0.103 0.21666 0.17183 -0.45817 0.25464 -0.066484 -0.39853 0.59684 0.24154 -0.46958 -0.42001 0.19729 0.45703 0.9437 0.19471 -0.28348 0.26896 0.17337 -0.28803 -0.21938 -0.04655 -0.45331 0.21835 -0.11856 0.13973 0.085915 0.59576 0.83806 0.44316 0.061283 -0.023769 -0.54969 -1.5631 -0.30484 -0.5664 0.17964 0.29822 0.67106 0.29276 0.32477 0.35384 0.12617 0.65192 -0.64218 -0.38125 -0.52421 0.66719 0.61863 -0.29273 -0.2346 0.22393 -0.29474 -0.44579 -0.025123 1.3639 -0.09371 0.4203 1.0943 0.54408 0.28939 -0.42816 -0.44594 0.49912 -0.24159 0.27606 -0.14985 0.13104 -0.39032 0.26478 0.03135 0.18696 -0.39013 -0.0049679 0.50424 -0.36814 0.17211 0.68211 0.6758 -0.56006 -1.8271 0.35589 0.007275 -0.056845 -0.12371 0.62302 0.25987 -0.27712 0.66312 0.49514 0.3868 0.20792 -0.24442 -0.03075 -0.042747 -0.099471 0.076467 0.0563 0.81152 -0.2869 -0.53929 -0.16035 0.71853 0.59261 0.050601 -0.62398 0.25599 0.37793 0.56556 0.24721 0.32267 -0.34488 0.035321 -0.45666 0.083282 -0.74305 0.25377 0.26414 0.53079 -0.27572 0.12793 0.22571 0.028646 -0.22586 0.5593 0.15792 0.002043 0.15446')
# bird
gloves[3] = self.parse_vec('0.050286 -0.40342 -0.085693 -0.11261 -0.40626 0.27764 0.28587 -0.036815 0.29082 0.53717 -0.096179 0.20294 -0.52494 -0.42556 -0.020042 0.59147 0.60556 -0.096592 0.078125 -1.009 -0.48508 0.26272 -0.36493 -0.72437 0.044094 0.46839 0.22695 0.080163 -0.18623 0.49568 -0.067437 0.29948 -0.36965 -0.73587 -0.033697 0.35647 -0.13801 0.42026 -0.064175 -0.35642 -0.40864 0.081728 0.1202 -0.45304 0.35192 -0.16238 -0.40587 0.28837 0.72754 0.5276 -0.12201 -0.18372 0.36878 0.46526 0.32681 -0.56752 -0.50191 0.60814 0.57881 0.0227 0.23608 0.035366 0.16645 -0.028746 -0.13858 -0.42193 0.42848 -0.011398 0.32289 0.204 -0.34057 0.30971 -0.5685 -0.85169 -0.12805 -0.3842 -0.11821 0.050055 0.50502 0.58767 1.0039 0.3996 -0.027687 0.17466 -0.22844 0.12718 -0.51194 -0.45218 -0.20525 0.055035 0.27 -1.0207 -1.1003 -0.51314 -0.35455 -0.13669 -0.17903 0.10799 -0.24093 0.66859 -0.13704 0.50379 -0.065461 0.15555 -0.51893 0.62364 -0.52682 0.16933 -0.44093 -0.090353 -0.84958 0.42558 -0.31874 -0.38313 0.39895 -0.067433 1.0144 -0.17431 -0.063368 -0.60363 0.20053 0.13679 -0.024741 0.47469 -0.77892 -0.28663 -0.27192 -0.67562 0.28207 0.1935 0.063162 0.73112 0.072682 0.51456 -0.55077 -0.25402 -0.077662 0.035238 -0.32021 -0.33759 -0.24357 0.035842 0.81423 -0.3508 0.18006 -0.049245 0.12888 -0.16803 -0.3665 0.63389 -0.13232 -0.54769 -3.4213 -0.38828 -0.24938 -0.41294 -0.2727 -0.3304 0.23315 -0.52551 0.21471 -0.38583 -0.30177 0.30061 -0.33541 -0.60107 0.23551 -0.80369 -0.13737 -0.1429 0.16166 0.32293 -0.12294 0.16138 -0.093296 0.14234 0.27728 0.036312 -0.19796 0.1936 -0.46891 0.82351 -0.53899 -0.24703 0.049887 0.54725 0.009746 0.57974 -0.0091502 -0.34196 0.026213 0.19177 0.5079 0.16918 0.6699 0.4473 -0.61384 -0.015805 -0.42108 -0.087537')
# boat
gloves[4] = self.parse_vec('-0.39539 -0.25468 0.043564 0.36511 0.18522 0.329 0.19064 -0.11648 0.31226 -0.040298 -0.0062365 0.30342 -0.42173 0.77493 -0.03998 -0.067118 0.13732 0.95702 0.40353 -0.33322 -0.59533 -0.12267 0.12258 -0.042508 -0.14386 1.1716 0.39072 -0.047285 -0.0033427 -0.81392 0.72796 0.052686 -0.049161 -0.71438 -0.086344 0.33522 0.14088 0.70827 0.2561 0.16326 -0.006642 0.090248 0.16412 0.17618 0.47049 0.018178 0.77729 -0.39745 0.71365 0.64572 0.26825 -0.00055794 -0.76011 -0.37583 -0.20395 -0.083587 -0.49212 0.35199 -0.091585 -0.42059 0.166 1.0091 0.11889 -1.0233 -0.25455 -0.0037728 -0.31496 -0.0079189 -0.00569 -0.94841 -0.24254 0.00080959 0.65628 -0.54486 0.6096 -0.38037 -0.78455 0.12337 0.72398 -0.31379 1.1729 -0.18303 0.10475 -0.04287 -0.27979 -0.10889 0.3874 0.11326 -0.15383 0.32006 -0.11064 -0.1193 -0.33176 -0.31274 -0.11912 0.16069 -0.037982 0.23802 -0.91678 0.30449 0.60797 0.073835 -0.26335 -0.029634 -1.05 0.20826 -0.21924 0.13652 0.40489 0.25212 -0.22705 0.26812 -0.1994 -0.53777 -0.4988 -0.47727 -0.66004 -0.83413 0.047445 0.15756 0.23355 0.21463 -0.056451 0.080833 -0.044144 -0.046193 0.020127 0.61713 0.23021 0.46089 0.45184 -0.053696 -0.29686 0.065724 -0.2795 0.38674 0.10408 0.34197 -0.55379 -0.67967 -0.47101 0.35917 0.1974 -0.043696 -0.052605 -0.73159 0.16067 -0.3786 0.2434 0.61161 0.48916 -0.57555 -3.0066 -0.3901 0.38596 -0.048683 0.39269 0.6831 0.64456 0.87903 0.21022 0.17747 0.017671 0.60079 -0.41003 -0.26996 -0.0044936 0.14928 -0.40555 -0.16593 -0.85092 0.027109 -0.40114 -0.038453 0.17137 -0.17077 -0.17581 0.11836 0.223 0.59717 0.36317 -0.035388 0.30407 0.53003 -0.090254 -0.50943 0.28771 -0.1125 -0.35207 -0.07374 0.57425 -0.60225 -0.19009 0.43454 0.64101 -0.081903 0.529 0.15899 0.021136 -0.016624')
# bottle
gloves[5] = self.parse_vec('-0.79897 0.12251 0.15633 -0.023137 0.20395 -0.40863 0.11329 -0.26234 -0.04337 -0.28863 0.32162 0.80217 -0.69404 0.072699 0.032425 0.081859 0.49708 0.44474 -0.20787 0.10049 -0.36369 -0.020898 -0.0027382 -0.61522 0.38828 1.4885 -0.031765 0.27525 0.4149 0.13678 0.032849 0.094527 -1.2946 -0.14829 -0.75905 0.21244 0.11954 -0.25734 0.21472 0.11741 0.23785 0.23741 -0.32102 -0.16134 0.21676 0.05692 0.3519 0.57165 -0.13035 0.25762 -0.13437 0.048592 0.069208 -0.12793 0.08571 -0.17723 0.75061 0.074342 -0.63924 -0.046564 0.18867 -0.22023 -0.12546 -0.53414 0.21347 -1.2106 -0.14119 -0.62831 0.80332 -0.020454 0.21436 -0.5496 0.38633 0.36767 0.26217 -0.33457 0.10184 0.025629 0.01278 -0.0032671 1.1778 0.25938 -0.15306 -0.96678 0.5922 0.69536 -0.28397 0.082051 -0.4951 1.5883 -0.47416 0.017795 -0.041617 -0.5739 0.10164 -0.25656 -0.37935 -0.0095207 -0.29664 0.33145 -0.20419 -0.18354 -0.066054 0.56563 -0.8608 -0.54741 0.14342 -0.7112 -0.76279 -0.50002 -0.69331 0.75902 0.05013 -0.75578 0.058621 -0.36132 -0.57238 -0.18413 -0.10716 0.1963 -0.28295 0.13177 0.37334 -0.49856 0.085692 0.14263 0.040408 0.46739 0.47784 -0.35338 0.032038 -0.31784 -0.53549 -0.49545 -0.24752 0.082921 -0.22467 -0.093533 0.20728 0.49855 -0.056853 0.15364 -0.11297 -0.5746 0.33484 -0.55111 0.74624 0.21023 -0.20434 0.19723 0.6313 -0.2206 -3.2973 0.40132 -0.045925 0.031596 -0.19902 0.52396 0.18297 0.2443 0.30136 -0.26096 0.4531 -0.36779 -0.019241 0.17294 -0.69498 0.31856 0.10471 0.47494 0.11335 0.68598 -0.37452 0.053953 -0.72787 -0.50056 -0.33375 0.64967 0.29411 0.48564 0.034691 -0.04236 -0.02612 -0.10335 -0.27702 0.018744 -0.021129 -0.73097 -0.15203 -0.11875 -0.15249 -0.15179 -0.53379 0.75922 0.92714 -0.14741 0.26636 -0.23923 0.84491 -0.7012')
# bus
gloves[6] = self.parse_vec('0.36878 -0.040716 0.14877 -0.16091 0.25884 0.42093 -0.08497 -0.20741 -0.24405 0.16025 0.18248 0.27653 0.17274 -0.15511 0.1832 -0.59696 0.35511 0.21179 -0.88778 -0.14127 0.27427 -0.22426 -0.49829 -0.2489 -0.64608 -0.51976 0.029963 -0.39474 -0.3698 -0.45758 -0.26379 0.0055427 0.072394 -0.4574 0.13783 0.41553 -0.71718 0.36648 0.80797 0.11551 -0.44923 0.33793 -0.38741 -0.55758 0.064246 0.040185 -0.13671 0.15378 0.41823 0.33495 0.265 -0.18855 -0.20561 -0.56125 -0.49499 -0.29046 -0.38711 -0.040435 -0.60069 -0.37021 0.40149 -0.15775 0.64168 -0.027062 0.43667 -0.3754 0.2332 0.4121 -0.3158 -0.1494 -0.23384 -0.013539 0.25869 -0.56107 -0.29731 0.56592 -0.13422 0.012458 0.19112 0.35151 0.3017 -0.63447 -0.020045 -0.027795 -0.0084391 0.27444 -0.13512 -0.70592 0.64869 -0.32654 0.13714 -0.43252 -0.1321 0.32763 0.043845 0.2212 0.18353 -0.15674 -0.50952 0.15471 0.60796 -0.63089 0.29242 -0.37111 -0.31205 -0.91168 0.4415 -0.25655 0.36425 0.097246 -0.55528 0.29396 0.45414 -0.10683 -0.17456 -0.3311 -0.10974 -0.32565 0.10095 0.74103 0.3077 -0.60567 -0.34343 -0.08782 -0.36266 0.63673 0.17799 0.61259 -0.18688 0.80418 0.42218 -0.20539 0.14961 -0.30303 -0.79753 0.10696 -0.35002 0.23048 0.15042 0.061245 -0.59652 0.0026576 0.05751 0.034295 0.024454 0.097094 -0.0058212 -0.79352 -0.43982 -0.45078 0.33703 -0.081068 -4.0471 0.21823 0.20914 -0.66168 -0.010194 0.86391 0.31894 0.0099252 0.69654 0.4219 0.68502 0.26832 -0.31542 -0.60462 -0.89089 -0.27853 -0.28233 -0.22141 -0.31363 -0.045722 -0.78919 -0.42835 0.90955 -0.49916 0.20697 0.036049 -0.38361 0.69864 0.58477 -0.12021 -0.14528 0.61904 -0.39795 0.042507 -0.04765 0.37876 0.54698 0.26489 0.6039 -0.48082 0.017844 0.4663 -0.35059 -0.098496 -0.5092 0.43729 -0.3703 0.73458')
# car
gloves[7] = self.parse_vec('-0.023756 -0.6095 -0.64204 0.21877 0.46728 0.18328 -0.017327 -0.1671 0.15519 -0.19869 0.58117 0.40394 -0.39322 -0.14633 -0.14179 0.015474 0.11165 -0.10333 -0.20328 -0.071406 0.12644 -0.26139 -0.36218 -0.67246 -0.34604 0.59822 -0.17553 -0.031497 0.11128 -0.3225 0.061777 0.38997 -0.33846 -0.1767 -0.082802 0.41319 -0.47078 0.48865 0.74484 0.24344 0.43444 0.34383 -0.63643 0.41448 -0.38013 -0.16224 0.41776 -0.045915 0.76219 0.055854 0.80065 0.22815 -0.95708 -0.064152 -0.25136 0.030722 -0.56599 0.13781 0.093393 -0.83462 0.32205 -0.065024 0.86411 -0.054507 0.19187 -0.39785 0.16377 0.57524 -0.37361 -0.72036 -0.48547 0.18768 -0.2428 -0.0031741 -0.43129 0.21333 -0.36452 0.15536 -0.18761 0.43804 0.66989 0.1977 -0.48026 0.17955 -0.26623 0.3866 0.37762 0.33181 -0.29401 0.089559 -0.1417 0.090185 0.23631 0.05726 0.49807 0.5556 0.0085019 -0.19751 -0.99868 -0.12837 0.72538 -0.21058 -0.17776 0.54406 -0.51257 -0.30398 0.5172 -0.4982 0.72498 -0.13728 -0.15657 0.48735 -0.12313 -0.44957 0.10629 0.13345 -0.71389 -0.41793 -0.77205 0.70404 0.35033 -0.33719 -0.23397 -0.18326 -0.36967 0.76203 0.23946 0.85417 0.069386 -0.19864 0.38917 -0.12225 -0.34538 0.062926 -0.31898 0.17836 -0.4046 0.38409 -0.20409 0.35095 -0.42669 -0.06645 0.2125 0.14951 -0.23864 0.1338 0.11083 0.21279 -0.0037618 -0.13022 0.21465 -0.51508 -4.7217 0.15789 0.26162 -0.15878 0.012484 -0.13879 0.40189 -0.49206 0.35261 0.62121 0.37681 0.54427 0.06366 -0.3226 -0.47194 -0.6409 -0.16708 -0.067091 0.21019 0.52271 -0.51378 -0.45009 0.77929 -0.033527 0.34275 0.15728 0.22613 1.0059 0.091323 0.025024 0.1937 0.17346 0.35938 -0.59598 0.52244 -0.32664 0.23388 0.29734 -0.1782 -0.58709 0.58139 -0.39022 -0.17797 0.02756 -0.2737 0.00032772 0.3212 0.31734')
# cat
gloves[8] = self.parse_vec('0.14557 -0.47214 0.045594 -0.11133 -0.44561 0.016502 0.46724 -0.18545 0.41239 -0.67263 -0.48698 0.72586 -0.22125 -0.20023 0.1779 0.67062 0.41636 0.065783 0.48212 -0.035627 -0.47048 0.077485 -0.28296 -0.49671 0.337 0.71805 0.22005 0.12718 0.067862 0.40265 -0.01821 0.78379 -0.52571 -0.39359 -0.56827 -0.15662 -0.084099 -0.20918 -0.066157 0.25114 -0.40015 0.1593 0.17887 -0.3211 0.09951 0.52923 0.48289 0.14505 0.44368 0.17365 0.3635 -0.51496 -0.12889 -0.19713 0.18096 -0.011301 0.84409 0.98606 0.83535 0.3541 -0.23395 0.3551 0.41899 -0.054763 0.22902 -0.19593 -0.57777 0.29728 0.33972 -0.31119 -0.32498 -0.42557 -0.70302 -0.72515 -0.29349 0.49964 -0.32889 0.24359 0.13243 0.31164 1.2156 0.31241 -0.23794 0.38422 -0.321 -0.28756 -0.20047 0.34454 -0.64929 0.28021 0.060203 0.053618 -0.13341 0.2451 0.18639 -0.0016346 -0.066883 0.077845 -0.085217 0.75257 0.76264 -0.053318 0.071056 0.30552 -0.43411 -0.19361 -0.10493 -0.53732 -0.239 -0.47298 -0.029825 -0.20206 -0.48945 -0.13616 0.49622 0.20743 -0.077396 -0.34304 0.0062387 -0.0065902 -0.24729 -0.013859 -0.079919 0.43452 0.23415 0.17995 0.13236 -0.22717 -0.55278 0.042005 0.21937 0.42042 0.43639 -0.58305 -0.118 0.15379 -0.29596 -0.46251 0.52593 0.10471 -0.19973 -0.028228 0.49974 -0.58053 -0.51416 0.21325 -0.38394 -0.00059821 0.16525 -0.055993 -0.4008 -0.05483 -3.8842 -0.022136 -0.46989 0.23502 0.081298 0.83091 0.47251 0.074057 0.15737 0.065809 -0.26756 0.1947 -0.63597 -0.59914 -0.21369 0.011718 -0.25464 -0.19629 0.18017 0.59031 0.0062176 0.51122 0.36601 -0.27381 -0.11342 0.21195 0.43099 -0.43837 0.12842 0.39312 -0.19492 0.056414 0.54343 0.13678 -0.71087 0.38758 -0.0078956 -0.32383 0.064193 -0.22329 0.071366 -0.30966 -0.46142 0.29545 -0.49186 0.24053 -0.46081 -0.077296')
# chair
gloves[9] = self.parse_vec('0.083778 -0.31358 0.44036 -0.19852 0.43794 0.51642 0.53045 0.38768 -0.25435 -0.13987 -0.087003 0.52748 -1.0245 0.26502 0.39768 -0.080842 -0.22176 0.25287 -0.22036 0.19245 0.31503 0.24298 -0.31244 -0.5538 -0.065636 1.1332 0.59765 -0.044034 -0.78153 -0.86698 0.28703 -0.76905 -0.084277 -0.22998 -0.15668 -0.3007 0.3213 0.056273 0.28742 0.2602 0.84825 -0.0071684 0.37892 -0.012884 0.00038517 0.17809 0.63603 0.89252 0.3586 0.20689 0.46894 -0.53883 0.0010013 -0.040398 0.0050846 -0.088845 0.40522 -0.00066163 0.40549 0.078797 0.22208 0.28788 0.7882 -0.70755 -0.39356 -0.29528 0.40909 -0.36923 0.72393 -0.17285 0.097639 -0.028392 -0.028554 -0.18386 -0.21958 0.41438 0.12902 0.29108 -0.49385 0.30497 0.020471 0.10858 -0.44766 -0.072593 0.50049 -0.34468 0.45321 0.1845 -0.35328 0.43199 -0.11018 0.26425 -0.63166 0.11634 0.67827 -0.57504 0.16556 -0.88157 -0.94127 0.35106 0.15176 -0.13839 0.12987 -0.33697 -1.1608 -0.14715 -0.054598 -0.32148 0.070592 -0.30956 -0.07437 0.76935 0.19682 -0.59907 -0.10843 0.39593 0.11362 -0.85316 0.10575 0.25386 -0.0021121 0.47077 -0.11135 0.35682 -0.13714 0.21096 0.058276 0.55903 0.25444 0.32109 0.35921 0.66993 -0.59417 -0.043362 -0.12672 -0.66172 -0.0062734 0.6619 0.13831 0.63765 -0.42123 -0.26323 0.13225 -0.62235 0.42746 -0.32953 0.17725 -0.2127 -0.13381 0.39902 -0.24999 -0.28896 -2.8864 0.3831 0.091285 0.35551 0.3535 0.061948 0.35884 -0.020577 0.19219 -0.018047 0.88794 0.11279 0.25829 0.14008 -0.00049045 0.33372 0.10877 -0.20534 0.49567 0.18442 -0.51278 0.39767 0.95853 -0.38023 -0.01555 0.52021 -0.40211 0.38038 0.25662 0.11418 0.833 -0.039078 0.19066 0.15591 -0.45687 -0.12533 0.96457 -0.77102 0.42057 -0.37074 0.20668 0.32806 -0.12334 -0.38058 0.66554 0.10284 -0.38228 -0.26866')
# cow
gloves[10] = self.parse_vec('-0.50022 -0.36807 0.67852 0.73902 -0.265 0.2138 0.80012 -0.32307 -0.022903 -0.095265 -0.049275 0.85775 -0.1414 -0.23757 0.53613 0.76321 0.63271 -0.98486 0.21919 -0.45295 0.63721 0.11644 -0.6411 -0.14992 0.22396 1.0825 -0.09032 0.063134 0.09663 0.39048 0.12483 0.52111 -0.30639 -0.11429 -0.36173 0.20997 -0.32267 0.3406 0.095895 -0.046656 0.34377 -0.12895 -0.6377 0.35499 0.095412 0.26032 0.11898 0.32955 1.1196 0.10973 0.15534 -0.12486 -0.35955 -0.013375 0.41262 -0.37091 0.62772 0.44115 0.11786 0.5494 -0.79519 0.58553 0.09613 0.076929 -0.19485 -0.094721 -0.40216 0.47339 0.031281 0.56596 -0.096632 -0.28741 -0.058642 -0.60075 -0.258 0.11909 -0.31724 0.21365 -0.036304 0.40186 0.28296 0.60792 -0.64312 0.25329 -0.82223 0.64957 -0.15475 -0.057517 -0.048461 0.31191 -0.46918 -0.29295 0.2265 0.15877 0.21139 -0.077235 0.37437 -0.14858 0.30027 0.48047 -0.098092 0.46117 -0.21483 0.13998 -0.83095 -0.45552 -0.11837 -0.11443 -0.31663 -0.79722 0.058454 -0.23475 0.066028 0.22309 0.14601 -0.044701 -0.33712 0.63045 -0.16638 -0.67182 -0.2189 -0.14132 -0.043728 0.54265 -0.37985 0.059618 0.075789 -0.55127 -0.27159 0.11659 0.3785 0.16998 0.66348 0.20145 -0.097833 0.18527 -0.097937 0.64232 -0.40563 -0.21788 -0.35083 0.52864 -0.20921 -0.98088 0.066697 0.42067 0.13533 0.10734 -0.22574 -0.052797 0.041153 0.14589 -2.7129 -0.13888 0.10586 -0.37203 -0.043385 0.59728 0.34913 0.2266 0.094155 -0.23491 0.20874 0.063022 -0.13774 -0.61335 -0.55479 -0.032523 -0.35708 2.6989e-05 0.29623 0.44281 -0.29544 0.40348 0.030594 -0.48329 -0.44488 0.29776 0.19371 -0.068755 -0.53631 0.31017 -0.086424 0.11114 -0.055969 0.33717 0.077037 -0.062266 -0.19782 0.3087 -0.011787 -0.092054 0.49202 0.9067 -0.3875 -0.38298 -0.51466 0.27193 -0.46579 0.39654')
# table (instead of diningtable)
gloves[11] = self.parse_vec('-0.134 -0.33646 0.54234 -0.38614 0.35032 -0.042428 0.65948 0.50268 -0.23358 0.065875 -0.2383 0.3261 -0.88971 0.1316 0.1286 0.54411 -0.060063 -0.58494 -0.87027 0.068012 0.23148 0.060188 -0.34582 -0.5468 0.10941 0.51938 0.082787 0.22915 -0.0094834 0.040299 0.24899 -0.306 -0.22724 -0.58301 0.20897 -0.29863 0.61531 0.20226 0.88812 0.25077 0.37314 -0.081076 0.21412 0.23626 0.20637 0.13475 0.38395 0.23572 0.19801 0.34831 -0.29573 0.057377 0.22969 0.20866 0.67706 -0.3422 0.19446 -0.048101 0.062835 -0.35476 0.36633 0.26445 0.38393 -0.2259 -0.35441 -0.17699 0.49916 -0.39928 1.2351 0.087057 -0.12733 -0.17771 -0.33468 0.35263 -0.012405 0.030928 0.52244 0.058012 0.042316 0.65819 0.056759 -0.4262 0.022662 -0.933 0.60916 -0.12176 0.42021 -0.393 -0.23767 0.074235 -0.073421 0.88081 -0.72143 -0.38029 0.50629 0.0015509 0.10175 -0.53257 -0.56345 0.93009 0.02815 -0.13692 -0.15743 0.22503 -0.64667 -0.28772 -0.68087 -0.41039 0.070034 0.022488 -0.42095 -0.02085 0.0089226 -0.49268 0.20415 0.20063 0.47755 -0.47341 -0.070567 0.35511 -0.19021 0.55616 0.071037 0.48354 0.053282 0.194 0.64685 0.70101 -0.051358 -0.15977 0.54975 0.0050765 -0.088246 -0.20462 -0.68097 -0.36608 -0.45045 0.098466 0.039217 0.79404 -0.26734 -0.16116 -0.20512 -0.80283 0.52077 -0.27359 0.61654 -0.25623 -0.29343 0.20662 -0.60995 -0.48954 -3.5513 0.20977 0.37195 0.41746 0.24383 -0.25487 0.17495 0.085444 0.23693 -0.12911 0.040175 -0.15206 0.15921 0.2538 -0.092471 0.21385 0.81152 0.22078 0.36054 0.2941 -0.45904 0.12069 0.71867 -0.17193 0.25481 0.63885 -0.34664 0.58897 -0.23721 -0.15426 0.35082 -0.58878 -0.0075455 -0.20697 -0.38027 -0.53076 0.060267 -0.59977 0.16978 -0.18702 0.27114 -0.44326 0.171 0.067128 0.218 -0.10632 0.33975 -0.32446')
# dog
gloves[12] = self.parse_vec('-0.49586 -0.59369 -0.107 0.05593 -0.24633 -0.14021 0.63707 0.024992 0.25119 -0.55602 -0.37298 0.60131 -0.35971 -0.096752 0.18511 0.58992 0.47578 -0.16833 0.67079 -0.29472 0.069403 0.05334 -0.36154 -0.12883 0.27814 0.87467 0.12119 0.78215 -0.50617 0.28794 0.14213 0.83281 -0.27079 -0.28813 -0.67607 0.17991 -0.11046 -0.063062 -0.56297 0.36639 0.11009 0.2965 -0.12457 -0.11112 -0.24293 0.53344 0.75589 0.078154 0.91641 0.20878 0.01236 -0.71199 0.19085 -0.5199 -0.14181 0.078136 0.44157 1.0958 0.59009 0.35117 0.021684 0.1073 0.19942 -0.26355 0.084024 -0.32073 -0.24306 0.44821 0.14432 -0.063988 -0.15013 -0.33644 -0.67873 -0.64554 0.10706 0.64709 -0.20094 0.064682 0.035356 0.029288 0.99793 0.34343 -0.019469 0.70635 -0.54329 -0.057843 0.12624 -0.18132 0.099001 0.4478 -0.2641 -0.37506 -0.11238 -0.011805 0.33187 0.45295 0.1682 0.18379 0.29457 0.98963 0.5394 -0.0025833 -0.10989 0.30163 0.34495 -0.2275 -0.21093 -0.79685 0.29833 -0.64644 -0.18653 0.31771 0.061874 -0.44503 0.34052 0.5552 0.017743 -0.33609 0.18478 0.392 -0.44685 -0.2591 -0.4929 0.61712 -0.24546 0.15348 0.19796 0.041105 0.030167 0.13735 0.29154 0.079533 0.53594 -0.61848 0.082946 -0.43806 -0.16041 -0.44336 0.065162 0.29823 -0.13321 0.55445 0.29978 -0.63209 -0.45078 0.1534 -0.31124 0.258 0.062033 0.047879 0.37758 -0.007643 -4.328 0.65362 -0.45488 -0.4565 0.23566 1.0171 0.53344 -0.025861 0.067191 0.60342 -0.56511 0.57175 -0.47311 -0.43066 -0.13385 0.011506 -0.32674 -0.47726 0.010775 0.49053 -0.11302 0.23358 0.098286 -0.55746 0.096976 0.036503 0.41838 -0.22967 0.12346 0.23573 -0.17653 0.03863 0.62339 -0.083598 -0.62161 0.11059 0.11316 -0.26833 0.023406 -0.018887 -0.63446 -0.16513 -0.16886 0.087242 -0.10353 0.06788 -0.20546 0.17962')
# horse
gloves[13] = self.parse_vec('-0.8107 -0.2135 0.57229 0.38901 -0.53731 0.076275 0.80555 -0.64481 0.58122 -0.003714 0.15482 0.5188 -0.73224 -0.17708 0.37883 1.0903 0.39686 -0.38992 0.45664 -0.31646 0.49369 -0.16371 -0.45948 -0.21822 0.34105 0.96526 0.25932 0.12078 0.012586 0.084278 0.50996 0.27742 -0.15154 -0.13721 -0.098856 0.12999 -0.41539 0.21986 -0.27817 -0.1278 0.1805 -0.71333 0.3577 0.42558 0.25589 0.443 0.36289 0.17151 1.0117 0.74856 0.26782 -0.029225 -0.36808 -0.13197 0.51501 0.13333 0.0058557 0.80578 -0.0721 0.70669 -0.50893 1.2565 0.20282 -0.13758 -0.5108 -0.34195 -0.24551 0.53538 0.2398 -0.30907 -0.20728 -0.82592 -0.34368 0.017876 0.092939 0.049257 -0.43085 -0.13684 0.019521 -0.20954 0.58053 -0.18977 -0.28645 0.44486 -0.5442 0.708 0.46365 0.086484 -0.042811 0.04067 -0.26089 -0.4174 -0.35112 -0.45257 0.27432 0.42729 0.4371 0.31975 0.017235 0.42254 -0.053444 -0.16006 -0.31785 0.33874 -0.23682 -0.34646 -0.30786 -0.55616 -0.045204 0.012021 -0.63051 0.3996 -0.29002 0.0079054 0.047329 0.5004 0.060087 -0.2037 0.12378 0.24339 -0.38377 -0.50928 -0.1049 0.14504 -0.39883 -0.24158 -0.33095 0.20819 0.81785 -0.34484 0.25812 0.017235 0.25583 -0.096405 0.16331 0.12816 -0.1257 0.11052 -0.19591 0.26462 -0.093251 0.74641 0.37195 -0.19395 -0.26052 -0.36437 0.46078 0.22374 -0.15367 0.3202 0.19659 -0.18048 -3.2003 0.24416 -0.36079 -0.022701 -0.10411 0.57065 0.20385 0.020388 0.78644 0.55647 -0.1408 -0.11196 -0.50173 -0.38527 -0.2307 0.062547 -0.54328 -0.56776 0.38209 0.10156 -0.16395 0.35198 0.55722 -0.34555 0.017989 -0.040839 0.28383 -0.049434 0.11944 0.086508 0.4774 0.073957 -0.23412 0.29014 -0.14949 -0.2585 -0.29038 1.0173 0.59803 -0.083486 0.30558 0.47593 0.026809 0.090965 0.052627 0.074359 -0.36702 0.20615')
# motorbike
gloves[14] = self.parse_vec('-0.71389 0.24032 0.18202 -0.088098 -0.0221 0.40635 -0.052222 -0.7371 0.10113 -0.53055 0.31645 0.038549 -0.19491 -0.32697 0.19943 -0.57279 0.37553 -0.032514 0.38359 0.17835 0.15913 -0.35313 0.02973 -0.23569 -0.43549 1.1884 0.14985 -0.16452 -0.40016 -0.56127 -0.33039 0.52782 0.3549 -0.20633 0.083044 0.2831 0.11659 0.30438 0.10226 0.10078 0.028741 0.47505 -0.20491 0.85168 0.27381 -0.36575 -0.10471 -0.12137 0.57409 0.20199 0.67158 0.24407 -0.37533 -0.47886 0.14464 0.39224 -0.0092274 0.1721 -0.1426 0.2096 0.29686 0.29672 0.51776 -0.47219 -0.47362 0.11739 -0.63042 0.49884 -0.23614 -0.24633 -0.70232 -0.25878 0.030875 -0.16369 0.46182 0.5136 -0.38601 -0.047488 -0.094195 0.13135 0.8398 0.36724 -0.71488 0.37607 -0.48174 0.091225 0.75595 0.025951 -0.53061 0.067239 -0.096866 -0.017189 -0.41604 -0.25577 0.062865 0.72158 0.40743 0.30805 -0.090951 0.41967 0.65751 -0.40103 -0.63055 0.71519 -0.21426 -0.93983 -0.062673 -0.26543 0.57424 -0.18904 -0.048883 0.16076 0.17014 0.066801 -0.33529 0.49848 -0.36642 -0.39713 -0.024494 0.20383 0.27226 -0.77433 0.30171 0.57367 -0.043116 0.079159 0.11836 0.44143 0.38227 0.052925 0.42209 0.25846 0.22436 0.099898 -0.56815 0.5277 0.045656 0.26299 -0.28591 -0.083705 -0.15414 0.31572 -0.33542 0.55337 -0.41729 0.14364 0.27906 0.25342 0.21754 0.048804 0.49028 0.074632 -0.86813 0.46902 0.071533 0.19098 0.034001 0.085827 0.53565 -0.41446 0.89643 0.55033 0.12035 0.35773 0.46279 -0.32748 -0.19938 -0.19853 -0.67121 -0.28168 0.062604 -0.08126 0.18431 -0.20818 0.5847 0.28582 0.29446 -0.4769 -0.039078 0.049103 0.16808 0.10381 0.10851 -0.38339 -0.52841 -0.53238 0.19347 -0.58147 -0.075303 0.94578 0.027874 0.1047 -0.28123 0.39113 -0.014863 -0.14572 0.27597 0.57036 0.051002 0.5199')
# person
gloves[15] = self.parse_vec('-0.0050341 0.43759 -0.10728 -0.12754 0.14574 0.44772 0.95882 -0.064739 -0.50419 0.33734 -0.023299 -0.16157 -0.50659 -0.19574 0.11752 0.45953 0.59953 0.52383 0.30061 -0.1844 0.13675 0.65594 -0.074337 -0.3523 -0.052698 1.6318 -0.0046084 -0.25087 0.089844 -0.18572 -0.22642 -0.10869 0.048051 -0.17346 -0.43151 0.046666 -0.17714 0.088511 0.2762 0.63112 0.41748 0.0931 0.13658 0.28507 -0.32909 0.089497 0.83896 0.098229 -0.059272 0.2835 -0.27827 0.19624 -0.049926 -0.69574 0.05352 0.060065 -0.068556 0.35591 -0.33751 -0.29361 -0.20059 -0.70989 0.46549 -0.44908 0.39502 0.49783 0.11653 0.54268 -0.48819 0.33826 -0.19704 -0.25727 0.26366 -0.22318 0.89299 -0.31712 0.10259 0.22438 -0.3718 -0.40868 0.38256 0.42004 -0.45121 -0.21513 0.014042 -0.049652 -0.11214 0.011164 -0.3606 -0.22827 -0.29906 0.53176 -0.054389 0.4932 -0.0052785 -0.086764 0.018286 -0.37717 0.51306 0.02191 0.014376 -0.40826 -0.054018 -0.92469 0.62715 -0.089945 0.20125 0.35328 -0.11475 0.15953 -0.26962 0.32959 0.060915 -0.14037 -0.20202 -0.2143 -0.034605 -0.011244 -0.59668 -0.091056 -0.71178 0.042869 -0.57287 -0.32826 -0.067884 -0.17087 -0.19935 -0.1571 0.044163 -0.31392 -0.23472 0.22923 -0.014186 0.6537 0.30681 -0.13804 0.021964 0.024048 0.47967 -0.3507 0.086764 0.68457 0.05042 -0.058323 0.59401 0.44433 -0.26444 -0.29732 0.031588 -0.43998 -0.16777 0.069608 -5.1022 0.47442 -0.27831 -0.10934 0.46917 -0.083847 0.25815 0.17722 0.39479 0.17018 -0.44708 -0.1237 -0.26057 -0.73399 -0.6979 0.36218 0.16067 -0.19531 0.13494 -0.14111 -0.2051 0.29239 -0.053072 0.0051988 -0.062671 -0.45236 0.38349 0.13699 -0.041298 0.29428 -0.23263 -0.032635 0.18313 0.23076 -0.62433 0.53785 0.33477 -0.2688 0.41107 -0.079753 0.32565 -0.42345 -0.12034 0.55607 -0.030407 0.2565 0.057437 -0.43445')
# plant (instead of pottedplant)
gloves[16] = self.parse_vec('-0.11111 0.057649 0.10509 -0.20679 -0.6105 0.31852 0.42001 -0.17437 0.082972 -0.54518 -0.39878 0.39278 -1.3354 -0.074956 -0.084963 0.30613 0.2464 -0.30133 -0.01795 -0.66445 -0.36118 -0.070666 -0.55449 0.11218 -0.2291 0.86695 -0.22428 0.14198 0.17671 -0.037329 0.037456 0.35369 0.29038 -0.65732 -0.20003 -0.2671 -0.053406 1.121 -0.17424 0.14477 -0.072602 -0.033538 -0.50043 0.23116 0.28005 0.62388 0.32728 -0.51442 0.1267 -0.31332 0.73642 0.3247 0.35872 -0.032586 0.022577 -0.04257 0.86809 0.13627 0.36609 0.28022 0.52616 0.79906 0.087693 -0.21913 0.024632 -0.053635 0.51211 0.17806 -0.40947 -0.079995 -0.56075 0.51136 0.77576 0.16721 0.19856 -0.00095677 -0.20017 -0.23092 0.37044 -0.58672 1.1888 0.084034 0.25076 0.077527 0.42798 0.18191 -0.15721 0.49148 -0.51208 0.1111 0.15223 0.25601 0.22023 -0.39595 -0.2301 0.51021 0.28086 0.5029 0.28635 0.40141 0.62646 0.091801 0.62058 -0.043011 -0.49427 -0.70087 -0.30576 -0.36211 -0.34107 0.57102 -0.8341 -0.070333 0.089223 -0.080423 -0.18906 -0.64046 -0.40274 0.22003 -1.3558 0.25769 -0.9221 0.25875 0.35121 -0.2247 0.10865 -0.44573 0.020137 0.42043 0.613 0.12949 0.36586 0.38093 -0.090628 0.62279 -0.20802 -1.1409 -0.07417 -0.103 -0.041084 -0.5689 -0.84172 -0.085305 -0.13616 -0.35128 -0.25108 -0.30743 -0.13205 0.0080276 0.17393 0.36135 0.18384 0.39011 -2.4754 -0.78184 -0.3437 0.1192 0.16951 0.10794 0.11712 -0.45028 -0.090958 0.19695 0.40981 -0.017894 0.36176 -0.23603 -0.38903 -0.1323 0.45368 -0.47899 -0.039468 -0.42611 0.72573 -0.39974 -0.44137 -0.30683 0.32104 0.75202 0.088467 -0.33332 -0.33403 -0.021241 -0.57665 -0.29877 0.86615 0.055897 0.44669 0.093072 0.36417 0.13264 0.26142 0.21439 1.0113 0.45556 -0.17741 0.63894 0.50394 -0.096641 0.093821 -0.37366')
# sheep
gloves[17] = self.parse_vec('-0.48883 -0.20854 0.19 0.47514 -0.51317 0.93146 0.39056 -0.063723 0.37478 -0.51601 0.17559 -0.43622 -0.43426 0.47602 0.37163 0.29038 -0.13016 -0.43486 0.46872 -0.37762 -0.27845 0.18054 0.19341 -0.34921 0.46227 1.0573 0.34446 -0.54068 0.025835 0.27329 0.0035218 0.72711 0.15759 0.23173 0.47177 0.091373 -0.33239 0.64459 0.2321 0.53376 0.77982 0.25495 0.32031 1.3828 0.32023 -0.28634 -0.034268 -0.29433 1.1474 0.54237 -0.6161 -0.40534 0.14573 -0.27524 0.19085 -0.27628 0.04799 0.4885 0.39575 0.36233 0.0032923 0.65519 -0.1076 0.089766 -0.4876 0.26514 -0.58268 0.032213 -0.090252 0.52496 0.10102 0.11129 -0.44439 -0.13691 -0.26121 0.0056128 -0.29664 -0.37598 0.39873 0.66627 0.97159 0.82369 -0.28087 1.0091 -0.50606 0.43705 0.14394 -0.11277 -0.11075 0.049597 0.11112 0.033074 -0.42926 -0.18468 -0.57982 -0.31848 0.59124 0.38171 0.18173 0.24726 0.33712 0.70201 -0.10992 0.79551 -0.34354 -0.32717 0.030538 0.24604 -0.16857 -0.77267 -0.45843 -0.0060385 -0.33472 -0.26437 -0.21247 0.24241 -0.46285 0.32434 0.077569 0.28511 -0.38589 -0.0041081 -0.19887 -0.50601 0.54081 -0.33611 -0.10492 -0.55035 0.66215 0.056054 0.0033579 0.51826 0.1167 0.49053 0.012476 -0.024986 -0.099266 0.069926 -0.50376 -0.26692 -0.52158 0.74391 -0.29793 -0.74214 0.13901 0.32073 -0.30176 0.19213 -0.071006 -0.47931 -0.045606 0.083413 -2.2626 -0.62771 0.18383 -0.20006 0.20747 0.93842 0.039725 -0.073622 0.95418 0.13252 0.27428 0.020871 -0.99478 -0.60409 0.17769 0.30552 -1.1167 0.1562 0.042333 0.60667 0.31924 0.026862 -0.10894 -0.059597 -0.37697 -0.12174 0.6681 -0.45187 -0.65058 0.26808 0.014851 0.25176 0.15575 0.14464 -0.050168 -0.16096 -0.52247 -0.58604 0.2535 -0.62094 -0.24828 0.26319 -0.77109 -0.49111 -0.56805 -0.14462 -1.0704 -0.44761')
# sofa
gloves[18] = self.parse_vec('-0.55126 -0.25437 0.40944 -0.37035 0.15619 0.74781 -0.11626 -0.089062 0.32154 -0.44794 -0.79694 0.2678 -0.28714 -0.18248 -0.099868 0.0044547 0.14694 -0.35398 -0.27681 0.50209 -0.69361 -0.18524 -0.24016 0.065474 -0.12418 -0.62691 0.10098 0.40514 -0.49283 -0.2973 -0.15004 -0.3799 0.16639 -0.27232 0.012692 -0.57008 0.71212 0.014504 -0.32036 0.36146 -0.24907 0.0045155 0.60505 0.10751 -0.012905 -0.7139 -0.0033347 0.49277 0.20195 0.24275 -0.4409 -0.85957 0.20849 -0.15209 -0.14172 0.089232 -0.01787 0.43873 0.50058 0.22154 -0.033405 0.13686 0.7646 -0.69327 -0.26288 -0.23971 -0.17341 -0.23787 0.43504 0.11938 0.11955 0.34672 -0.71679 -0.29109 -0.10691 0.68078 0.40884 0.11318 -0.57474 0.75584 1.0335 -0.2474 -0.15966 -0.012803 0.54782 0.057099 0.4468 -0.71945 0.045912 0.4424 0.12564 0.35803 -0.0099155 0.31759 0.24738 -0.64752 0.25505 -0.67181 -1.0412 1.119 0.64015 -0.11319 0.046755 -0.47351 -0.98552 -0.17908 -0.49623 -0.41556 -0.20261 -0.69094 -0.47285 0.26574 0.7891 -0.069767 -0.03166 -0.41765 0.44728 -0.89895 0.077562 0.1578 0.19055 0.31422 -0.034844 -0.0092608 0.52623 0.92685 0.31479 0.097895 -0.46618 -0.029873 0.22965 0.29097 -0.12395 -0.33712 -0.54815 -0.35642 0.058376 0.47008 0.11739 0.64565 0.26692 0.43143 -0.14349 -0.63512 0.016551 0.35399 -0.43784 -0.15109 0.20562 0.51676 0.31307 0.10661 -2.6111 0.43525 0.29244 0.30252 0.44164 -0.086565 0.36163 -0.42814 0.39952 0.6095 0.37047 0.41566 -0.097301 0.16248 0.10507 0.59692 0.20441 0.14534 0.3668 0.76102 -0.086382 0.24676 0.84287 -0.22111 -0.34975 0.40324 -0.2311 0.21589 0.12531 -0.19429 0.36596 0.18071 -0.090265 0.02224 -0.63616 0.52052 0.49612 0.082192 0.79162 -0.90827 0.53091 -0.42354 0.48783 -0.71943 -0.2491 0.33456 -0.13367 0.41854')
# train
gloves[19] = self.parse_vec('0.37548 -0.16669 0.20334 -0.1707 0.057389 0.63362 0.098189 0.17951 0.094536 0.61758 -0.012194 0.03028 -0.59888 -0.46359 -0.86279 0.16698 0.17168 0.33183 -0.34339 0.28135 0.25715 -0.61989 0.25431 -0.3545 -0.23358 0.82254 -0.19874 -0.59826 0.41849 -0.2918 -0.010124 0.035356 -0.22821 -0.024697 0.29794 -0.19534 -0.57675 0.1217 1.1021 -0.36827 -0.20924 -0.33711 -0.043826 -0.59845 0.2646 -0.51695 -0.33889 -0.12732 0.15502 -0.07516 0.34644 -0.75462 0.068238 -0.13422 -0.76469 0.37285 -0.052013 0.65885 -0.042933 -0.28987 -0.11953 0.083422 0.32609 -0.17798 -0.41476 -0.65127 0.44529 0.89459 -0.020621 -0.2502 -0.098399 0.38612 -0.090363 -0.42287 -0.031872 0.56521 -0.2458 0.25975 -0.40278 -0.15071 0.2289 -0.61254 -0.10832 0.045791 -0.082635 0.85964 -0.099326 0.072384 0.61234 -0.067309 0.44315 0.37082 -0.70074 1.0807 0.071388 0.44729 0.30407 0.2371 -0.085221 0.15809 0.75598 -0.35196 0.044777 -0.12434 -0.24014 -0.53403 0.24857 -0.171 1.1383 0.13646 -0.097531 -0.21034 0.08839 -0.52547 0.48343 -0.34049 0.08137 -0.54899 0.11817 1.1512 -0.078584 -0.3733 -0.15421 0.30997 -0.79899 -0.029586 -0.018026 -0.0035351 0.18488 1.0739 -0.055238 -0.14807 0.61702 -0.25605 -0.14685 -0.061761 -0.37885 0.479 -0.3825 -0.061271 -1.0717 0.37763 -0.74767 -0.40958 0.76752 -0.43954 0.2279 -0.42838 -0.80615 -1.0569 0.36154 -0.6756 -3.9798 0.43417 0.058424 -0.25163 0.017483 -0.03925 0.078241 0.47291 0.21551 0.32782 -0.19112 0.47168 -0.48036 -0.62983 -0.23916 -0.078116 -0.99057 -0.31946 -0.040178 -0.061123 -0.5638 -1.2017 1.0233 -0.81923 0.70827 0.47827 0.090528 0.32272 0.44516 0.26923 0.19288 0.69647 0.22837 -0.64528 0.13395 0.5601 0.58335 -0.065198 -0.016235 -0.18649 0.47786 0.54648 0.61327 0.14863 -0.098438 -0.33517 0.48419 0.22443')
# monitor (instead of tvmonitor)
gloves[20] = self.parse_vec('0.39506 0.57035 -0.34469 -0.2418 -0.085844 0.076654 -0.101 -0.043672 0.35994 -0.068255 0.2001 -0.18981 -0.807 -0.10697 -0.49271 -0.62257 0.033404 0.043097 -0.16137 0.037069 -0.092297 0.71918 -0.33535 0.99444 -0.23735 -0.18001 -0.3563 -0.035004 -0.42524 0.020921 0.59765 -0.68987 0.42215 -0.039423 0.81596 -1.0068 0.056338 -0.28865 -0.3757 0.41928 0.026622 0.43745 -0.34303 -0.038377 -0.74057 -0.060697 -0.25378 0.020666 -0.29184 -0.3001 0.0055599 0.24966 -0.58941 -0.46169 -0.14104 0.056481 -0.22584 -0.093435 0.50993 0.079872 0.085146 -0.030725 0.92953 0.31664 0.45899 -0.16236 0.18509 -0.3883 0.36874 -0.094179 0.080235 0.3334 -0.55517 -0.52172 0.035944 0.14773 0.20172 -0.43234 0.26623 -0.18526 0.447 0.72035 0.45101 -0.44633 0.42394 -0.31974 0.26068 0.39124 0.30794 0.27531 -0.74552 0.38866 0.24196 -0.015859 0.10625 -0.27747 0.19079 -0.52362 -0.65494 0.78146 0.40401 0.28761 0.43292 -0.73568 -0.43771 -0.6939 -0.48592 0.055997 0.086118 -0.63828 -0.23677 -0.0023004 0.41527 -0.12113 0.76192 0.55498 -0.13573 0.069332 -0.56071 0.57345 0.1751 0.061245 -0.19107 0.35412 0.44524 0.52874 -0.067264 0.26821 0.12174 0.060287 -0.47326 0.10187 0.044595 0.60027 -0.22113 -0.63658 -0.36007 -0.14078 -0.15648 -0.064581 0.19529 -0.47165 0.1875 -0.20738 0.49041 -0.15263 -0.18486 -0.62451 -0.065947 -0.18168 0.55734 -0.3354 -2.1831 0.073034 -0.55689 0.20047 0.48988 -0.25795 0.16526 0.13197 -0.0024545 0.0057711 0.74506 0.007249 -0.53246 0.045723 -0.45975 -0.96999 0.74073 0.1641 0.49533 -0.030726 0.11014 -0.36607 0.04882 -0.26971 0.52963 0.40551 -0.31313 0.26866 0.19646 0.71257 0.22745 -0.50536 0.34653 0.51053 0.014612 -0.1723 0.056945 0.66266 0.71526 0.03419 0.17104 -0.049182 -0.27842 -0.29963 0.41816 0.16741 0.34322 0.25798')
return gloves
def set_emb_vecs(self, emb_selection):
if emb_selection == 'glove':
embs = self.set_glove()
elif emb_selection == 'random':
embs = np.random.randn(len(self.CLASSES), self.EMB_DIM)
elif emb_selection == 'farthest_points':
pts = np.random.randn(100 * len(self.CLASSES), self.EMB_DIM)
inds = generate_farthest_vecs(pts, len(self.CLASSES))
embs = pts[inds]
elif emb_selection == 'orthogonal':
pts = np.random.randn(self.EMB_DIM, self.EMB_DIM)
q, _ = np.linalg.qr(pts, 'complete')
embs = q.T[:len(self.CLASSES)]
else:
raise AssertionError('Unknown emb_selection: {}'.format(emb_selection))
embs = embs.astype(np.float32)
return embs
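# --- Editor's sketch (not in the original source): what the emb_selection
# branches above yield. `dataset` is a hypothetical instance of this class;
# numpy is assumed to be imported as `np`, as the method bodies imply.
# >>> embs = dataset.set_emb_vecs('orthogonal')
# >>> embs.shape == (len(dataset.CLASSES), dataset.EMB_DIM)
# True
# >>> np.allclose(embs @ embs.T, np.eye(len(dataset.CLASSES)), atol=1e-4)
# True
# The rows come from the Q factor of a QR decomposition, so they are
# orthonormal up to float32 rounding; 'random' rows are only approximately
# orthogonal in high dimensions, and 'farthest_points' instead picks rows
# spread apart via generate_farthest_vecs.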
def prepare_test_img(self, idx):
img_info = self.img_infos[idx]
ann_info = self.get_ann_info(idx)
results = dict(img_info=img_info, ann_info=ann_info)
self.pre_pipeline(results)
seg_map = self.gt_seg_map_loader(results)
results = self.pipeline(results)
results['gt_semantic_seg'] = seg_map['gt_semantic_seg']
return results
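# Editor's note (a sketch, not from the original file): unlike a plain
# test-time pipeline call, prepare_test_img also loads the ground-truth
# segmentation map and attaches it to the pipeline output, so evaluation can
# read everything from one dict. `dataset` is a hypothetical instance:
# >>> sample = dataset.prepare_test_img(0)
# >>> 'gt_semantic_seg' in sample
# True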
| 325.16 | 1,777 | 0.693935 | 9,002 | 40,645 | 3.121417 | 0.472895 | 0.006264 | 0.008968 | 0.009716 | 0.011353 | 0.007687 | 0.00363 | 0.00363 | 0.00363 | 0.00363 | 0 | 0.732884 | 0.135761 | 40,645 | 124 | 1,778 | 327.782258 | 0.067042 | 0.008562 | 0 | 0.023529 | 1 | 0.247059 | 0.903942 | 0.000621 | 0 | 0 | 0 | 0 | 0.023529 | 1 | 0.058824 | false | 0 | 0.070588 | 0.011765 | 0.223529 | 0.023529 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
47f3ffa35ff6bef996b55bed30381d8d8da1530a | 43 | py | Python | example_pkg/example.py | IoC-Sunderland/Example-Package-Structure | 3664c780a52d73ac93cb6bab83c1506c0a9c08c9 | [
"MIT"
] | null | null | null | example_pkg/example.py | IoC-Sunderland/Example-Package-Structure | 3664c780a52d73ac93cb6bab83c1506c0a9c08c9 | [
"MIT"
] | null | null | null | example_pkg/example.py | IoC-Sunderland/Example-Package-Structure | 3664c780a52d73ac93cb6bab83c1506c0a9c08c9 | [
"MIT"
] | null | null | null | def my_func():
return 'Hi, we made it'
| 10.75 | 25 | 0.604651 | 8 | 43 | 3.125 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.255814 | 43 | 3 | 26 | 14.333333 | 0.78125 | 0 | 0 | 0 | 0 | 0 | 0.325581 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
9a127315b31fdad49a3649f080bf9aa672bf0683 | 269,731 | py | Python | sdks/python/appcenter_sdk/api/distribute_api.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | null | null | null | sdks/python/appcenter_sdk/api/distribute_api.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 6 | 2019-10-23T06:38:53.000Z | 2022-01-22T07:57:58.000Z | sdks/python/appcenter_sdk/api/distribute_api.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 2 | 2019-10-23T06:31:05.000Z | 2021-08-21T17:32:47.000Z | # coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from appcenter_sdk.api_client import ApiClient
class distributeApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def devices_registerUserForDevice(self, user_id, body, **kwargs): # noqa: E501
"""devices_registerUserForDevice # noqa: E501
Registers a user for an existing device # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_registerUserForDevice(user_id, body, async=True)
>>> result = thread.get()
:param async bool
:param string user_id: The ID of the user (required)
:param object body: The device info. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.devices_registerUserForDevice_with_http_info(user_id, body, **kwargs) # noqa: E501
else:
(data) = self.devices_registerUserForDevice_with_http_info(user_id, body, **kwargs) # noqa: E501
return data
def devices_registerUserForDevice_with_http_info(self, user_id, body, **kwargs): # noqa: E501
"""devices_registerUserForDevice # noqa: E501
Registers a user for an existing device # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_registerUserForDevice_with_http_info(user_id, body, async=True)
>>> result = thread.get()
:param async bool
:param string user_id: The ID of the user (required)
:param object body: The device info. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method devices_registerUserForDevice" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params or
params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `devices_registerUserForDevice`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `devices_registerUserForDevice`") # noqa: E501
collection_formats = {}
path_params = {}
if 'user_id' in params:
path_params['user_id'] = params['user_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/users/{user_id}/devices/register', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
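# --- Editor's usage sketch (not part of the generated file). Every method in
# this class takes an optional `async` keyword: without it the call blocks and
# returns the deserialized response; with it the call returns a request
# thread. Because `async` became a reserved word in Python 3.7, it can no
# longer be written as a literal keyword argument there and has to be passed
# via dict unpacking. `api` and all argument values below are placeholders.
# >>> api = distributeApi()
# >>> # synchronous: blocks and returns the response data
# >>> details = api.devices_registerUserForDevice('some-user-id', {'udid': '...'})
# >>> # asynchronous (Python >= 3.7 spelling): returns a thread
# >>> thread = api.devices_registerUserForDevice('some-user-id', {'udid': '...'},
# ...                                            **{'async': True})
# >>> details = thread.get()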
def devices_deviceDetails(self, device_udid, **kwargs): # noqa: E501
"""devices_deviceDetails # noqa: E501
Returns the device details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_deviceDetails(device_udid, async=True)
>>> result = thread.get()
:param async bool
:param string device_udid: The UDID of the device (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.devices_deviceDetails_with_http_info(device_udid, **kwargs) # noqa: E501
else:
(data) = self.devices_deviceDetails_with_http_info(device_udid, **kwargs) # noqa: E501
return data
def devices_deviceDetails_with_http_info(self, device_udid, **kwargs): # noqa: E501
"""devices_deviceDetails # noqa: E501
Returns the device details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_deviceDetails_with_http_info(device_udid, async=True)
>>> result = thread.get()
:param async bool
:param string device_udid: The UDID of the device (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_udid'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method devices_deviceDetails" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_udid' is set
if ('device_udid' not in params or
params['device_udid'] is None):
raise ValueError("Missing the required parameter `device_udid` when calling `devices_deviceDetails`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_udid' in params:
path_params['device_udid'] = params['device_udid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json', 'application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/user/devices/{device_udid}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def devices_removeUserDevice(self, device_udid, **kwargs): # noqa: E501
"""devices_removeUserDevice # noqa: E501
Removes an existing device from a user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_removeUserDevice(device_udid, async=True)
>>> result = thread.get()
:param async bool
:param string device_udid: The UDID of the device (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.devices_removeUserDevice_with_http_info(device_udid, **kwargs) # noqa: E501
else:
(data) = self.devices_removeUserDevice_with_http_info(device_udid, **kwargs) # noqa: E501
return data
def devices_removeUserDevice_with_http_info(self, device_udid, **kwargs): # noqa: E501
"""devices_removeUserDevice # noqa: E501
Removes an existing device from a user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_removeUserDevice_with_http_info(device_udid, async=True)
>>> result = thread.get()
:param async bool
:param string device_udid: The UDID of the device (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_udid'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method devices_removeUserDevice" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_udid' is set
if ('device_udid' not in params or
params['device_udid'] is None):
raise ValueError("Missing the required parameter `device_udid` when calling `devices_removeUserDevice`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_udid' in params:
path_params['device_udid'] = params['device_udid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/user/devices/{device_udid}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def devices_userDevicesList(self, **kwargs): # noqa: E501
"""devices_userDevicesList # noqa: E501
Returns all devices associated with the given user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_userDevicesList(async=True)
>>> result = thread.get()
:param async bool
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.devices_userDevicesList_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.devices_userDevicesList_with_http_info(**kwargs) # noqa: E501
return data
def devices_userDevicesList_with_http_info(self, **kwargs): # noqa: E501
"""devices_userDevicesList # noqa: E501
Returns all devices associated with the given user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_userDevicesList_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method devices_userDevicesList" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json', 'application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/user/devices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_listTesterApps(self, **kwargs): # noqa: E501
"""releases_listTesterApps # noqa: E501
Return a list of applications that the user has tester permission for, together with the latest release of each. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_listTesterApps(async=True)
>>> result = thread.get()
:param async bool
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_listTesterApps_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.releases_listTesterApps_with_http_info(**kwargs) # noqa: E501
return data
def releases_listTesterApps_with_http_info(self, **kwargs): # noqa: E501
"""releases_listTesterApps # noqa: E501
Return a list of applications that the user has tester permission for, together with the latest release of each. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_listTesterApps_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_listTesterApps" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/tester/apps', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_getLatestByHash(self, app_secret, release_hash, **kwargs): # noqa: E501
"""releases_getLatestByHash # noqa: E501
Get the release with hash 'release_hash', or the 'latest' release, from all the distribution groups assigned to the current user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getLatestByHash(app_secret, release_hash, async=True)
>>> result = thread.get()
:param async bool
:param string app_secret: The secret of the target application (required)
:param string release_hash: The hash of the release or 'latest' to get the latest release from all the distribution groups assigned to the current user. (required)
:param string udid: When passing `udid` in the query string, a provisioning check for the given device ID will be performed; it is ignored on non-iOS platforms. (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_getLatestByHash_with_http_info(app_secret, release_hash, **kwargs) # noqa: E501
else:
(data) = self.releases_getLatestByHash_with_http_info(app_secret, release_hash, **kwargs) # noqa: E501
return data
def releases_getLatestByHash_with_http_info(self, app_secret, release_hash, **kwargs): # noqa: E501
"""releases_getLatestByHash # noqa: E501
Get the release with hash 'release_hash', or the 'latest' release, from all the distribution groups assigned to the current user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getLatestByHash_with_http_info(app_secret, release_hash, async=True)
>>> result = thread.get()
:param async bool
:param string app_secret: The secret of the target application (required)
:param string release_hash: The hash of the release or 'latest' to get the latest release from all the distribution groups assigned to the current user. (required)
:param string udid: When passing `udid` in the query string, a provisioning check for the given device ID will be performed; it is ignored on non-iOS platforms. (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['app_secret', 'release_hash', 'udid'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_getLatestByHash" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'app_secret' is set
if ('app_secret' not in params or
params['app_secret'] is None):
raise ValueError("Missing the required parameter `app_secret` when calling `releases_getLatestByHash`") # noqa: E501
# verify the required parameter 'release_hash' is set
if ('release_hash' not in params or
params['release_hash'] is None):
raise ValueError("Missing the required parameter `release_hash` when calling `releases_getLatestByHash`") # noqa: E501
collection_formats = {}
path_params = {}
if 'app_secret' in params:
path_params['app_secret'] = params['app_secret'] # noqa: E501
if 'release_hash' in params:
path_params['release_hash'] = params['release_hash'] # noqa: E501
query_params = []
if 'udid' in params:
query_params.append(('udid', params['udid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/sdk/apps/{app_secret}/releases/{release_hash}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
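# Editor's sketch (values are placeholders): the optional `udid` keyword is
# forwarded as a query-string parameter, which triggers a provisioning check
# for that device on iOS and is ignored on other platforms.
# >>> release = api.releases_getLatestByHash('app-secret', 'latest',
# ...                                        udid='00008020-placeholder-udid')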
def releases_getLatestByPublicDistributionGroup(self, app_secret, distribution_group_id, **kwargs): # noqa: E501
"""releases_getLatestByPublicDistributionGroup # noqa: E501
Get the 'latest' release for the given public distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getLatestByPublicDistributionGroup(app_secret, distribution_group_id, async=True)
>>> result = thread.get()
:param async bool
:param string app_secret: The secret of the target application (required)
:param string distribution_group_id: The ID of the destination distribution group (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_getLatestByPublicDistributionGroup_with_http_info(app_secret, distribution_group_id, **kwargs) # noqa: E501
else:
(data) = self.releases_getLatestByPublicDistributionGroup_with_http_info(app_secret, distribution_group_id, **kwargs) # noqa: E501
return data
def releases_getLatestByPublicDistributionGroup_with_http_info(self, app_secret, distribution_group_id, **kwargs): # noqa: E501
"""releases_getLatestByPublicDistributionGroup # noqa: E501
Get the 'latest' release for the given public distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getLatestByPublicDistributionGroup_with_http_info(app_secret, distribution_group_id, async=True)
>>> result = thread.get()
:param async bool
:param string app_secret: The secret of the target application (required)
:param string distribution_group_id: The ID of the destination distribution group (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['app_secret', 'distribution_group_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_getLatestByPublicDistributionGroup" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'app_secret' is set
if ('app_secret' not in params or
params['app_secret'] is None):
raise ValueError("Missing the required parameter `app_secret` when calling `releases_getLatestByPublicDistributionGroup`") # noqa: E501
# verify the required parameter 'distribution_group_id' is set
if ('distribution_group_id' not in params or
params['distribution_group_id'] is None):
raise ValueError("Missing the required parameter `distribution_group_id` when calling `releases_getLatestByPublicDistributionGroup`") # noqa: E501
collection_formats = {}
path_params = {}
if 'app_secret' in params:
path_params['app_secret'] = params['app_secret'] # noqa: E501
if 'distribution_group_id' in params:
path_params['distribution_group_id'] = params['distribution_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v0.1/public/sdk/apps/{app_secret}/distribution_groups/{distribution_group_id}/releases/latest', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
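# Editor's note (sketch): this endpoint is declared with an empty
# auth_settings list above, so no APIToken header is attached; it hits the
# public /v0.1/public/sdk/... route and only needs the app secret and the
# public distribution group id (placeholders below).
# >>> latest = api.releases_getLatestByPublicDistributionGroup(
# ...     'app-secret', 'distribution-group-id')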
def distibutionReleases_installAnalytics(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""distibutionReleases_installAnalytics # noqa: E501
Notify download(s) for the provided distribution release(s). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.distibutionReleases_installAnalytics(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the app owner (required)
:param string app_name: The name of the app (required)
:param object body: The install analytics request payload (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.distibutionReleases_installAnalytics_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.distibutionReleases_installAnalytics_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def distibutionReleases_installAnalytics_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""distibutionReleases_installAnalytics # noqa: E501
Notify download(s) for the provided distribution release(s). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.distibutionReleases_installAnalytics_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the app owner (required)
:param string app_name: The name of the app (required)
:param object body: The install analytics request payload (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method distibutionReleases_installAnalytics" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `distibutionReleases_installAnalytics`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `distibutionReleases_installAnalytics`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `distibutionReleases_installAnalytics`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/csv', 'text/plain']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v0.1/public/apps/{owner_name}/{app_name}/install_analytics', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_getIosManifest(self, app_id, release_id, token, **kwargs): # noqa: E501
"""releases_getIosManifest # noqa: E501
Returns the manifest.plist in XML format for installing the release on a device. Only available for iOS. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getIosManifest(app_id, release_id, token, async=True)
>>> result = thread.get()
:param async bool
:param string app_id: The ID of the application (required)
:param integer release_id: The release_id (required)
:param string token: A hash that authorizes the download if it matches the release info. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_getIosManifest_with_http_info(app_id, release_id, token, **kwargs) # noqa: E501
else:
(data) = self.releases_getIosManifest_with_http_info(app_id, release_id, token, **kwargs) # noqa: E501
return data
def releases_getIosManifest_with_http_info(self, app_id, release_id, token, **kwargs): # noqa: E501
"""releases_getIosManifest # noqa: E501
Returns the manifest.plist in XML format for installing the release on a device. Only available for iOS. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getIosManifest_with_http_info(app_id, release_id, token, async=True)
>>> result = thread.get()
:param async bool
:param string app_id: The ID of the application (required)
:param integer release_id: The release_id (required)
:param string token: A hash that authorizes the download if it matches the release info. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['app_id', 'release_id', 'token'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_getIosManifest" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'app_id' is set
if ('app_id' not in params or
params['app_id'] is None):
raise ValueError("Missing the required parameter `app_id` when calling `releases_getIosManifest`") # noqa: E501
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_getIosManifest`") # noqa: E501
# verify the required parameter 'token' is set
if ('token' not in params or
params['token'] is None):
raise ValueError("Missing the required parameter `token` when calling `releases_getIosManifest`") # noqa: E501
collection_formats = {}
path_params = {}
if 'app_id' in params:
path_params['app_id'] = params['app_id'] # noqa: E501
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
query_params = []
if 'token' in params:
query_params.append(('token', params['token'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/v0.1/public/apps/{app_id}/releases/{release_id}/ios_manifest', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def storeNotifications_getNotificationByAppId(self, owner_name, app_name, **kwargs): # noqa: E501
"""storeNotifications_getNotificationByAppId # noqa: E501
Application-specific store service status # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeNotifications_getNotificationByAppId(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.storeNotifications_getNotificationByAppId_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.storeNotifications_getNotificationByAppId_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def storeNotifications_getNotificationByAppId_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""storeNotifications_getNotificationByAppId # noqa: E501
Application-specific store service status # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeNotifications_getNotificationByAppId_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method storeNotifications_getNotificationByAppId" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `storeNotifications_getNotificationByAppId`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `storeNotifications_getNotificationByAppId`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/store_service_status', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def devices_getReleaseUpdateDevicesStatus(self, release_id, resign_id, owner_name, app_name, **kwargs): # noqa: E501
"""devices_getReleaseUpdateDevicesStatus # noqa: E501
Returns the resign status to the caller # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_getReleaseUpdateDevicesStatus(release_id, resign_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string release_id: The ID of the release. (required)
:param string resign_id: The ID of the resign operation. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param boolean include_provisioning_profile: A boolean value that indicates whether the provisioning profile should be returned in addition to the status. When set to true, the provisioning profile is returned only when the status is 'complete' or 'preparing_for_testers'. (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.devices_getReleaseUpdateDevicesStatus_with_http_info(release_id, resign_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.devices_getReleaseUpdateDevicesStatus_with_http_info(release_id, resign_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def devices_getReleaseUpdateDevicesStatus_with_http_info(self, release_id, resign_id, owner_name, app_name, **kwargs): # noqa: E501
"""devices_getReleaseUpdateDevicesStatus # noqa: E501
Returns the resign status to the caller # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_getReleaseUpdateDevicesStatus_with_http_info(release_id, resign_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string release_id: The ID of the release. (required)
:param string resign_id: The ID of the resign operation. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param boolean include_provisioning_profile: A boolean value that indicates whether the provisioning profile should be returned in addition to the status. When set to true, the provisioning profile is returned only when the status is 'complete' or 'preparing_for_testers'. (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'resign_id', 'owner_name', 'app_name', 'include_provisioning_profile'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method devices_getReleaseUpdateDevicesStatus" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `devices_getReleaseUpdateDevicesStatus`") # noqa: E501
# verify the required parameter 'resign_id' is set
if ('resign_id' not in params or
params['resign_id'] is None):
raise ValueError("Missing the required parameter `resign_id` when calling `devices_getReleaseUpdateDevicesStatus`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `devices_getReleaseUpdateDevicesStatus`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `devices_getReleaseUpdateDevicesStatus`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'resign_id' in params:
path_params['resign_id'] = params['resign_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'include_provisioning_profile' in params:
query_params.append(('include_provisioning_profile', params['include_provisioning_profile'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/json', 'application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/update_devices/{resign_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
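# Editor's sketch (placeholders throughout): polling a resign operation. The
# optional boolean is sent as a query parameter; per the docstring above, the
# provisioning profile is only included once the status allows it.
# >>> status = api.devices_getReleaseUpdateDevicesStatus(
# ...     'release-id', 'resign-id', 'owner-name', 'app-name',
# ...     include_provisioning_profile=True)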
def releases_putDistributionTester(self, release_id, tester_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_putDistributionTester # noqa: E501
Update details about the specified tester associated with the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_putDistributionTester(release_id, tester_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string tester_id: The ID of the tester (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_putDistributionTester_with_http_info(release_id, tester_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_putDistributionTester_with_http_info(release_id, tester_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_putDistributionTester_with_http_info(self, release_id, tester_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_putDistributionTester # noqa: E501
Update details about the specified tester associated with the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_putDistributionTester_with_http_info(release_id, tester_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string tester_id: The ID of the tester (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'tester_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_putDistributionTester" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_putDistributionTester`") # noqa: E501
# verify the required parameter 'tester_id' is set
if ('tester_id' not in params or
params['tester_id'] is None):
raise ValueError("Missing the required parameter `tester_id` when calling `releases_putDistributionTester`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_putDistributionTester`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_putDistributionTester`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'tester_id' in params:
path_params['tester_id'] = params['tester_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/testers/{tester_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_deleteDistributionTester(self, release_id, tester_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_deleteDistributionTester # noqa: E501
Delete the given tester from the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_deleteDistributionTester(release_id, tester_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string tester_id: The ID of the tester (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_deleteDistributionTester_with_http_info(release_id, tester_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_deleteDistributionTester_with_http_info(release_id, tester_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_deleteDistributionTester_with_http_info(self, release_id, tester_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_deleteDistributionTester # noqa: E501
Delete the given tester from the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_deleteDistributionTester_with_http_info(release_id, tester_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string tester_id: The ID of the tester (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'tester_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_deleteDistributionTester" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_deleteDistributionTester`") # noqa: E501
# verify the required parameter 'tester_id' is set
if ('tester_id' not in params or
params['tester_id'] is None):
raise ValueError("Missing the required parameter `tester_id` when calling `releases_deleteDistributionTester`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_deleteDistributionTester`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_deleteDistributionTester`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'tester_id' in params:
path_params['tester_id'] = params['tester_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/testers/{tester_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_addTesters(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_addTesters # noqa: E501
Distributes a release to a user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_addTesters(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_addTesters_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.releases_addTesters_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def releases_addTesters_with_http_info(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_addTesters # noqa: E501
Distributes a release to a user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_addTesters_with_http_info(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_addTesters" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_addTesters`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_addTesters`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_addTesters`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `releases_addTesters`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/testers', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
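# Usage sketch for releases_addTesters. The body schema is not defined in
# this module; the 'email' key below is an assumed tester payload field,
# shown only for illustration.
#
#   >>> body = {'email': 'tester@example.com'}  # hypothetical payload
#   >>> api.releases_addTesters(42, 'my-org', 'my-app', body)
#   >>> # asynchronous variant, per the docstring convention above:
#   >>> thread = api.releases_addTesters(42, 'my-org', 'my-app', body, async=True)
#   >>> result = thread.get()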
def releases_deleteDistributionStore(self, release_id, store_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_deleteDistributionStore # noqa: E501
Delete the given distribution store from the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_deleteDistributionStore(release_id, store_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string store_id: The ID of the distribution store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_deleteDistributionStore_with_http_info(release_id, store_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_deleteDistributionStore_with_http_info(release_id, store_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_deleteDistributionStore_with_http_info(self, release_id, store_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_deleteDistributionStore # noqa: E501
Delete the given distribution store from the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_deleteDistributionStore_with_http_info(release_id, store_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string store_id: The ID of the distribution store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'store_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_deleteDistributionStore" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_deleteDistributionStore`") # noqa: E501
# verify the required parameter 'store_id' is set
if ('store_id' not in params or
params['store_id'] is None):
raise ValueError("Missing the required parameter `store_id` when calling `releases_deleteDistributionStore`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_deleteDistributionStore`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_deleteDistributionStore`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'store_id' in params:
path_params['store_id'] = params['store_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/stores/{store_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_addStore(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_addStore # noqa: E501
Distributes a release to a store # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_addStore(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_addStore_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.releases_addStore_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def releases_addStore_with_http_info(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_addStore # noqa: E501
Distributes a release to a store # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_addStore_with_http_info(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_addStore" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_addStore`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_addStore`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_addStore`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `releases_addStore`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/stores', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
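# Usage sketch for releases_addStore, which POSTs to the /stores collection.
# The 'id' key naming the destination store is an assumption about the body
# schema, which this module leaves as a plain object.
#
#   >>> api.releases_addStore(42, 'my-org', 'my-app',
#   ...                       {'id': 'store-guid'})  # hypothetical body field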
def provisioning_profile(self, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""provisioning_profile # noqa: E501
Return information about the provisioning profile. Only available for iOS. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.provisioning_profile(release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.provisioning_profile_with_http_info(release_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.provisioning_profile_with_http_info(release_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def provisioning_profile_with_http_info(self, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""provisioning_profile # noqa: E501
Return information about the provisioning profile. Only available for iOS. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.provisioning_profile_with_http_info(release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method provisioning_profile" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `provisioning_profile`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `provisioning_profile`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `provisioning_profile`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/provisioning_profile', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
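# Usage sketch for provisioning_profile (iOS only, per the docstring). The
# call is synchronous by default and returns the deserialized response; with
# async=True it returns a thread whose get() yields the same value.
#
#   >>> profile = api.provisioning_profile(42, 'my-org', 'my-app')
#   >>> thread = api.provisioning_profile(42, 'my-org', 'my-app', async=True)
#   >>> profile = thread.get()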
def releases_putDistributionGroup(self, release_id, group_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_putDistributionGroup # noqa: E501
Update details about the specified distribution group associated with the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_putDistributionGroup(release_id, group_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string group_id: The ID of the release's destination (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_putDistributionGroup_with_http_info(release_id, group_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_putDistributionGroup_with_http_info(release_id, group_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_putDistributionGroup_with_http_info(self, release_id, group_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_putDistributionGroup # noqa: E501
Update details about the specified distribution group associated with the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_putDistributionGroup_with_http_info(release_id, group_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string group_id: The ID of the release's destination (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'group_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_putDistributionGroup" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_putDistributionGroup`") # noqa: E501
# verify the required parameter 'group_id' is set
if ('group_id' not in params or
params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `releases_putDistributionGroup`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_putDistributionGroup`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_putDistributionGroup`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'group_id' in params:
path_params['group_id'] = params['group_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/groups/{group_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
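# Usage sketch for releases_putDistributionGroup. The optional body travels
# through **kwargs, is validated against all_params, and becomes the PUT
# payload; the 'mandatory_update' key is a hypothetical example field.
#
#   >>> api.releases_putDistributionGroup(
#   ...     42, 'group-guid', 'my-org', 'my-app',
#   ...     body={'mandatory_update': True})  # hypothetical body field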
def releases_deleteDistributionGroup(self, release_id, group_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_deleteDistributionGroup # noqa: E501
Delete the given distribution group from the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_deleteDistributionGroup(release_id, group_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string group_id: The ID of the distribution group (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_deleteDistributionGroup_with_http_info(release_id, group_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_deleteDistributionGroup_with_http_info(release_id, group_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_deleteDistributionGroup_with_http_info(self, release_id, group_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_deleteDistributionGroup # noqa: E501
Delete the given distribution group from the release # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_deleteDistributionGroup_with_http_info(release_id, group_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string group_id: The ID of the distribution group (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'group_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_deleteDistributionGroup" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_deleteDistributionGroup`") # noqa: E501
# verify the required parameter 'group_id' is set
if ('group_id' not in params or
params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `releases_deleteDistributionGroup`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_deleteDistributionGroup`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_deleteDistributionGroup`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'group_id' in params:
path_params['group_id'] = params['group_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/groups/{group_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_addDistributionGroup(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_addDistributionGroup # noqa: E501
Distributes a release to a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_addDistributionGroup(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_addDistributionGroup_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.releases_addDistributionGroup_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def releases_addDistributionGroup_with_http_info(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_addDistributionGroup # noqa: E501
Distributes a release to a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_addDistributionGroup_with_http_info(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_addDistributionGroup" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_addDistributionGroup`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_addDistributionGroup`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_addDistributionGroup`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `releases_addDistributionGroup`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}/groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
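# Usage sketch for releases_addDistributionGroup. As with the other add_*
# calls, the required body is an untyped object here; the 'id' key naming
# the destination group is an assumption for illustration only.
#
#   >>> api.releases_addDistributionGroup(
#   ...     42, 'my-org', 'my-app', {'id': 'group-guid'})  # hypothetical body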
def releases_getLatestByUser(self, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_getLatestByUser # noqa: E501
Get a release with id `release_id`. If `release_id` is `latest`, return the latest release that was distributed to the current user (from all the distribution groups). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getLatestByUser(release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string release_id: The ID of the release, or `latest` to get the latest release from all the distribution groups assigned to the current user. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string udid: When supplied, this call will also check whether the given UDID is provisioned. Ignored for non-iOS platforms. The result is returned in the property is_udid_provisioned. (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_getLatestByUser_with_http_info(release_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_getLatestByUser_with_http_info(release_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_getLatestByUser_with_http_info(self, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_getLatestByUser # noqa: E501
Get a release with id `release_id`. If `release_id` is `latest`, return the latest release that was distributed to the current user (from all the distribution groups). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getLatestByUser_with_http_info(release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string release_id: The ID of the release, or `latest` to get the latest release from all the distribution groups assigned to the current user. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param string udid: When supplied, this call will also check whether the given UDID is provisioned. Ignored for non-iOS platforms. The result is returned in the property is_udid_provisioned. (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'owner_name', 'app_name', 'udid'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_getLatestByUser" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_getLatestByUser`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_getLatestByUser`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_getLatestByUser`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'udid' in params:
query_params.append(('udid', params['udid'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
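# Usage sketch for releases_getLatestByUser. Unlike most methods here,
# release_id is a string, so the literal 'latest' is accepted; the optional
# udid kwarg is appended to query_params rather than the path.
#
#   >>> release = api.releases_getLatestByUser(
#   ...     'latest', 'my-org', 'my-app',
#   ...     udid='00008030-001234567890ABCD')  # hypothetical device UDID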
def releases_updateDetails(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_updateDetails # noqa: E501
Update details of a release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_updateDetails(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_updateDetails_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.releases_updateDetails_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def releases_updateDetails_with_http_info(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_updateDetails # noqa: E501
Update details of a release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_updateDetails_with_http_info(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_updateDetails" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_updateDetails`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_updateDetails`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_updateDetails`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `releases_updateDetails`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_update(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_update # noqa: E501
Updates a release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_update(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_update_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.releases_update_with_http_info(release_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def releases_update_with_http_info(self, release_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releases_update # noqa: E501
Updates a release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_update_with_http_info(release_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_update`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_update`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_update`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `releases_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
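# Note the contrast with releases_updateDetails above: both methods target
# /v0.1/apps/{owner_name}/{app_name}/releases/{release_id}, but
# releases_updateDetails issues a PUT while releases_update issues a PATCH.
# A minimal sketch; the 'enabled' body key is an assumption:
#
#   >>> api.releases_update(42, 'my-org', 'my-app',
#   ...                     {'enabled': False})  # hypothetical body field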
def releases_delete(self, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_delete # noqa: E501
Deletes a release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_delete(release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_delete_with_http_info(release_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_delete_with_http_info(release_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_delete_with_http_info(self, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""releases_delete # noqa: E501
Deletes a release. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_delete_with_http_info(release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param integer release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['release_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_delete`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_delete`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/{release_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_availableToTester(self, owner_name, app_name, **kwargs): # noqa: E501
"""releases_availableToTester # noqa: E501
Return detailed information about releases available to a tester. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_availableToTester(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param boolean published_only: When *true*, filters out releases that were uploaded but never distributed. Releases under deleted distribution groups will not be filtered out. (optional)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_availableToTester_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_availableToTester_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_availableToTester_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""releases_availableToTester # noqa: E501
Return detailed information about releases available to a tester. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_availableToTester_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param boolean published_only: When *true*, filters out releases that were uploaded but never distributed. Releases under deleted distribution groups will not be filtered out. (optional)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'published_only'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_availableToTester" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_availableToTester`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_availableToTester`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'published_only' in params:
query_params.append(('published_only', params['published_only'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases/filter_by_tester', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
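# Usage sketch for releases_availableToTester. published_only is optional;
# when present it is sent as a query parameter on the filter_by_tester path.
#
#   >>> releases = api.releases_availableToTester(
#   ...     'my-org', 'my-app', published_only=True)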
def releases_list(self, owner_name, app_name, **kwargs): # noqa: E501
"""releases_list # noqa: E501
Return basic information about releases. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_list(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param boolean published_only: When *true*, filters out releases that were uploaded but never distributed. Releases under deleted distribution groups will not be filtered out. (optional)
:param string scope: When the scope is 'tester', only includes releases that have been distributed to groups that the user belongs to. (optional)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_list_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_list_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_list_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""releases_list # noqa: E501
Return basic information about releases. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_list_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param boolean published_only: When *true*, filters out releases that were uploaded but never distributed. Releases under deleted distribution groups will not be filtered out. (optional)
:param string scope: When the scope is 'tester', only includes releases that have been distributed to groups that the user belongs to. (optional)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'published_only', 'scope'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_list`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'published_only' in params:
query_params.append(('published_only', params['published_only'])) # noqa: E501
if 'scope' in params:
query_params.append(('scope', params['scope'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/releases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
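# Usage sketch (not generated code): list the releases of an app. Assumes
# `api` is an instance of this class backed by an ApiClient configured with a
# valid APIToken; 'my-org' and 'my-app' are hypothetical owner/app names.
#
#     releases = api.releases_list('my-org', 'my-app', published_only=True)
#     for release in releases:
#         print(release)
#
#     # Asynchronous variant: a request thread is returned; .get() blocks.
#     thread = api.releases_list('my-org', 'my-app', async=True)
#     releases = thread.get()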
def releaseUploads_complete(self, upload_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releaseUploads_complete # noqa: E501
Commits or aborts the upload process for a release for the specified application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releaseUploads_complete(upload_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string upload_id: The ID of the upload (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information (required)
:return: ReleaseUploadEndResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releaseUploads_complete_with_http_info(upload_id, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.releaseUploads_complete_with_http_info(upload_id, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def releaseUploads_complete_with_http_info(self, upload_id, owner_name, app_name, body, **kwargs): # noqa: E501
"""releaseUploads_complete # noqa: E501
Commits or aborts the upload process for a release for the specified application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releaseUploads_complete_with_http_info(upload_id, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string upload_id: The ID of the upload (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information (required)
:return: ReleaseUploadEndResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['upload_id', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releaseUploads_complete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'upload_id' is set
if ('upload_id' not in params or
params['upload_id'] is None):
raise ValueError("Missing the required parameter `upload_id` when calling `releaseUploads_complete`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releaseUploads_complete`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releaseUploads_complete`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `releaseUploads_complete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'upload_id' in params:
path_params['upload_id'] = params['upload_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/release_uploads/{upload_id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseUploadEndResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
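# Usage sketch (not generated code): commit or abort an in-progress upload.
# The body is an untyped `object` here; the {'status': ...} shape below is an
# assumption about the service contract, not something this client enforces.
#
#     result = api.releaseUploads_complete(upload_id, 'my-org', 'my-app',
#                                          {'status': 'committed'})
#     # or, to discard the upload instead:
#     api.releaseUploads_complete(upload_id, 'my-org', 'my-app',
#                                 {'status': 'aborted'})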
def releaseUploads_create(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""releaseUploads_create # noqa: E501
Begins the upload process for a new release for the specified application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releaseUploads_create(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information (required)
:return: ReleaseUploadBeginResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releaseUploads_create_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.releaseUploads_create_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def releaseUploads_create_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""releaseUploads_create # noqa: E501
Begins the upload process for a new release for the specified application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releaseUploads_create_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The release information (required)
:return: ReleaseUploadBeginResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releaseUploads_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releaseUploads_create`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releaseUploads_create`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `releaseUploads_create`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/release_uploads', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReleaseUploadBeginResponse', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
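# Usage sketch (not generated code): the two-step upload flow. Begin an
# upload with releaseUploads_create, push the binary to the returned upload
# URL out of band (for example with an HTTP library of your choice), then
# finish with releaseUploads_complete. The attribute names read off the
# ReleaseUploadBeginResponse below are assumptions for illustration.
#
#     begin = api.releaseUploads_create('my-org', 'my-app', {})
#     upload_id = begin.upload_id
#     # ... upload the release binary to begin.upload_url here ...
#     done = api.releaseUploads_complete(upload_id, 'my-org', 'my-app',
#                                        {'status': 'committed'})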
def releases_listLatest(self, owner_name, app_name, **kwargs): # noqa: E501
"""releases_listLatest # noqa: E501
Get the latest release from every distribution group associated with an application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_listLatest(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_listLatest_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_listLatest_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_listLatest_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""releases_listLatest # noqa: E501
Get the latest release from every distribution group associated with an application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_listLatest_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_listLatest" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_listLatest`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_listLatest`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/recent_releases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
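# Usage sketch (not generated code): fetch the newest release from every
# distribution group of the app; same hypothetical `api`/owner/app as above.
#
#     latest = api.releases_listLatest('my-org', 'my-app')
#     for release in latest:
#         print(release)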
def storeReleases_getRealTimeStatusByReleaseId(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_getRealTimeStatusByReleaseId # noqa: E501
Return the real-time publishing status of a release in a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_getRealTimeStatusByReleaseId(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param number release_id: The id of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.storeReleases_getRealTimeStatusByReleaseId_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.storeReleases_getRealTimeStatusByReleaseId_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def storeReleases_getRealTimeStatusByReleaseId_with_http_info(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_getRealTimeStatusByReleaseId # noqa: E501
Return the real-time publishing status of a release in a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_getRealTimeStatusByReleaseId_with_http_info(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param number release_id: The id of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'release_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method storeReleases_getRealTimeStatusByReleaseId" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `storeReleases_getRealTimeStatusByReleaseId`") # noqa: E501
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `storeReleases_getRealTimeStatusByReleaseId`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `storeReleases_getRealTimeStatusByReleaseId`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `storeReleases_getRealTimeStatusByReleaseId`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}/releases/{release_id}/realtimestatus', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
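# Usage sketch (not generated code): poll the real-time publishing status of
# a store release. The endpoint reports a point-in-time status, so callers
# typically poll it; the interval below is an arbitrary illustrative choice.
#
#     import time
#     for _ in range(10):  # poll up to 10 times, 30 seconds apart
#         status = api.storeReleases_getRealTimeStatusByReleaseId(
#             'alpha-store', 42, 'my-org', 'my-app')
#         print(status)
#         time.sleep(30)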
def storeReleasePublishLogs_get(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleasePublishLogs_get # noqa: E501
Returns publish logs for a particular release published to a particular store # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleasePublishLogs_get(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.storeReleasePublishLogs_get_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.storeReleasePublishLogs_get_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def storeReleasePublishLogs_get_with_http_info(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleasePublishLogs_get # noqa: E501
Returns publish logs for a particular release published to a particular store # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleasePublishLogs_get_with_http_info(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'release_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method storeReleasePublishLogs_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `storeReleasePublishLogs_get`") # noqa: E501
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `storeReleasePublishLogs_get`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `storeReleasePublishLogs_get`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `storeReleasePublishLogs_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}/releases/{release_id}/publish_logs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
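# Usage sketch (not generated code): fetch the publish logs for one release
# in one store, e.g. to diagnose a publish attempt after the fact. Note that
# release_id is a string for this endpoint, unlike its numeric siblings.
#
#     logs = api.storeReleasePublishLogs_get('alpha-store', '42',
#                                            'my-org', 'my-app')
#     print(logs)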
def storeReleases_getPublishError(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_getPublishError # noqa: E501
Return the error details of a release that failed during publishing. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_getPublishError(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param number release_id: The id of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.storeReleases_getPublishError_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.storeReleases_getPublishError_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def storeReleases_getPublishError_with_http_info(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_getPublishError # noqa: E501
Return the error details of a release that failed during publishing. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_getPublishError_with_http_info(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param number release_id: The id of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'release_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method storeReleases_getPublishError" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `storeReleases_getPublishError`") # noqa: E501
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `storeReleases_getPublishError`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `storeReleases_getPublishError`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `storeReleases_getPublishError`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}/releases/{release_id}/publish_error_details', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
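# Usage sketch (not generated code): after a failed publish, retrieve the
# error details. The except clause assumes the package's usual ApiException
# (raised by this client family on non-2xx responses); adjust the import to
# your package name.
#
#     try:
#         error = api.storeReleases_getPublishError('alpha-store', 42,
#                                                   'my-org', 'my-app')
#         print(error)
#     except ApiException as exc:
#         print('lookup failed with HTTP status', exc.status)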
def storeReleases_get(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_get # noqa: E501
Return the release published in a store, identified by release ID and store name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_get(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.storeReleases_get_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.storeReleases_get_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def storeReleases_get_with_http_info(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_get # noqa: E501
Return the release published in a store, identified by release ID and store name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_get_with_http_info(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string release_id: The ID of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'release_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method storeReleases_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `storeReleases_get`") # noqa: E501
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `storeReleases_get`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `storeReleases_get`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `storeReleases_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}/releases/{release_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
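# Usage sketch (not generated code): look up a single store release by store
# name and release ID (a string here, per the docstring above).
#
#     release = api.storeReleases_get('alpha-store', '42', 'my-org', 'my-app')
#     print(release)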
def storeReleases_delete(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_delete # noqa: E501
Delete the release with the given release ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_delete(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string release_id: The id of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.storeReleases_delete_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.storeReleases_delete_with_http_info(store_name, release_id, owner_name, app_name, **kwargs) # noqa: E501
return data
def storeReleases_delete_with_http_info(self, store_name, release_id, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_delete # noqa: E501
Delete the release with the given release ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_delete_with_http_info(store_name, release_id, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string release_id: The id of the release (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'release_id', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method storeReleases_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `storeReleases_delete`") # noqa: E501
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `storeReleases_delete`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `storeReleases_delete`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `storeReleases_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}/releases/{release_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
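# Usage sketch (not generated code): remove a release from a store. The call
# is destructive, so gate it behind whatever confirmation your workflow uses.
#
#     api.storeReleases_delete('alpha-store', '42', 'my-org', 'my-app')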
def storeReleases_list(self, store_name, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_list # noqa: E501
Return all releases published in a store # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_list(store_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.storeReleases_list_with_http_info(store_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.storeReleases_list_with_http_info(store_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def storeReleases_list_with_http_info(self, store_name, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_list # noqa: E501
Return all releases published in a store # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_list_with_http_info(store_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method storeReleases_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `storeReleases_list`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `storeReleases_list`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `storeReleases_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}/releases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
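# Usage sketch (not generated code): enumerate everything published through a
# single store connection of the app.
#
#     published = api.storeReleases_list('alpha-store', 'my-org', 'my-app')
#     for item in published:
#         print(item)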
def storeReleases_getLatest(self, store_name, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_getLatest # noqa: E501
Returns the latest release published in a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_getLatest(store_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.storeReleases_getLatest_with_http_info(store_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.storeReleases_getLatest_with_http_info(store_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def storeReleases_getLatest_with_http_info(self, store_name, owner_name, app_name, **kwargs): # noqa: E501
"""storeReleases_getLatest # noqa: E501
Returns the latest release published in a store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.storeReleases_getLatest_with_http_info(store_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method storeReleases_getLatest" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `storeReleases_getLatest`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `storeReleases_getLatest`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `storeReleases_getLatest`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}/latest_release', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
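# Usage sketch (not generated code): get the newest release in a store, for
# example to decide whether a freshly built release still needs publishing.
#
#     newest = api.storeReleases_getLatest('alpha-store', 'my-org', 'my-app')
#     print(newest)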
def stores_get(self, store_name, owner_name, app_name, **kwargs): # noqa: E501
"""stores_get # noqa: E501
Return the store details for the specified store name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_get(store_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.stores_get_with_http_info(store_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.stores_get_with_http_info(store_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def stores_get_with_http_info(self, store_name, owner_name, app_name, **kwargs): # noqa: E501
"""stores_get # noqa: E501
Return the store details for the specified store name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_get_with_http_info(store_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method stores_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `stores_get`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `stores_get`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `stores_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
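# Usage sketch (not generated code): inspect the configuration of one store
# connection by name.
#
#     store = api.stores_get('alpha-store', 'my-org', 'my-app')
#     print(store)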
def stores_patch(self, store_name, owner_name, app_name, body, **kwargs): # noqa: E501
"""stores_patch # noqa: E501
Update the store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_patch(store_name, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: Store update request (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.stores_patch_with_http_info(store_name, owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.stores_patch_with_http_info(store_name, owner_name, app_name, body, **kwargs) # noqa: E501
return data
def stores_patch_with_http_info(self, store_name, owner_name, app_name, body, **kwargs): # noqa: E501
"""stores_patch # noqa: E501
Update the store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_patch_with_http_info(store_name, owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: Store update request (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method stores_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `stores_patch`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `stores_patch`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `stores_patch`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `stores_patch`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
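# Usage sketch (not generated code): PATCH a store. The body is an untyped
# `object`; the key below is a hypothetical field name chosen for
# illustration, not one this client validates.
#
#     api.stores_patch('alpha-store', 'my-org', 'my-app',
#                      {'service_connection_id': 'new-connection-id'})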
def stores_delete(self, store_name, owner_name, app_name, **kwargs): # noqa: E501
"""stores_delete # noqa: E501
Delete the store with the specified store name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_delete(store_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.stores_delete_with_http_info(store_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.stores_delete_with_http_info(store_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def stores_delete_with_http_info(self, store_name, owner_name, app_name, **kwargs): # noqa: E501
"""stores_delete # noqa: E501
Delete the store with the specified store name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_delete_with_http_info(store_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string store_name: The name of the store (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['store_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method stores_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'store_name' is set
if ('store_name' not in params or
params['store_name'] is None):
raise ValueError("Missing the required parameter `store_name` when calling `stores_delete`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `stores_delete`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `stores_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'store_name' in params:
path_params['store_name'] = params['store_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores/{store_name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def stores_create(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""stores_create # noqa: E501
Create a new external store for the specified application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_create(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The store request (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.stores_create_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.stores_create_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def stores_create_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""stores_create # noqa: E501
Create a new external store for the specified application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_create_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The store request (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method stores_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `stores_create`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `stores_create`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `stores_create`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def stores_list(self, owner_name, app_name, **kwargs): # noqa: E501
"""stores_list # noqa: E501
Get the details of all distribution stores for a particular application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_list(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.stores_list_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.stores_list_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def stores_list_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""stores_list # noqa: E501
Get the details of all distribution stores for a particular application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.stores_list_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: array
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method stores_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `stores_list`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `stores_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_stores', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='array', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
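    # Hedged usage sketch, mirroring the docstring examples above (assumes an
    # `api` instance of this class; the owner/app names are illustrative):
    #
    #     stores = api.stores_list("my-org", "my-app")              # synchronous
    #     thread = api.stores_list("my-org", "my-app", async=True)  # asynchronous
    #     stores = thread.get()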
def releases_getLatestByDistributionGroup(self, owner_name, app_name, distribution_group_name, release_id, **kwargs): # noqa: E501
"""releases_getLatestByDistributionGroup # noqa: E501
Return detailed information about a distributed release in a given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getLatestByDistributionGroup(owner_name, app_name, distribution_group_name, release_id, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the app owner (required)
:param string app_name: The name of the app (required)
:param string distribution_group_name: The name of the distribution group. (required)
:param string release_id: Only supports the constant `latest`, specific IDs are not supported. `latest` will return the latest release in the distribution group. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_getLatestByDistributionGroup_with_http_info(owner_name, app_name, distribution_group_name, release_id, **kwargs) # noqa: E501
else:
(data) = self.releases_getLatestByDistributionGroup_with_http_info(owner_name, app_name, distribution_group_name, release_id, **kwargs) # noqa: E501
return data
def releases_getLatestByDistributionGroup_with_http_info(self, owner_name, app_name, distribution_group_name, release_id, **kwargs): # noqa: E501
"""releases_getLatestByDistributionGroup # noqa: E501
Return detailed information about a distributed release in a given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_getLatestByDistributionGroup_with_http_info(owner_name, app_name, distribution_group_name, release_id, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the app owner (required)
:param string app_name: The name of the app (required)
:param string distribution_group_name: The name of the distribution group. (required)
:param string release_id: Only supports the constant `latest`, specific IDs are not supported. `latest` will return the latest release in the distribution group. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'distribution_group_name', 'release_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_getLatestByDistributionGroup" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_getLatestByDistributionGroup`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_getLatestByDistributionGroup`") # noqa: E501
# verify the required parameter 'distribution_group_name' is set
if ('distribution_group_name' not in params or
params['distribution_group_name'] is None):
raise ValueError("Missing the required parameter `distribution_group_name` when calling `releases_getLatestByDistributionGroup`") # noqa: E501
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_getLatestByDistributionGroup`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
if 'distribution_group_name' in params:
path_params['distribution_group_name'] = params['distribution_group_name'] # noqa: E501
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_groups/{distribution_group_name}/releases/{release_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
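    # As the docstring above notes, release_id only accepts the constant
    # 'latest'. A hedged example (owner/app/group names are illustrative):
    #
    #     release = api.releases_getLatestByDistributionGroup(
    #         "my-org", "my-app", "Beta Testers", "latest")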
def releases_deleteWithDistributionGroupId(self, owner_name, app_name, distribution_group_name, release_id, **kwargs): # noqa: E501
"""releases_deleteWithDistributionGroupId # noqa: E501
Delete the release with id 'release_id' in a given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_deleteWithDistributionGroupId(owner_name, app_name, distribution_group_name, release_id, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the app owner (required)
:param string app_name: The name of the app (required)
:param string distribution_group_name: The name of the distribution group. (required)
:param integer release_id: The ID identifying the unique release. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_deleteWithDistributionGroupId_with_http_info(owner_name, app_name, distribution_group_name, release_id, **kwargs) # noqa: E501
else:
(data) = self.releases_deleteWithDistributionGroupId_with_http_info(owner_name, app_name, distribution_group_name, release_id, **kwargs) # noqa: E501
return data
def releases_deleteWithDistributionGroupId_with_http_info(self, owner_name, app_name, distribution_group_name, release_id, **kwargs): # noqa: E501
"""releases_deleteWithDistributionGroupId # noqa: E501
Delete the release with id 'release_id' in a given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_deleteWithDistributionGroupId_with_http_info(owner_name, app_name, distribution_group_name, release_id, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the app owner (required)
:param string app_name: The name of the app (required)
:param string distribution_group_name: The name of the distribution group. (required)
:param integer release_id: The ID identifying the unique release. (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'distribution_group_name', 'release_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_deleteWithDistributionGroupId" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_deleteWithDistributionGroupId`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_deleteWithDistributionGroupId`") # noqa: E501
# verify the required parameter 'distribution_group_name' is set
if ('distribution_group_name' not in params or
params['distribution_group_name'] is None):
raise ValueError("Missing the required parameter `distribution_group_name` when calling `releases_deleteWithDistributionGroupId`") # noqa: E501
# verify the required parameter 'release_id' is set
if ('release_id' not in params or
params['release_id'] is None):
raise ValueError("Missing the required parameter `release_id` when calling `releases_deleteWithDistributionGroupId`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
if 'distribution_group_name' in params:
path_params['distribution_group_name'] = params['distribution_group_name'] # noqa: E501
if 'release_id' in params:
path_params['release_id'] = params['release_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_groups/{distribution_group_name}/releases/{release_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def releases_listByDistributionGroup(self, distribution_group_name, owner_name, app_name, **kwargs): # noqa: E501
"""releases_listByDistributionGroup # noqa: E501
Return basic information about distributed releases in a given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_listByDistributionGroup(distribution_group_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string distribution_group_name: The name of the distribution group. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.releases_listByDistributionGroup_with_http_info(distribution_group_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.releases_listByDistributionGroup_with_http_info(distribution_group_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def releases_listByDistributionGroup_with_http_info(self, distribution_group_name, owner_name, app_name, **kwargs): # noqa: E501
"""releases_listByDistributionGroup # noqa: E501
Return basic information about distributed releases in a given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.releases_listByDistributionGroup_with_http_info(distribution_group_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string distribution_group_name: The name of the distribution group. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['distribution_group_name', 'owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method releases_listByDistributionGroup" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'distribution_group_name' is set
if ('distribution_group_name' not in params or
params['distribution_group_name'] is None):
raise ValueError("Missing the required parameter `distribution_group_name` when calling `releases_listByDistributionGroup`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `releases_listByDistributionGroup`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `releases_listByDistributionGroup`") # noqa: E501
collection_formats = {}
path_params = {}
if 'distribution_group_name' in params:
path_params['distribution_group_name'] = params['distribution_group_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_groups/{distribution_group_name}/releases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def devices_listCsvFormat(self, distribution_group_name, owner_name, app_name, **kwargs): # noqa: E501
"""devices_listCsvFormat # noqa: E501
Returns all devices associated with the given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_listCsvFormat(distribution_group_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string distribution_group_name: The name of the distribution group. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param boolean unprovisioned_only: when true, filters out provisioned devices (optional)
:param array udids: multiple UDIDs which should be part of the resulting CSV (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.devices_listCsvFormat_with_http_info(distribution_group_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.devices_listCsvFormat_with_http_info(distribution_group_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def devices_listCsvFormat_with_http_info(self, distribution_group_name, owner_name, app_name, **kwargs): # noqa: E501
"""devices_listCsvFormat # noqa: E501
Returns all devices associated with the given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_listCsvFormat_with_http_info(distribution_group_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string distribution_group_name: The name of the distribution group. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param boolean unprovisioned_only: when true, filters out provisioned devices (optional)
:param array udids: multiple UDIDs which should be part of the resulting CSV (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['distribution_group_name', 'owner_name', 'app_name', 'unprovisioned_only', 'udids'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method devices_listCsvFormat" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'distribution_group_name' is set
if ('distribution_group_name' not in params or
params['distribution_group_name'] is None):
raise ValueError("Missing the required parameter `distribution_group_name` when calling `devices_listCsvFormat`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `devices_listCsvFormat`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `devices_listCsvFormat`") # noqa: E501
collection_formats = {}
path_params = {}
if 'distribution_group_name' in params:
path_params['distribution_group_name'] = params['distribution_group_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'unprovisioned_only' in params:
query_params.append(('unprovisioned_only', params['unprovisioned_only'])) # noqa: E501
if 'udids' in params:
query_params.append(('udids', params['udids'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['text/csv']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_groups/{distribution_group_name}/devices/download_devices_list', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
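    # Hedged example of the optional query parameters documented above
    # (all identifier values are illustrative):
    #
    #     csv_data = api.devices_listCsvFormat(
    #         "Beta Testers", "my-org", "my-app",
    #         unprovisioned_only=True,
    #         udids=["00008020-000D4DEC0A88002E"])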
def devices_list(self, distribution_group_name, owner_name, app_name, **kwargs): # noqa: E501
"""devices_list # noqa: E501
Returns all devices associated with the given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_list(distribution_group_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string distribution_group_name: The name of the distribution group. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param number release_id: when provided, gets the provisioning state of the devices owned by users of this distribution group when compared to the provided release (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.devices_list_with_http_info(distribution_group_name, owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.devices_list_with_http_info(distribution_group_name, owner_name, app_name, **kwargs) # noqa: E501
return data
def devices_list_with_http_info(self, distribution_group_name, owner_name, app_name, **kwargs): # noqa: E501
"""devices_list # noqa: E501
Returns all devices associated with the given distribution group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.devices_list_with_http_info(distribution_group_name, owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string distribution_group_name: The name of the distribution group. (required)
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param number release_id: when provided, gets the provisioning state of the devices owned by users of this distribution group when compared to the provided release (optional)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['distribution_group_name', 'owner_name', 'app_name', 'release_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method devices_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'distribution_group_name' is set
if ('distribution_group_name' not in params or
params['distribution_group_name'] is None):
raise ValueError("Missing the required parameter `distribution_group_name` when calling `devices_list`") # noqa: E501
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `devices_list`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `devices_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'distribution_group_name' in params:
path_params['distribution_group_name'] = params['distribution_group_name'] # noqa: E501
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
if 'release_id' in params:
query_params.append(('release_id', params['release_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/distribution_groups/{distribution_group_name}/devices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def appleMapping_TestFlightGroups(self, owner_name, app_name, **kwargs): # noqa: E501
"""appleMapping_TestFlightGroups # noqa: E501
Fetch all Apple TestFlight groups. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.appleMapping_TestFlightGroups(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.appleMapping_TestFlightGroups_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.appleMapping_TestFlightGroups_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def appleMapping_TestFlightGroups_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""appleMapping_TestFlightGroups # noqa: E501
Fetch all Apple TestFlight groups. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.appleMapping_TestFlightGroups_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method appleMapping_TestFlightGroups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `appleMapping_TestFlightGroups`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `appleMapping_TestFlightGroups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/apple_test_flight_groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def appleMapping_get(self, owner_name, app_name, **kwargs): # noqa: E501
"""appleMapping_get # noqa: E501
Get the mapping of the app to an existing app in the Apple App Store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.appleMapping_get(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.appleMapping_get_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.appleMapping_get_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def appleMapping_get_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""appleMapping_get # noqa: E501
Get the mapping of the app to an existing app in the Apple App Store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.appleMapping_get_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method appleMapping_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `appleMapping_get`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `appleMapping_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/apple_mapping', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def appleMapping_delete(self, owner_name, app_name, **kwargs): # noqa: E501
"""appleMapping_delete # noqa: E501
Delete the mapping of the app to an existing app in the Apple App Store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.appleMapping_delete(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.appleMapping_delete_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
else:
(data) = self.appleMapping_delete_with_http_info(owner_name, app_name, **kwargs) # noqa: E501
return data
def appleMapping_delete_with_http_info(self, owner_name, app_name, **kwargs): # noqa: E501
"""appleMapping_delete # noqa: E501
Delete the mapping of the app to an existing app in the Apple App Store. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.appleMapping_delete_with_http_info(owner_name, app_name, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method appleMapping_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `appleMapping_delete`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `appleMapping_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'multipart/form-data', 'application/json-patch+json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/apple_mapping', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def appleMapping_create(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""appleMapping_create # noqa: E501
Create a mapping to an existing Apple App Store app for the specified application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.appleMapping_create(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The apple app mapping object (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.appleMapping_create_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
else:
(data) = self.appleMapping_create_with_http_info(owner_name, app_name, body, **kwargs) # noqa: E501
return data
def appleMapping_create_with_http_info(self, owner_name, app_name, body, **kwargs): # noqa: E501
"""appleMapping_create # noqa: E501
Create a mapping to an existing Apple App Store app for the specified application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.appleMapping_create_with_http_info(owner_name, app_name, body, async=True)
>>> result = thread.get()
:param async bool
:param string owner_name: The name of the owner (required)
:param string app_name: The name of the application (required)
:param object body: The apple app mapping object (required)
:return: ErrorDetails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_name', 'app_name', 'body'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method appleMapping_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_name' is set
if ('owner_name' not in params or
params['owner_name'] is None):
raise ValueError("Missing the required parameter `owner_name` when calling `appleMapping_create`") # noqa: E501
# verify the required parameter 'app_name' is set
if ('app_name' not in params or
params['app_name'] is None):
raise ValueError("Missing the required parameter `app_name` when calling `appleMapping_create`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `appleMapping_create`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_name' in params:
path_params['owner_name'] = params['owner_name'] # noqa: E501
if 'app_name' in params:
path_params['app_name'] = params['app_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['APIToken'] # noqa: E501
return self.api_client.call_api(
'/v0.1/apps/{owner_name}/{app_name}/apple_mapping', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ErrorDetails', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 46.345533 | 284 | 0.631941 | 31,520 | 269,731 | 5.178236 | 0.011643 | 0.048818 | 0.024703 | 0.032938 | 0.982251 | 0.974837 | 0.970616 | 0.9658 | 0.964379 | 0.960948 | 0 | 0.0159 | 0.27834 | 269,731 | 5,819 | 285 | 46.353497 | 0.822603 | 0.073091 | 0 | 0.822691 | 0 | 0 | 0.25125 | 0.0771 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.00124 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9a484625217b2e19f2b921f43140968c07ba23eb | 84 | py | Python | examples/get_appeal_status_example.py | KristN1/cube_appeals | f81503ee665d6a69f3a5b06a69bddb020194f713 | [
"MIT"
] | null | null | null | examples/get_appeal_status_example.py | KristN1/cube_appeals | f81503ee665d6a69f3a5b06a69bddb020194f713 | [
"MIT"
] | null | null | null | examples/get_appeal_status_example.py | KristN1/cube_appeals | f81503ee665d6a69f3a5b06a69bddb020194f713 | [
"MIT"
] | null | null | null | from cube_appeals import get_appeal_status
print(get_appeal_status.java("Jukaido")) | 28 | 42 | 0.857143 | 13 | 84 | 5.153846 | 0.769231 | 0.268657 | 0.447761 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.059524 | 84 | 3 | 43 | 28 | 0.848101 | 0 | 0 | 0 | 0 | 0 | 0.082353 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 7 |
7beef99b61677bcc7028eebfa0f76c49efa1e5cd | 7,984 | py | Python | src/system_of_equations/factorization.py | jestra52/supor-numerical-analysis-api | 3cebc86cf2bba95789de1cb45232aaad182f332f | [
"MIT"
] | 1 | 2020-06-09T17:18:01.000Z | 2020-06-09T17:18:01.000Z | src/system_of_equations/factorization.py | jestra52/supor-numerical-analysis-api | 3cebc86cf2bba95789de1cb45232aaad182f332f | [
"MIT"
] | null | null | null | src/system_of_equations/factorization.py | jestra52/supor-numerical-analysis-api | 3cebc86cf2bba95789de1cb45232aaad182f332f | [
"MIT"
] | null | null | null | import copy as cp
import math as mt
import numpy as np
import threading
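# The class below implements three classical factorizations of a square matrix
# A into triangular factors A = L * U (L lower triangular, U upper triangular):
#   - cholesky:  requires A symmetric positive definite; U = L^T, so A = L L^T.
#   - doolittle: L comes out with a unit diagonal (L[k][k] == 1).
#   - crout:     U comes out with a unit diagonal (U[k][k] == 1).
# For each index k, the independent entries of the k-th row/column of L and U
# are computed concurrently with threads. Once L and U are known, A x = b is
# solved by forward substitution (L z = b) followed by back substitution
# (U x = z).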
class Factorization:
def cholesky(self, A, b):
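        # Cholesky variant of LU: only valid for symmetric positive definite A.
        # A zero pivot raises ZeroDivisionError; a negative value under the
        # square root (A not positive definite) makes mt.sqrt raise ValueError.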
result = {
'aMatrix': None,
'bMatrix': None,
'lMatrix': None,
'uMatrix': None,
'xMatrix': None,
'iterations': None,
'hasInfiniteSolutions': False,
'resultMessage': None,
'solutionFailed': False,
'error': False,
'errorMessage': None
}
n = len(A)
L = np.zeros((n, n))
U = np.zeros((n, n))
phases = list()
def diagonal_operation_async(k):
incr = 0
for p in range(0, k):
incr += L[k][p] * U[p][k]
L[k][k] = mt.sqrt(A[k][k] - incr)
U[k][k] = L[k][k]
def row_operation_async(k, i):
incr = 0
for r in range(0, k):
incr += L[i][r] * U[r][k]
L[i][k] = (A[i][k] - incr) / L[k][k]
def column_operation_async(k, j):
incr = 0
for s in range(0, k):
incr += L[k][s] * U[s][j]
U[k][j] = (A[k][j] - incr) / L[k][k]
for k in range(0, n):
thread = threading.Thread(target=diagonal_operation_async, args=([k]))
thread.start()
thread.join()
if L[k][k] == 0:
raise ZeroDivisionError
threads = list()
for i in range(k+1, n):
thread = threading.Thread(target=row_operation_async, args=(k, i))
threads.append(thread)
thread.start()
for thread in threads: thread.join()
threads.clear()
for j in range(k+1, n):
thread = threading.Thread(target=column_operation_async, args=(k, j))
threads.append(thread)
thread.start()
for thread in threads: thread.join()
if k < n - 1:
iteration = {
'lMatrix': list(map(lambda l: list(l), cp.deepcopy(L))),
'uMatrix': list(map(lambda u: list(u), cp.deepcopy(U))),
}
phases.append(cp.deepcopy(iteration))
if not result['error']:
result['aMatrix'] = A
result['bMatrix'] = b
result['lMatrix'] = L
result['uMatrix'] = U
result['xMatrix'] = self.solve_x(L, U, b)
result['iterations'] = phases
return result
def doolittle(self, A, b):
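        # Doolittle variant of LU: L gets a unit diagonal, the pivots sit on
        # the diagonal of U, and a zero U[k][k] raises ZeroDivisionError.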
result = {
'aMatrix': None,
'bMatrix': None,
'lMatrix': None,
'uMatrix': None,
'xMatrix': None,
'iterations': None,
'hasInfiniteSolutions': False,
'resultMessage': None,
'solutionFailed': False,
'error': False,
'errorMessage': None
}
n = len(A)
L = np.zeros((n, n))
U = np.zeros((n, n))
phases = list()
def column_operation_async(k, j):
incr = 0
for p in range(k):
incr += L[k][p] * U[p][j]
U[k][j] = (A[k][j] - incr)
def row_operation_async(k, i):
incr = 0
for r in range(k):
incr += L[i][r] * U[r][k]
L[i][k] = (A[i][k] - incr) / U[k][k]
for k in range(0,n):
threads = list()
for j in range(k, n):
thread = threading.Thread(target=column_operation_async, args=(k, j))
threads.append(thread)
thread.start()
for thread in threads:
thread.join()
if U[k][k] == 0:
raise ZeroDivisionError
threads.clear()
for i in range(k, n):
thread = threading.Thread(target=row_operation_async, args=(k, i))
threads.append(thread)
thread.start()
for thread in threads:
thread.join()
if k < n - 1:
iteration = {
'lMatrix': list(map(lambda l: list(l), cp.deepcopy(L))),
'uMatrix': list(map(lambda u: list(u), cp.deepcopy(U))),
}
phases.append(cp.deepcopy(iteration))
if not result['error']:
result['aMatrix'] = A
result['bMatrix'] = b
result['lMatrix'] = L
result['uMatrix'] = U
result['xMatrix'] = self.solve_x(L, U, b)
result['iterations'] = phases
return result
def crout(self, A, b):
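        # Crout variant of LU: U gets a unit diagonal, the pivots sit on the
        # diagonal of L, and a zero L[k][k] raises ZeroDivisionError.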
result = {
'aMatrix': None,
'bMatrix': None,
'lMatrix': None,
'uMatrix': None,
'xMatrix': None,
'iterations': None,
'hasInfiniteSolutions': False,
'resultMessage': None,
'solutionFailed': False,
'error': False,
'errorMessage': None
}
n = len(A)
L = np.zeros((n, n))
U = np.zeros((n, n))
phases = list()
def row_operation_async(k, i):
incr = 0
for p in range(0,k):
incr += L[i][p] * U[p][k]
L[i][k] = A[i][k] - incr
def column_operation_async(k, j):
incr = 0
for p in range(0,k):
incr += L[k][p] * U[p][j]
U[k][j] = (A[k][j] - incr) / L[k][k]
for k in range(0, n):
threads = list()
for i in range(k, n):
thread = threading.Thread(target=row_operation_async, args=(k, i))
threads.append(thread)
thread.start()
for thread in threads:
thread.join()
if L[k][k] == 0:
raise ZeroDivisionError
threads.clear()
for j in range(k, n):
thread = threading.Thread(target=column_operation_async, args=(k, j))
threads.append(thread)
thread.start()
for thread in threads:
thread.join()
if k < n - 1:
iteration = {
'lMatrix': list(map(lambda l: list(l), cp.deepcopy(L))),
'uMatrix': list(map(lambda u: list(u), cp.deepcopy(U))),
}
phases.append(cp.deepcopy(iteration))
if not result['error']:
result['aMatrix'] = A
result['bMatrix'] = b
result['lMatrix'] = L
result['uMatrix'] = U
result['xMatrix'] = self.solve_x(L, U, b)
result['iterations'] = phases
return result
def solve_z(self, L, b):
n = len(b)
Z = []
for i in range(n):
Z.append(0)
for i in range(0, n):
incr = 0
for p in range(0, i):
incr += L[i][p] * Z[p]
if L[i][i] == 0:
raise ZeroDivisionError
Z[i] = (b[i] - incr) / L[i][i]
return Z
def solve_x(self, L, U, b):
n = len(b)
Z = self.solve_z(L, b)
X = []
for i in range(n):
X.append(0)
i = n - 1
while i >= 0:
incr = 0
for p in range(i+1, n):
incr += U[i][p] * X[p]
if U[i][i] == 0:
raise ZeroDivisionError
X[i] = (Z[i] - incr) / U[i][i]
i -= 1
return X
def get_invertible_matrix(self, L, U):
n = len(L)
invertible_a = []
for i in range(0, n):
b = []
for j in range(0, n):
if j == i: b.append(1)
else: b.append(0)
invertible_a.append(self.solve_x(L, U, b))
return invertible_a
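
# Illustrative usage sketch (the enclosing class is not shown in this excerpt,
# so the instance below is an assumption):
#
#   solver = ...  # instance of the solver class defined above
#   A = [[4.0, 3.0], [6.0, 3.0]]
#   b = [10.0, 12.0]
#   out = solver.doolittle(A, b)
#   out['xMatrix']  # solution of A.x = b via forward/back substitution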
| 28.312057 | 85 | 0.423096 | 930 | 7,984 | 3.589247 | 0.088172 | 0.048232 | 0.02876 | 0.046135 | 0.87118 | 0.832534 | 0.816058 | 0.798682 | 0.782804 | 0.761833 | 0 | 0.008742 | 0.441258 | 7,984 | 281 | 86 | 28.412811 | 0.73952 | 0 | 0 | 0.732456 | 0 | 0 | 0.065005 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057018 | false | 0 | 0.017544 | 0 | 0.105263 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d0004c8e3eb27ae16c6eb46700d72b25a103fcf9 | 16,124 | py | Python | analyze_foldamers/tests/test_rmsd_clustering.py | shirtsgroup/analyze_foldamers | 17a7b948d1d0d4fbfb1d84d58753289404fb99a9 | [
"MIT"
] | null | null | null | analyze_foldamers/tests/test_rmsd_clustering.py | shirtsgroup/analyze_foldamers | 17a7b948d1d0d4fbfb1d84d58753289404fb99a9 | [
"MIT"
] | 33 | 2020-08-05T23:00:56.000Z | 2022-03-21T22:37:03.000Z | analyze_foldamers/tests/test_rmsd_clustering.py | shirtsgroup/analyze_foldamers | 17a7b948d1d0d4fbfb1d84d58753289404fb99a9 | [
"MIT"
] | null | null | null | """
Unit and regression test for the analyze_foldamers package.
"""
# Import package, test suite, and other packages as needed
import analyze_foldamers
import pytest
import sys
import os
import pickle
from cg_openmm.cg_model.cgmodel import CGModel
from analyze_foldamers.ensembles.cluster import *
current_path = os.path.dirname(os.path.abspath(__file__))
data_path = os.path.join(current_path, 'test_data')
def test_clustering_kmedoids_pdb(tmpdir):
    """Test KMedoids clustering"""
    output_directory = tmpdir.mkdir("output")
    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = []
    for i in range(number_replicas):
        pdb_file_list.append(f"{data_path}/replica_{i+1}.pdb")
    # Set clustering parameters
    n_clusters = 2
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run KMedoids clustering
    medoid_positions, cluster_size, cluster_rmsd, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_KMedoids(
            pdb_file_list,
            cgmodel,
            n_clusters=n_clusters,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )
    assert len(cluster_rmsd) == n_clusters
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_1.pdb")
    assert os.path.isfile(f"{output_directory}/silhouette_kmedoids_ncluster_{n_clusters}.pdf")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_kmedoids_pdb_no_cgmodel(tmpdir):
    """Test KMedoids clustering without a cgmodel"""
    output_directory = tmpdir.mkdir("output")
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = []
    for i in range(number_replicas):
        pdb_file_list.append(f"{data_path}/replica_{i+1}.pdb")
    # Set clustering parameters
    n_clusters = 2
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run KMedoids clustering
    medoid_positions, cluster_size, cluster_rmsd, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_KMedoids(
            pdb_file_list,
            cgmodel=None,
            n_clusters=n_clusters,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )
    assert len(cluster_rmsd) == n_clusters
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_1.pdb")
    assert os.path.isfile(f"{output_directory}/silhouette_kmedoids_ncluster_{n_clusters}.pdf")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_kmedoids_dcd(tmpdir):
    """Test KMedoids clustering"""
    output_directory = tmpdir.mkdir("output")
    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = []
    for i in range(number_replicas):
        dcd_file_list.append(f"{data_path}/replica_{i+1}.dcd")
    # Set clustering parameters
    n_clusters = 2
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run KMedoids clustering
    medoid_positions, cluster_size, cluster_rmsd, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_KMedoids(
            dcd_file_list,
            cgmodel,
            n_clusters=n_clusters,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )
    assert len(cluster_rmsd) == n_clusters
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_1.dcd")
    assert os.path.isfile(f"{output_directory}/silhouette_kmedoids_ncluster_{n_clusters}.pdf")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_pdb(tmpdir):
    """Test DBSCAN clustering"""
    output_directory = tmpdir.mkdir("output")
    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = []
    for i in range(number_replicas):
        pdb_file_list.append(f"{data_path}/replica_{i+1}.pdb")
    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=False,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_pdb_core_medoids(tmpdir):
    """Test DBSCAN clustering, restricting medoids to core points"""
    output_directory = tmpdir.mkdir("output")
    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = []
    for i in range(number_replicas):
        pdb_file_list.append(f"{data_path}/replica_{i+1}.pdb")
    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=True,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_pdb_no_cgmodel(tmpdir):
    """Test DBSCAN clustering without a cgmodel object"""
    output_directory = tmpdir.mkdir("output")
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = []
    for i in range(number_replicas):
        pdb_file_list.append(f"{data_path}/replica_{i+1}.pdb")
    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            pdb_file_list,
            cgmodel=None,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=False,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_dcd(tmpdir):
    """Test DBSCAN clustering"""
    output_directory = tmpdir.mkdir("output")
    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = []
    for i in range(number_replicas):
        dcd_file_list.append(f"{data_path}/replica_{i+1}.dcd")
    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            dcd_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=False,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.dcd")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_optics_pdb(tmpdir):
    """Test OPTICS clustering"""
    output_directory = tmpdir.mkdir("output")
    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = []
    for i in range(number_replicas):
        pdb_file_list.append(f"{data_path}/replica_{i+1}.pdb")
    # Set clustering parameters
    min_samples = 5
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run OPTICS density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_OPTICS(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_optics_pdb_no_cgmodel(tmpdir):
    """Test OPTICS clustering without a cgmodel object"""
    output_directory = tmpdir.mkdir("output")
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = []
    for i in range(number_replicas):
        pdb_file_list.append(f"{data_path}/replica_{i+1}.pdb")
    # Set clustering parameters
    min_samples = 5
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run OPTICS density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_OPTICS(
            pdb_file_list,
            cgmodel=None,
            min_samples=min_samples,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_optics_dcd(tmpdir):
    """Test OPTICS clustering"""
    output_directory = tmpdir.mkdir("output")
    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = []
    for i in range(number_replicas):
        dcd_file_list.append(f"{data_path}/replica_{i+1}.dcd")
    # Set clustering parameters
    min_samples = 5
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run OPTICS density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_OPTICS(
            dcd_file_list,
            cgmodel,
            min_samples=min_samples,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.dcd")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_pdb_output_clusters(tmpdir):
    """Test DBSCAN clustering with cluster trajectory output"""
    output_directory = tmpdir.mkdir("output")
    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))
    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = []
    for i in range(number_replicas):
        pdb_file_list.append(f"{data_path}/replica_{i+1}.pdb")
    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1
    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            output_cluster_traj=True,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/cluster_0.pdb")
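
# Illustrative usage note (command line is an assumption): the suite above
# depends only on pytest's built-in tmpdir fixture and the bundled test_data
# directory, so it can be run directly with, e.g.,
#
#   pytest analyze_foldamers/tests/test_rmsd_clustering.py -v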
| 33.52183 | 105 | 0.636691 | 1,959 | 16,124 | 4.933129 | 0.062787 | 0.072951 | 0.031043 | 0.046565 | 0.947951 | 0.938431 | 0.938431 | 0.938431 | 0.9375 | 0.936672 | 0 | 0.013196 | 0.276234 | 16,124 | 480 | 106 | 33.591667 | 0.81491 | 0.112999 | 0 | 0.898844 | 0 | 0 | 0.110439 | 0.092652 | 0 | 0 | 0 | 0 | 0.112717 | 1 | 0.031792 | false | 0 | 0.020231 | 0 | 0.052023 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d0df66dbd41f4ab625950abf8c91982c9557edad | 237 | py | Python | Homework_1.py | maximilianriesterer/friendster | 46b9a170eba3f3e05d56eadd6739b186ecf17939 | [
"Apache-2.0"
] | null | null | null | Homework_1.py | maximilianriesterer/friendster | 46b9a170eba3f3e05d56eadd6739b186ecf17939 | [
"Apache-2.0"
] | null | null | null | Homework_1.py | maximilianriesterer/friendster | 46b9a170eba3f3e05d56eadd6739b186ecf17939 | [
"Apache-2.0"
] | null | null | null | # 'hello world'
print("hello world")
print("hello \nworld")
# task 3
print("#########"'\n\n'"#", "#"'\n\n'"#","#"'\n\n'"#", "#"'\n\n'"#########"'\n\n'"#", "#"'\n\n'"#", "#"'\n\n'"#########"'\n\n'"#", "#"'\n\n'"#", "#", sep = " ")
| 29.625 | 166 | 0.303797 | 30 | 237 | 2.4 | 0.266667 | 0.472222 | 0.666667 | 0.833333 | 0.25 | 0.25 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0.004785 | 0.118143 | 237 | 7 | 167 | 33.857143 | 0.339713 | 0.080169 | 0 | 0 | 0 | 0 | 0.502326 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
d0eb6b0c32e036bf7fc8b7dcba367f2a471bf8f2 | 6,545 | py | Python | loldib/getratings/models/NA/na_twitch/na_twitch_top.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_twitch/na_twitch_top.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_twitch/na_twitch_top.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | from getratings.models.ratings import Ratings
class NA_Twitch_Top_Aatrox(Ratings):
    pass

class NA_Twitch_Top_Ahri(Ratings):
    pass

class NA_Twitch_Top_Akali(Ratings):
    pass

class NA_Twitch_Top_Alistar(Ratings):
    pass

class NA_Twitch_Top_Amumu(Ratings):
    pass

class NA_Twitch_Top_Anivia(Ratings):
    pass

class NA_Twitch_Top_Annie(Ratings):
    pass

class NA_Twitch_Top_Ashe(Ratings):
    pass

class NA_Twitch_Top_AurelionSol(Ratings):
    pass

class NA_Twitch_Top_Azir(Ratings):
    pass

class NA_Twitch_Top_Bard(Ratings):
    pass

class NA_Twitch_Top_Blitzcrank(Ratings):
    pass

class NA_Twitch_Top_Brand(Ratings):
    pass

class NA_Twitch_Top_Braum(Ratings):
    pass

class NA_Twitch_Top_Caitlyn(Ratings):
    pass

class NA_Twitch_Top_Camille(Ratings):
    pass

class NA_Twitch_Top_Cassiopeia(Ratings):
    pass

class NA_Twitch_Top_Chogath(Ratings):
    pass

class NA_Twitch_Top_Corki(Ratings):
    pass

class NA_Twitch_Top_Darius(Ratings):
    pass

class NA_Twitch_Top_Diana(Ratings):
    pass

class NA_Twitch_Top_Draven(Ratings):
    pass

class NA_Twitch_Top_DrMundo(Ratings):
    pass

class NA_Twitch_Top_Ekko(Ratings):
    pass

class NA_Twitch_Top_Elise(Ratings):
    pass

class NA_Twitch_Top_Evelynn(Ratings):
    pass

class NA_Twitch_Top_Ezreal(Ratings):
    pass

class NA_Twitch_Top_Fiddlesticks(Ratings):
    pass

class NA_Twitch_Top_Fiora(Ratings):
    pass

class NA_Twitch_Top_Fizz(Ratings):
    pass

class NA_Twitch_Top_Galio(Ratings):
    pass

class NA_Twitch_Top_Gangplank(Ratings):
    pass

class NA_Twitch_Top_Garen(Ratings):
    pass

class NA_Twitch_Top_Gnar(Ratings):
    pass

class NA_Twitch_Top_Gragas(Ratings):
    pass

class NA_Twitch_Top_Graves(Ratings):
    pass

class NA_Twitch_Top_Hecarim(Ratings):
    pass

class NA_Twitch_Top_Heimerdinger(Ratings):
    pass

class NA_Twitch_Top_Illaoi(Ratings):
    pass

class NA_Twitch_Top_Irelia(Ratings):
    pass

class NA_Twitch_Top_Ivern(Ratings):
    pass

class NA_Twitch_Top_Janna(Ratings):
    pass

class NA_Twitch_Top_JarvanIV(Ratings):
    pass

class NA_Twitch_Top_Jax(Ratings):
    pass

class NA_Twitch_Top_Jayce(Ratings):
    pass

class NA_Twitch_Top_Jhin(Ratings):
    pass

class NA_Twitch_Top_Jinx(Ratings):
    pass

class NA_Twitch_Top_Kalista(Ratings):
    pass

class NA_Twitch_Top_Karma(Ratings):
    pass

class NA_Twitch_Top_Karthus(Ratings):
    pass

class NA_Twitch_Top_Kassadin(Ratings):
    pass

class NA_Twitch_Top_Katarina(Ratings):
    pass

class NA_Twitch_Top_Kayle(Ratings):
    pass

class NA_Twitch_Top_Kayn(Ratings):
    pass

class NA_Twitch_Top_Kennen(Ratings):
    pass

class NA_Twitch_Top_Khazix(Ratings):
    pass

class NA_Twitch_Top_Kindred(Ratings):
    pass

class NA_Twitch_Top_Kled(Ratings):
    pass

class NA_Twitch_Top_KogMaw(Ratings):
    pass

class NA_Twitch_Top_Leblanc(Ratings):
    pass

class NA_Twitch_Top_LeeSin(Ratings):
    pass

class NA_Twitch_Top_Leona(Ratings):
    pass

class NA_Twitch_Top_Lissandra(Ratings):
    pass

class NA_Twitch_Top_Lucian(Ratings):
    pass

class NA_Twitch_Top_Lulu(Ratings):
    pass

class NA_Twitch_Top_Lux(Ratings):
    pass

class NA_Twitch_Top_Malphite(Ratings):
    pass

class NA_Twitch_Top_Malzahar(Ratings):
    pass

class NA_Twitch_Top_Maokai(Ratings):
    pass

class NA_Twitch_Top_MasterYi(Ratings):
    pass

class NA_Twitch_Top_MissFortune(Ratings):
    pass

class NA_Twitch_Top_MonkeyKing(Ratings):
    pass

class NA_Twitch_Top_Mordekaiser(Ratings):
    pass

class NA_Twitch_Top_Morgana(Ratings):
    pass

class NA_Twitch_Top_Nami(Ratings):
    pass

class NA_Twitch_Top_Nasus(Ratings):
    pass

class NA_Twitch_Top_Nautilus(Ratings):
    pass

class NA_Twitch_Top_Nidalee(Ratings):
    pass

class NA_Twitch_Top_Nocturne(Ratings):
    pass

class NA_Twitch_Top_Nunu(Ratings):
    pass

class NA_Twitch_Top_Olaf(Ratings):
    pass

class NA_Twitch_Top_Orianna(Ratings):
    pass

class NA_Twitch_Top_Ornn(Ratings):
    pass

class NA_Twitch_Top_Pantheon(Ratings):
    pass

class NA_Twitch_Top_Poppy(Ratings):
    pass

class NA_Twitch_Top_Quinn(Ratings):
    pass

class NA_Twitch_Top_Rakan(Ratings):
    pass

class NA_Twitch_Top_Rammus(Ratings):
    pass

class NA_Twitch_Top_RekSai(Ratings):
    pass

class NA_Twitch_Top_Renekton(Ratings):
    pass

class NA_Twitch_Top_Rengar(Ratings):
    pass

class NA_Twitch_Top_Riven(Ratings):
    pass

class NA_Twitch_Top_Rumble(Ratings):
    pass

class NA_Twitch_Top_Ryze(Ratings):
    pass

class NA_Twitch_Top_Sejuani(Ratings):
    pass

class NA_Twitch_Top_Shaco(Ratings):
    pass

class NA_Twitch_Top_Shen(Ratings):
    pass

class NA_Twitch_Top_Shyvana(Ratings):
    pass

class NA_Twitch_Top_Singed(Ratings):
    pass

class NA_Twitch_Top_Sion(Ratings):
    pass

class NA_Twitch_Top_Sivir(Ratings):
    pass

class NA_Twitch_Top_Skarner(Ratings):
    pass

class NA_Twitch_Top_Sona(Ratings):
    pass

class NA_Twitch_Top_Soraka(Ratings):
    pass

class NA_Twitch_Top_Swain(Ratings):
    pass

class NA_Twitch_Top_Syndra(Ratings):
    pass

class NA_Twitch_Top_TahmKench(Ratings):
    pass

class NA_Twitch_Top_Taliyah(Ratings):
    pass

class NA_Twitch_Top_Talon(Ratings):
    pass

class NA_Twitch_Top_Taric(Ratings):
    pass

class NA_Twitch_Top_Teemo(Ratings):
    pass

class NA_Twitch_Top_Thresh(Ratings):
    pass

class NA_Twitch_Top_Tristana(Ratings):
    pass

class NA_Twitch_Top_Trundle(Ratings):
    pass

class NA_Twitch_Top_Tryndamere(Ratings):
    pass

class NA_Twitch_Top_TwistedFate(Ratings):
    pass

class NA_Twitch_Top_Twitch(Ratings):
    pass

class NA_Twitch_Top_Udyr(Ratings):
    pass

class NA_Twitch_Top_Urgot(Ratings):
    pass

class NA_Twitch_Top_Varus(Ratings):
    pass

class NA_Twitch_Top_Vayne(Ratings):
    pass

class NA_Twitch_Top_Veigar(Ratings):
    pass

class NA_Twitch_Top_Velkoz(Ratings):
    pass

class NA_Twitch_Top_Vi(Ratings):
    pass

class NA_Twitch_Top_Viktor(Ratings):
    pass

class NA_Twitch_Top_Vladimir(Ratings):
    pass

class NA_Twitch_Top_Volibear(Ratings):
    pass

class NA_Twitch_Top_Warwick(Ratings):
    pass

class NA_Twitch_Top_Xayah(Ratings):
    pass

class NA_Twitch_Top_Xerath(Ratings):
    pass

class NA_Twitch_Top_XinZhao(Ratings):
    pass

class NA_Twitch_Top_Yasuo(Ratings):
    pass

class NA_Twitch_Top_Yorick(Ratings):
    pass

class NA_Twitch_Top_Zac(Ratings):
    pass

class NA_Twitch_Top_Zed(Ratings):
    pass

class NA_Twitch_Top_Ziggs(Ratings):
    pass

class NA_Twitch_Top_Zilean(Ratings):
    pass

class NA_Twitch_Top_Zyra(Ratings):
    pass
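
# The per-champion boilerplate above could equivalently be generated at import
# time; a minimal sketch (champion tuple truncated, class names otherwise
# identical):
#
#   for champ in ('Aatrox', 'Ahri', 'Zyra'):
#       globals()['NA_Twitch_Top_' + champ] = type(
#           'NA_Twitch_Top_' + champ, (Ratings,), {}
#       )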
| 15.695444 | 46 | 0.766692 | 972 | 6,545 | 4.736626 | 0.151235 | 0.209818 | 0.389661 | 0.479583 | 0.803432 | 0.803432 | 0 | 0 | 0 | 0 | 0 | 0 | 0.169748 | 6,545 | 416 | 47 | 15.733173 | 0.847258 | 0 | 0 | 0.498195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.498195 | 0.00361 | 0 | 0.501805 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 8 |
4be2c0ed62bf6ee4e5e780a10df446d3ce5503a7 | 14,567 | py | Python | kmip/core/policy.py | vbnmmnbv/PyKMIP | 4617ae528006178c466fe3945a477f568b596940 | [
"Apache-2.0"
] | 12 | 2016-09-14T21:59:10.000Z | 2020-03-11T07:37:25.000Z | kmip/core/policy.py | vbnmmnbv/PyKMIP | 4617ae528006178c466fe3945a477f568b596940 | [
"Apache-2.0"
] | 1 | 2021-06-25T15:43:48.000Z | 2021-06-25T15:43:48.000Z | kmip/core/policy.py | vbnmmnbv/PyKMIP | 4617ae528006178c466fe3945a477f568b596940 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import six
from kmip.core import enums
def read_policy_from_file(path):
    with open(path, 'r') as f:
        try:
            policy_blob = json.loads(f.read())
        except Exception as e:
            raise ValueError(
                "An error occurred while attempting to parse the JSON "
                "file. {0}".format(e)
            )
    policies = dict()
    for name, object_policies in six.iteritems(policy_blob):
        processed_object_policies = dict()
        for object_type, operation_policies in six.iteritems(object_policies):
            processed_operation_policies = dict()
            for operation, permission in six.iteritems(operation_policies):
                try:
                    enum_operation = enums.Operation[operation]
                except Exception:
                    raise ValueError(
                        "'{0}' is not a valid Operation value.".format(
                            operation
                        )
                    )
                try:
                    enum_policy = enums.Policy[permission]
                except Exception:
                    raise ValueError(
                        "'{0}' is not a valid Policy value.".format(
                            permission
                        )
                    )
                processed_operation_policies.update([
                    (enum_operation, enum_policy)
                ])
            try:
                enum_type = enums.ObjectType[object_type]
            except Exception:
                raise ValueError(
                    "'{0}' is not a valid ObjectType value.".format(
                        object_type
                    )
                )
            processed_object_policies.update([
                (enum_type, processed_operation_policies)
            ])
        policies.update([(name, processed_object_policies)])
    return policies
policies = {
    'default': {
        enums.ObjectType.CERTIFICATE: {
            enums.Operation.LOCATE: enums.Policy.ALLOW_ALL,
            enums.Operation.CHECK: enums.Policy.ALLOW_ALL,
            enums.Operation.GET: enums.Policy.ALLOW_ALL,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_ALL,
            enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER,
            enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER,
            enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER
        },
        enums.ObjectType.SYMMETRIC_KEY: {
            enums.Operation.REKEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER,
            enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.CHECK: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER,
            enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER,
            enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER,
            enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER
        },
        enums.ObjectType.PUBLIC_KEY: {
            enums.Operation.LOCATE: enums.Policy.ALLOW_ALL,
            enums.Operation.CHECK: enums.Policy.ALLOW_ALL,
            enums.Operation.GET: enums.Policy.ALLOW_ALL,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_ALL,
            enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER,
            enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER,
            enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER
        },
        enums.ObjectType.PRIVATE_KEY: {
            enums.Operation.REKEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER,
            enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.CHECK: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER,
            enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER,
            enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER,
            enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER
        },
        enums.ObjectType.SPLIT_KEY: {
            enums.Operation.REKEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER,
            enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.CHECK: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER,
            enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER,
            enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER,
            enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER
        },
        enums.ObjectType.TEMPLATE: {
            enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER
        },
        enums.ObjectType.SECRET_DATA: {
            enums.Operation.REKEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER,
            enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.CHECK: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER,
            enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER,
            enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER,
            enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER
        },
        enums.ObjectType.OPAQUE_DATA: {
            enums.Operation.REKEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER,
            enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.CHECK: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER,
            enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER,
            enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER,
            enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER
        },
        enums.ObjectType.PGP_KEY: {
            enums.Operation.REKEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.REKEY_KEY_PAIR: enums.Policy.ALLOW_OWNER,
            enums.Operation.DERIVE_KEY: enums.Policy.ALLOW_OWNER,
            enums.Operation.LOCATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.CHECK: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_OWNER,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.ALLOW_OWNER,
            enums.Operation.OBTAIN_LEASE: enums.Policy.ALLOW_OWNER,
            enums.Operation.GET_USAGE_ALLOCATION: enums.Policy.ALLOW_OWNER,
            enums.Operation.ACTIVATE: enums.Policy.ALLOW_OWNER,
            enums.Operation.REVOKE: enums.Policy.ALLOW_OWNER,
            enums.Operation.DESTROY: enums.Policy.ALLOW_OWNER,
            enums.Operation.ARCHIVE: enums.Policy.ALLOW_OWNER,
            enums.Operation.RECOVER: enums.Policy.ALLOW_OWNER
        }
    },
    'public': {
        enums.ObjectType.TEMPLATE: {
            enums.Operation.LOCATE: enums.Policy.ALLOW_ALL,
            enums.Operation.GET: enums.Policy.ALLOW_ALL,
            enums.Operation.GET_ATTRIBUTES: enums.Policy.ALLOW_ALL,
            enums.Operation.GET_ATTRIBUTE_LIST: enums.Policy.ALLOW_ALL,
            enums.Operation.ADD_ATTRIBUTE: enums.Policy.DISALLOW_ALL,
            enums.Operation.MODIFY_ATTRIBUTE: enums.Policy.DISALLOW_ALL,
            enums.Operation.DELETE_ATTRIBUTE: enums.Policy.DISALLOW_ALL,
            enums.Operation.DESTROY: enums.Policy.DISALLOW_ALL
        }
    }
}
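
# Illustrative round-trip sketch (the file contents and policy name below are
# assumptions for demonstration, not part of the shipped policies):
if __name__ == '__main__':
    import tempfile

    example = {
        'example_policy': {
            'SYMMETRIC_KEY': {
                'GET': 'ALLOW_OWNER',
                'DESTROY': 'ALLOW_OWNER'
            }
        }
    }
    # Write the example policy to a temporary JSON file and parse it back.
    with tempfile.NamedTemporaryFile(mode='w', suffix='.json',
                                     delete=False) as f:
        json.dump(example, f)
    parsed = read_policy_from_file(f.name)
    assert parsed['example_policy'][enums.ObjectType.SYMMETRIC_KEY][
        enums.Operation.GET] == enums.Policy.ALLOW_OWNER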
| 56.243243 | 78 | 0.600192 | 1,469 | 14,567 | 5.756297 | 0.100749 | 0.253311 | 0.280038 | 0.327815 | 0.835383 | 0.833254 | 0.830653 | 0.817053 | 0.817053 | 0.793756 | 0 | 0.001213 | 0.320931 | 14,567 | 258 | 79 | 56.46124 | 0.853619 | 0.042287 | 0 | 0.710526 | 0 | 0 | 0.013275 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004386 | false | 0 | 0.013158 | 0 | 0.02193 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
4be395efd242dfddb071fb767e62b0f60185f97e | 13,689 | py | Python | code/vggnet.py | statisticszhang/Image-classification-caffe-model | 33084ca0841e768dae84db582e15bb29ffeeaaec | [
"MIT"
] | 1 | 2020-06-03T12:53:43.000Z | 2020-06-03T12:53:43.000Z | code/vggnet.py | statisticszhang/Image-classification-caffe-model | 33084ca0841e768dae84db582e15bb29ffeeaaec | [
"MIT"
] | null | null | null | code/vggnet.py | statisticszhang/Image-classification-caffe-model | 33084ca0841e768dae84db582e15bb29ffeeaaec | [
"MIT"
] | null | null | null | from caffe import layers as L
from caffe import params as P
import caffe
def conv_relu(bottom, num_output=64, kernel_size=3, stride=1, pad=1):
    conv = L.Convolution(bottom, num_output=num_output, kernel_size=kernel_size, stride=stride, pad=pad,
                         param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
                         weight_filler=dict(type='gaussian', std=0.01),
                         bias_filler=dict(type='constant', value=0))
    relu = L.ReLU(conv, in_place=True)
    return conv, relu


def fc_relu_drop(bottom, fc_num_output=4096, dropout_ratio=0.5):
    fc = L.InnerProduct(bottom, num_output=fc_num_output,
                        param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
                        weight_filler=dict(type='gaussian', std=0.01),
                        bias_filler=dict(type='constant', value=0))
    relu = L.ReLU(fc, in_place=True)
    drop = L.Dropout(fc, in_place=True, dropout_param=dict(dropout_ratio=dropout_ratio))
    return fc, relu, drop


def conv_bn_scale_relu(bottom, num_output=64, kernel_size=3, stride=1, pad=1):
    conv = L.Convolution(bottom, num_output=num_output, kernel_size=kernel_size, stride=stride, pad=pad,
                         param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
                         weight_filler=dict(type='gaussian', std=0.01),
                         bias_filler=dict(type='constant', value=0))
    bn = L.BatchNorm(conv, use_global_stats=False, in_place=True)
    scale = L.Scale(conv, scale_param=dict(bias_term=True), in_place=True)
    relu = L.ReLU(conv, in_place=True)
    return conv, bn, scale, relu


def accuracy_top1_top5(bottom, label):
    accuracy_top1 = L.Accuracy(bottom, label, include=dict(phase=1))
    accuracy_top5 = L.Accuracy(bottom, label, include=dict(phase=1), accuracy_param=dict(top_k=5))
    return accuracy_top1, accuracy_top5


class VggNet(object):
    def __init__(self, lmdb_train, lmdb_test, num_output):
        self.train_data = lmdb_train
        self.test_data = lmdb_test
        self.classifier_num = num_output

    def vgg_16_proto(self, batch_size, phase='TRAIN'):
        n = caffe.NetSpec()
        if phase == 'TRAIN':
            source_data = self.train_data
            mirror = True
        else:
            source_data = self.test_data
            mirror = False
        n.data, n.label = L.Data(source=source_data, backend=P.Data.LMDB, batch_size=batch_size, ntop=2,
                                 transform_param=dict(crop_size=224, mean_value=[104, 117, 123], mirror=mirror))
        n.conv1_1, n.relu1_1 = conv_relu(n.data, num_output=64)
        n.conv1_2, n.relu1_2 = conv_relu(n.conv1_1, num_output=64)
        n.pool1 = L.Pooling(n.conv1_2, pool=P.Pooling.MAX, kernel_size=2, stride=2)  # 64x112x112
        n.conv2_1, n.relu2_1 = conv_relu(n.pool1, num_output=128)
        n.conv2_2, n.relu2_2 = conv_relu(n.conv2_1, num_output=128)
        n.pool2 = L.Pooling(n.conv2_2, pool=P.Pooling.MAX, kernel_size=2, stride=2)  # 128x56x56
        n.conv3_1, n.relu3_1 = conv_relu(n.pool2, num_output=256)
        n.conv3_2, n.relu3_2 = conv_relu(n.conv3_1, num_output=256)
        n.conv3_3, n.relu3_3 = conv_relu(n.conv3_2, num_output=256)
        n.pool3 = L.Pooling(n.conv3_3, pool=P.Pooling.MAX, kernel_size=2, stride=2)  # 256x28x28
        n.conv4_1, n.relu4_1 = conv_relu(n.pool3, num_output=512)
        n.conv4_2, n.relu4_2 = conv_relu(n.conv4_1, num_output=512)
        n.conv4_3, n.relu4_3 = conv_relu(n.conv4_2, num_output=512)
        n.pool4 = L.Pooling(n.conv4_3, pool=P.Pooling.MAX, kernel_size=2, stride=2)  # 512x14x14
        n.conv5_1, n.relu5_1 = conv_relu(n.pool4, num_output=512)
        n.conv5_2, n.relu5_2 = conv_relu(n.conv5_1, num_output=512)
        n.conv5_3, n.relu5_3 = conv_relu(n.conv5_2, num_output=512)
        n.pool5 = L.Pooling(n.conv5_3, pool=P.Pooling.MAX, kernel_size=2, stride=2)  # 512x7x7
        n.fc6, n.relu6, n.drop6 = fc_relu_drop(n.pool5, fc_num_output=4096, dropout_ratio=0.5)
        n.fc7, n.relu7, n.drop7 = fc_relu_drop(n.fc6, fc_num_output=4096, dropout_ratio=0.5)
        n.fc8 = L.InnerProduct(n.fc7, num_output=self.classifier_num,
                               param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
                               weight_filler=dict(type='gaussian', std=0.01),
                               bias_filler=dict(type='constant', value=0))
        n.loss = L.SoftmaxWithLoss(n.fc8, n.label)
        if phase != 'TRAIN':
            n.accuracy_top1, n.accuracy_top5 = accuracy_top1_top5(n.fc8, n.label)
        return n.to_proto()

    def vgg_16_bn_proto(self, batch_size, phase='TRAIN'):
        n = caffe.NetSpec()
        if phase == 'TRAIN':
            source_data = self.train_data
            mirror = True
        else:
            source_data = self.test_data
            mirror = False
        n.data, n.label = L.Data(source=source_data, backend=P.Data.LMDB, batch_size=batch_size, ntop=2,
                                 transform_param=dict(crop_size=224, mean_value=[104, 117, 123], mirror=mirror))
        n.conv1_1, n.bn1_1, n.scale1_1, n.relu1_1 = conv_bn_scale_relu(n.data, num_output=64)
        n.conv1_2, n.bn1_2, n.scale1_2, n.relu1_2 = conv_bn_scale_relu(n.conv1_1, num_output=64)
        n.pool1 = L.Pooling(n.conv1_2, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv2_1, n.bn2_1, n.scale2_1, n.relu2_1 = conv_bn_scale_relu(n.pool1, num_output=128)
        n.conv2_2, n.bn2_2, n.scale2_2, n.relu2_2 = conv_bn_scale_relu(n.conv2_1, num_output=128)
        n.pool2 = L.Pooling(n.conv2_2, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv3_1, n.bn3_1, n.scale3_1, n.relu3_1 = conv_bn_scale_relu(n.pool2, num_output=256)
        n.conv3_2, n.bn3_2, n.scale3_2, n.relu3_2 = conv_bn_scale_relu(n.conv3_1, num_output=256)
        n.conv3_3, n.bn3_3, n.scale3_3, n.relu3_3 = conv_bn_scale_relu(n.conv3_2, num_output=256)
        n.pool3 = L.Pooling(n.conv3_3, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv4_1, n.bn4_1, n.scale4_1, n.relu4_1 = conv_bn_scale_relu(n.pool3, num_output=512)
        n.conv4_2, n.bn4_2, n.scale4_2, n.relu4_2 = conv_bn_scale_relu(n.conv4_1, num_output=512)
        n.conv4_3, n.bn4_3, n.scale4_3, n.relu4_3 = conv_bn_scale_relu(n.conv4_2, num_output=512)
        n.pool4 = L.Pooling(n.conv4_3, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv5_1, n.bn5_1, n.scale5_1, n.relu5_1 = conv_bn_scale_relu(n.pool4, num_output=512)
        n.conv5_2, n.bn5_2, n.scale5_2, n.relu5_2 = conv_bn_scale_relu(n.conv5_1, num_output=512)
        n.conv5_3, n.bn5_3, n.scale5_3, n.relu5_3 = conv_bn_scale_relu(n.conv5_2, num_output=512)
        n.pool5 = L.Pooling(n.conv5_3, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.fc6, n.relu6, n.drop6 = fc_relu_drop(n.pool5, fc_num_output=4096, dropout_ratio=0.5)
        n.fc7, n.relu7, n.drop7 = fc_relu_drop(n.fc6, fc_num_output=4096, dropout_ratio=0.5)
        n.fc8 = L.InnerProduct(n.fc7, num_output=self.classifier_num,
                               param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
                               weight_filler=dict(type='gaussian', std=0.01),
                               bias_filler=dict(type='constant', value=0))
        n.loss = L.SoftmaxWithLoss(n.fc8, n.label)
        if phase != 'TRAIN':
            n.accuracy_top1, n.accuracy_top5 = accuracy_top1_top5(n.fc8, n.label)
        return n.to_proto()

    def vgg_19_proto(self, batch_size, phase='TRAIN'):
        n = caffe.NetSpec()
        if phase == 'TRAIN':
            source_data = self.train_data
            mirror = True
        else:
            source_data = self.test_data
            mirror = False
        n.data, n.label = L.Data(source=source_data, backend=P.Data.LMDB, batch_size=batch_size, ntop=2,
                                 transform_param=dict(crop_size=224, mean_value=[104, 117, 123], mirror=mirror))
        n.conv1_1, n.relu1_1 = conv_relu(n.data, num_output=64)
        n.conv1_2, n.relu1_2 = conv_relu(n.conv1_1, num_output=64)
        n.pool1 = L.Pooling(n.conv1_2, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv2_1, n.relu2_1 = conv_relu(n.pool1, num_output=128)
        n.conv2_2, n.relu2_2 = conv_relu(n.conv2_1, num_output=128)
        n.pool2 = L.Pooling(n.conv2_2, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv3_1, n.relu3_1 = conv_relu(n.pool2, num_output=256)
        n.conv3_2, n.relu3_2 = conv_relu(n.conv3_1, num_output=256)
        n.conv3_3, n.relu3_3 = conv_relu(n.conv3_2, num_output=256)
        n.conv3_4, n.relu3_4 = conv_relu(n.conv3_3, num_output=256)
        n.pool3 = L.Pooling(n.conv3_4, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv4_1, n.relu4_1 = conv_relu(n.pool3, num_output=512)
        n.conv4_2, n.relu4_2 = conv_relu(n.conv4_1, num_output=512)
        n.conv4_3, n.relu4_3 = conv_relu(n.conv4_2, num_output=512)
        n.conv4_4, n.relu4_4 = conv_relu(n.conv4_3, num_output=512)
        n.pool4 = L.Pooling(n.conv4_4, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv5_1, n.relu5_1 = conv_relu(n.pool4, num_output=512)
        n.conv5_2, n.relu5_2 = conv_relu(n.conv5_1, num_output=512)
        n.conv5_3, n.relu5_3 = conv_relu(n.conv5_2, num_output=512)
        n.conv5_4, n.relu5_4 = conv_relu(n.conv5_3, num_output=512)
        n.pool5 = L.Pooling(n.conv5_4, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.fc6, n.relu6, n.drop6 = fc_relu_drop(n.pool5, fc_num_output=4096, dropout_ratio=0.5)
        n.fc7, n.relu7, n.drop7 = fc_relu_drop(n.fc6, fc_num_output=4096, dropout_ratio=0.5)
        n.fc8 = L.InnerProduct(n.fc7, num_output=self.classifier_num,
                               param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
                               weight_filler=dict(type='gaussian', std=0.01),
                               bias_filler=dict(type='constant', value=0))
        n.loss = L.SoftmaxWithLoss(n.fc8, n.label)
        if phase != 'TRAIN':
            n.accuracy_top1, n.accuracy_top5 = accuracy_top1_top5(n.fc8, n.label)
        return n.to_proto()

    def vgg_19_bn_proto(self, batch_size, phase='TRAIN'):
        n = caffe.NetSpec()
        if phase == 'TRAIN':
            source_data = self.train_data
            mirror = True
        else:
            source_data = self.test_data
            mirror = False
        n.data, n.label = L.Data(source=source_data, backend=P.Data.LMDB, batch_size=batch_size, ntop=2,
                                 transform_param=dict(crop_size=224, mean_value=[104, 117, 123], mirror=mirror))
        n.conv1_1, n.bn1_1, n.scale1_1, n.relu1_1 = conv_bn_scale_relu(n.data, num_output=64)
        n.conv1_2, n.bn1_2, n.scale1_2, n.relu1_2 = conv_bn_scale_relu(n.conv1_1, num_output=64)
        n.pool1 = L.Pooling(n.conv1_2, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv2_1, n.bn2_1, n.scale2_1, n.relu2_1 = conv_bn_scale_relu(n.pool1, num_output=128)
        n.conv2_2, n.bn2_2, n.scale2_2, n.relu2_2 = conv_bn_scale_relu(n.conv2_1, num_output=128)
        n.pool2 = L.Pooling(n.conv2_2, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv3_1, n.bn3_1, n.scale3_1, n.relu3_1 = conv_bn_scale_relu(n.pool2, num_output=256)
        n.conv3_2, n.bn3_2, n.scale3_2, n.relu3_2 = conv_bn_scale_relu(n.conv3_1, num_output=256)
        n.conv3_3, n.bn3_3, n.scale3_3, n.relu3_3 = conv_bn_scale_relu(n.conv3_2, num_output=256)
        n.conv3_4, n.bn3_4, n.scale3_4, n.relu3_4 = conv_bn_scale_relu(n.conv3_3, num_output=256)
        n.pool3 = L.Pooling(n.conv3_4, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv4_1, n.bn4_1, n.scale4_1, n.relu4_1 = conv_bn_scale_relu(n.pool3, num_output=512)
        n.conv4_2, n.bn4_2, n.scale4_2, n.relu4_2 = conv_bn_scale_relu(n.conv4_1, num_output=512)
        n.conv4_3, n.bn4_3, n.scale4_3, n.relu4_3 = conv_bn_scale_relu(n.conv4_2, num_output=512)
        n.conv4_4, n.bn4_4, n.scale4_4, n.relu4_4 = conv_bn_scale_relu(n.conv4_3, num_output=512)
        n.pool4 = L.Pooling(n.conv4_4, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.conv5_1, n.bn5_1, n.scale5_1, n.relu5_1 = conv_bn_scale_relu(n.pool4, num_output=512)
        n.conv5_2, n.bn5_2, n.scale5_2, n.relu5_2 = conv_bn_scale_relu(n.conv5_1, num_output=512)
        n.conv5_3, n.bn5_3, n.scale5_3, n.relu5_3 = conv_bn_scale_relu(n.conv5_2, num_output=512)
        n.conv5_4, n.bn5_4, n.scale5_4, n.relu5_4 = conv_bn_scale_relu(n.conv5_3, num_output=512)
        n.pool5 = L.Pooling(n.conv5_4, pool=P.Pooling.MAX, kernel_size=2, stride=2)
        n.fc6, n.relu6, n.drop6 = fc_relu_drop(n.pool5, fc_num_output=4096, dropout_ratio=0.5)
        n.fc7, n.relu7, n.drop7 = fc_relu_drop(n.fc6, fc_num_output=4096, dropout_ratio=0.5)
        n.fc8 = L.InnerProduct(n.fc7, num_output=self.classifier_num,
                               param=[dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
                               weight_filler=dict(type='gaussian', std=0.01),
                               bias_filler=dict(type='constant', value=0))
        n.loss = L.SoftmaxWithLoss(n.fc8, n.label)
        if phase != 'TRAIN':
            n.accuracy_top1, n.accuracy_top5 = accuracy_top1_top5(n.fc8, n.label)
        return n.to_proto()
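
# Illustrative usage sketch (LMDB paths, class count and batch sizes are
# assumptions): serialise one of the generated network definitions to a Caffe
# prototxt file.
if __name__ == '__main__':
    net = VggNet('examples/train_lmdb', 'examples/val_lmdb', num_output=1000)
    with open('vgg16_train.prototxt', 'w') as f:
        f.write(str(net.vgg_16_proto(batch_size=64, phase='TRAIN')))
    with open('vgg16_test.prototxt', 'w') as f:
        f.write(str(net.vgg_16_proto(batch_size=50, phase='TEST')))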
| 54.106719 | 112 | 0.633355 | 2,358 | 13,689 | 3.402036 | 0.060645 | 0.090875 | 0.042508 | 0.057966 | 0.919471 | 0.913114 | 0.908626 | 0.908626 | 0.905011 | 0.886437 | 0 | 0.094988 | 0.236321 | 13,689 | 252 | 113 | 54.321429 | 0.672374 | 0.003506 | 0 | 0.816425 | 0 | 0 | 0.012615 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043478 | false | 0.019324 | 0.014493 | 0 | 0.101449 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ef260c705a0104abd786285bdaab70dc8221bcbb | 330 | py | Python | backend/__init__.py | Lorioux/donatecare | cce4e40e4859a7fde0f1afa800b1080af728b230 | [
"Apache-2.0"
] | null | null | null | backend/__init__.py | Lorioux/donatecare | cce4e40e4859a7fde0f1afa800b1080af728b230 | [
"Apache-2.0"
] | 15 | 2021-07-09T09:32:23.000Z | 2021-07-21T07:45:33.000Z | backend/__init__.py | Lorioux/donatecare | cce4e40e4859a7fde0f1afa800b1080af728b230 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from backend.databases.config import *
from backend.booking.models import *
from backend.registration.models import *
from backend.scheduling.models import *
from backend.authentication.models import *
from backend.authentication import token_required
from backend import settings
| 33 | 50 | 0.824242 | 40 | 330 | 6.65 | 0.375 | 0.289474 | 0.383459 | 0.345865 | 0.278195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.127273 | 330 | 9 | 51 | 36.666667 | 0.923611 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
32482dc21506e4ccbcb07378aaf4ebf05b9a1b2f | 1,291 | py | Python | dev/google-submit.py | broxeph/ameryn | e1289c280ca865ec84625b712adc52c536b4b174 | [
"MIT"
] | null | null | null | dev/google-submit.py | broxeph/ameryn | e1289c280ca865ec84625b712adc52c536b4b174 | [
"MIT"
] | null | null | null | dev/google-submit.py | broxeph/ameryn | e1289c280ca865ec84625b712adc52c536b4b174 | [
"MIT"
] | null | null | null | def answer(n):
def convert_base(in_num, out_base):
output = []
for i in range(40):
if out_base**(i + 1) > in_num:
output.append(str(in_num / out_base**i))
in_num = in_num % out_base**i
for j in range(i - 1, -1, -1):
if out_base**j <= in_num:
output.append(str(in_num / out_base**j))
else:
output.append('0')
in_num = in_num % out_base**j
return ''.join(output)
for i in range(2, 40):
if str(convert_base(n, i)) == str(convert_base(n, i))[::-1]:
return i
def convert_base(in_num, out_base):
output = []
for i in range(40):
if out_base**(i + 1) > in_num:
output.append(str(in_num / out_base**i))
in_num = in_num % out_base**i
for j in range(i - 1, -1, -1):
if out_base**j <= in_num:
output.append(str(in_num / out_base**j))
else:
output.append('0')
in_num = in_num % out_base**j
return ''.join(output)
n = 111
print answer(n)
print '{0} is {1} in base {2}.'.format(n, convert_base(n, answer(n)), answer(n)) | 35.861111 | 80 | 0.464756 | 186 | 1,291 | 3.026882 | 0.145161 | 0.159858 | 0.142096 | 0.213144 | 0.843694 | 0.767318 | 0.767318 | 0.767318 | 0.767318 | 0.767318 | 0 | 0.030809 | 0.396592 | 1,291 | 36 | 80 | 35.861111 | 0.691913 | 0 | 0 | 0.787879 | 0 | 0 | 0.01935 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.060606 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
088bfa9a4e6a04d183737c39a6d131e2cb00a284 | 17,035 | py | Python | restApp/models.py | ibamacsr/painelmma_api | a11a6cd63e312f09f445b139fcff8c11ab383764 | [
"MIT"
] | null | null | null | restApp/models.py | ibamacsr/painelmma_api | a11a6cd63e312f09f445b139fcff8c11ab383764 | [
"MIT"
] | null | null | null | restApp/models.py | ibamacsr/painelmma_api | a11a6cd63e312f09f445b139fcff8c11ab383764 | [
"MIT"
] | null | null | null | # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.contrib.gis.db import models
from django.conf import settings
from .utils import get_reverse_month
class DailyAlertaAwifs(models.Model):
    objectid = models.AutoField(primary_key=True)
    mes = models.CharField(max_length=10, blank=True, null=True)
    ano = models.SmallIntegerField(blank=True, null=True)
    area_km2 = models.DecimalField(max_digits=38, decimal_places=2, blank=True, null=True)
    dominio = models.CharField(max_length=200, blank=True, null=True)
    tipo = models.CharField(max_length=15, blank=True, null=True)
    uf = models.SmallIntegerField(blank=True, null=True)
    estado = models.CharField(max_length=2, blank=True, null=True)
    data_imagem = models.DateTimeField(blank=True, null=True)
    geom = models.GeometryField(blank=True, null=True)
    centroide = models.GeometryField(blank=True, null=True)
    mesid = models.TextField(blank=True, null=True)
    estagio = models.CharField(max_length=50, blank=True, null=True)
    periodo_prodes = models.CharField(max_length=10, blank=True, null=True)
    dominio_pi = models.CharField(max_length=255, blank=True, null=True)
    dominio_us = models.CharField(max_length=255, blank=True, null=True)
    dominio_ti = models.CharField(max_length=255, blank=True, null=True)
    dominio_ap = models.CharField(max_length=255, blank=True, null=True)
    dominio_as = models.CharField(max_length=255, blank=True, null=True)
    dominio_fp = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        try:
            db_table = '%s"."vw_alerta_awifs' % settings.SCHEMA
            managed = False
        except AttributeError:
            pass


class DailyAlertaDeter(models.Model):
    objectid = models.AutoField(primary_key=True)
    mes = models.CharField(max_length=10, blank=True, null=True)
    ano = models.SmallIntegerField(blank=True, null=True)
    area_km2 = models.DecimalField(max_digits=38, decimal_places=2, blank=True, null=True)
    dominio = models.CharField(max_length=200, blank=True, null=True)
    tipo = models.CharField(max_length=15, blank=True, null=True)
    uf = models.SmallIntegerField(blank=True, null=True)
    estado = models.CharField(max_length=2, blank=True, null=True)
    data_imagem = models.DateTimeField(blank=True, null=True)
    geom = models.GeometryField(blank=True, null=True)
    centroide = models.GeometryField(blank=True, null=True)
    mesid = models.TextField(blank=True, null=True)
    estagio = models.CharField(max_length=50, blank=True, null=True)
    periodo_prodes = models.CharField(max_length=10, blank=True, null=True)
    dominio_pi = models.CharField(max_length=255, blank=True, null=True)
    dominio_us = models.CharField(max_length=255, blank=True, null=True)
    dominio_ti = models.CharField(max_length=255, blank=True, null=True)
    dominio_ap = models.CharField(max_length=255, blank=True, null=True)
    dominio_as = models.CharField(max_length=255, blank=True, null=True)
    dominio_fp = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        try:
            db_table = '%s"."vw_alerta_deter' % settings.SCHEMA
            managed = False
        except AttributeError:
            pass

    def __str__(self):
        return "mes: %s, ano: %s, mesid: %s" % (self.mes, self.ano, self.mesid)
class DailyAlertaLandsat(models.Model):
    objectid = models.AutoField(primary_key=True)
    mes = models.CharField(max_length=10, blank=True, null=True)
    ano = models.SmallIntegerField(blank=True, null=True)
    area_km2 = models.DecimalField(max_digits=38, decimal_places=2, blank=True, null=True)
    dominio = models.CharField(max_length=200, blank=True, null=True)
    tipo = models.CharField(max_length=15, blank=True, null=True)
    uf = models.SmallIntegerField(blank=True, null=True)
    estado = models.CharField(max_length=2, blank=True, null=True)
    data_imagem = models.DateTimeField(blank=True, null=True)
    geom = models.GeometryField(blank=True, null=True)
    centroide = models.GeometryField(blank=True, null=True)
    mesid = models.TextField(blank=True, null=True)
    estagio = models.CharField(max_length=50, blank=True, null=True)
    periodo_prodes = models.CharField(max_length=10, blank=True, null=True)

    class Meta:
        try:
            db_table = '%s"."vw_alerta_indicar' % settings.SCHEMA
            managed = False
        except AttributeError:
            pass


class DailyAlertaDeterQualif(models.Model):
    objectid = models.AutoField(primary_key=True)
    periodo_prodes = models.CharField(max_length=10, blank=True, null=True)
    mes = models.CharField(max_length=10, blank=True, null=True)
    ano = models.SmallIntegerField(blank=True, null=True)
    mes_ano = models.CharField(max_length=6, blank=True, null=True)
    cicatriz_fogo = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    corte_raso_deter = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    degradacao_deter = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    alta = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    leve = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    moderada = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    falso_positivo = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    nao_avaliado = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    deter_total = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    total_avaliado = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    porc_area_avaliada = models.SmallIntegerField(blank=True, null=True)
    mesid = models.TextField(blank=True, null=True)

    class Meta:
        try:
            db_table = '%s"."vw_deter_qualificado' % settings.SCHEMA
            managed = False
        except AttributeError:
            pass

    def __str__(self):
        return str(self.mes) + "/" + str(self.ano)


class PublicAlertaDeterQualif(models.Model):
    objectid = models.AutoField(primary_key=True)
    periodo_prodes = models.CharField(max_length=10, blank=True, null=True)
    mes = models.CharField(max_length=10, blank=True, null=True)
    ano = models.SmallIntegerField(blank=True, null=True)
    mes_ano = models.CharField(max_length=6, blank=True, null=True)
    cicatriz_fogo = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    corte_raso_deter = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    degradacao_deter = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    alta = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    leve = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    moderada = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    falso_positivo = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    nao_avaliado = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    deter_total = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    total_avaliado = models.DecimalField(max_digits=6, decimal_places=2, blank=True, null=True)
    porc_area_avaliada = models.SmallIntegerField(blank=True, null=True)
    mesid = models.TextField(blank=True, null=True)

    class Meta:
        try:
            db_table = '%s"."vw_publica_deter_qualificado' % settings.SCHEMA
            managed = False
        except AttributeError:
            pass

    def __str__(self):
        return str(self.mes) + '/' + str(self.ano)


class PublicAlertaDeter(models.Model):
    objectid = models.AutoField(primary_key=True)
    mes = models.CharField(max_length=10, blank=True, null=True)
    ano = models.SmallIntegerField(blank=True, null=True)
    area_km2 = models.DecimalField(max_digits=38, decimal_places=8, blank=True, null=True)
    area_ha = models.DecimalField(max_digits=38, decimal_places=8, blank=True, null=True)
    municipio = models.CharField(max_length=200, blank=True, null=True)
    dominio = models.CharField(max_length=200, blank=True, null=True)
    tipo = models.CharField(max_length=15, blank=True, null=True)
    quinzena = models.CharField(max_length=5, blank=True, null=True)
    id_des = models.CharField(unique=True, max_length=16, blank=True, null=True)
    ai = models.IntegerField(blank=True, null=True)
    tei = models.IntegerField(blank=True, null=True)
    processo = models.CharField(max_length=20, blank=True, null=True)
    url = models.CharField(max_length=200, blank=True, null=True)
    vistoria = models.CharField(max_length=100, blank=True, null=True)
    resp_vistoria = models.CharField(max_length=150, blank=True, null=True)
    longitude = models.CharField(max_length=17, blank=True, null=True)
    latitude = models.CharField(max_length=17, blank=True, null=True)
    uf = models.SmallIntegerField(blank=True, null=True)
    estado = models.CharField(max_length=2, blank=True, null=True)
    obs = models.CharField(max_length=250, blank=True, null=True)
    id_tablet = models.CharField(max_length=10, blank=True, null=True)
    data_vist = models.CharField(max_length=50, blank=True, null=True)
    globalid = models.CharField(max_length=50, blank=True, null=True)
    dado_final = models.CharField(max_length=1, blank=True, null=True)
    estagio = models.CharField(max_length=50, blank=True, null=True)
    data_imagem = models.DateTimeField(blank=True, null=True)
    geom = models.GeometryField(blank=True, null=True)
    veg_sec = models.CharField(max_length=100, blank=True, null=True)
    periodo_prodes = models.CharField(max_length=10, blank=True, null=True)
    mesid = models.TextField(blank=True, null=True)
    dominio_pi = models.CharField(max_length=255, blank=True, null=True)
dominio_us = models.CharField(max_length=255, blank=True, null=True)
dominio_ti = models.CharField(max_length=255, blank=True, null=True)
dominio_ap = models.CharField(max_length=255, blank=True, null=True)
dominio_as = models.CharField(max_length=255, blank=True, null=True)
dominio_fp = models.CharField(max_length=255, blank=True, null=True)
class Meta:
try:
db_table = '%s\".\"vw_publica_alerta_deter_por_periodo' % settings.SCHEMA
managed = False
except AttributeError:
pass
class TaxaProdes(models.Model):
ano_prodes = models.CharField(max_length=9, blank=True, null=False, primary_key=True)
ac = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
am = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
ap = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
ma = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
mt = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
pa = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
ro = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
rr = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
to = models.DecimalField(max_digits=7, decimal_places=2, blank=True, null=False)
class Meta:
managed = False
db_table = 'public\".\"taxa_prodes'
def total(self):
return self.ac + self.am + self.ap + self.ma + self.mt + self.pa + self.ro + self.rr + self.to
def attributes(self):
return [p for p in dir(self) if len(p) == 2 and not p == 'pk']
def __str__(self):
return self.ano_prodes
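# Illustrative usage (hypothetical record, not from the original source):
#   taxa = TaxaProdes.objects.get(ano_prodes='2019-2020')
#   taxa.total()       # sums the nine state rates (ac .. to) for that PRODES year
#   taxa.attributes()  # lists the two-letter state fields via dir()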
class TaxaNuvens(models.Model):
objectid = models.AutoField(primary_key=True)
mes = models.CharField(max_length=10, blank=True, null=True)
ano = models.SmallIntegerField(blank=True, null=True)
uf = models.CharField(max_length=2, blank=True, null=True)
area_km2 = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
porc_area_km2 = models.DecimalField(max_digits=2, decimal_places=0, blank=True, null=True)
dat_cadastro = models.DateTimeField(blank=True, null=True)
class Meta:
try:
db_table = '%s\".\"taxa_nuvem' % settings.SCHEMA
managed = False
except AttributeError:
pass
def periodo_prodes(self):
if get_reverse_month(self.mes) > 7:
return str(self.ano) + "-" + str(int(self.ano) + 1)
else:
return str(int(self.ano) - 1) + "-" + str(self.ano)
def __str__(self):
return str(self.mes) + "/" + str(self.ano) + "('" + str(self.porc_area_km2) + "')"
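# Illustrative convention (assumption: get_reverse_month maps the stored month
# name to its 1-12 index): the PRODES year runs August-July, so periodo_prodes()
# maps a record with ano=2019 and a month after July to '2019-2020', and an
# earlier month to '2018-2019'.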
class TaxaNuvensAml(models.Model):
objectid = models.AutoField(primary_key=True)
data_src = models.DateTimeField(blank=True, null=True)
f_area = models.DecimalField(max_digits=38, decimal_places=8, blank=True, null=True)
porc_area_km2 = models.DecimalField(max_digits=38, decimal_places=8, blank=True, null=True, db_column='percent')
mes = models.TextField(blank=True, null=True, db_column='mes_maiusc')
ano = models.SmallIntegerField(blank=True, null=True)
# mes_convert = get_reverse_month(str(mes))
class Meta:
try:
db_table = '%s\".\"vw_taxa_nuvem_aml' % settings.SCHEMA
managed = False
except AttributeError:
pass
# def mes(self):
# return get_reverse_month(self.mes_maiusc)
# def porc_area_km2(self):
# return round(self.percent * 100)
def prodes_filter(queryset, year, month):
    if month > 7:
        return queryset.filter(ano__gte=year - 1, ano__lte=year + 1)
    # Assumed completion: the original defined an unused list of month strings
    # and fell through to an implicit None here; returning the queryset
    # unfiltered is a guess at the intended behaviour.
    return queryset
class CruzamentoAlerta(models.Model):
objectid = models.AutoField(primary_key=True)
mes = models.CharField(max_length=10, blank=True, null=True)
ano = models.SmallIntegerField(blank=True, null=True)
area_km2 = models.DecimalField(max_digits=38, decimal_places=8, blank=True, null=True)
area_ha = models.DecimalField(max_digits=38, decimal_places=8, blank=True, null=True)
municipio = models.CharField(max_length=200, blank=True, null=True)
dominio = models.CharField(max_length=200, blank=True, null=True)
tipo = models.CharField(max_length=15, blank=True, null=True)
quinzena = models.CharField(max_length=5, blank=True, null=True)
id_des = models.CharField(unique=True, max_length=16, blank=True, null=True)
ai = models.IntegerField(blank=True, null=True)
tei = models.IntegerField(blank=True, null=True)
processo = models.CharField(max_length=20, blank=True, null=True)
url = models.CharField(max_length=200, blank=True, null=True)
vistoria = models.CharField(max_length=100, blank=True, null=True)
resp_vistoria = models.CharField(max_length=150, blank=True, null=True)
longitude = models.CharField(max_length=17, blank=True, null=True)
latitude = models.CharField(max_length=17, blank=True, null=True)
uf = models.SmallIntegerField(blank=True, null=True)
estado = models.CharField(max_length=2, blank=True, null=True)
obs = models.CharField(max_length=250, blank=True, null=True)
id_tablet = models.CharField(max_length=10, blank=True, null=True)
data_vist = models.CharField(max_length=50, blank=True, null=True)
globalid = models.CharField(max_length=50, blank=True, null=True)
dado_final = models.CharField(max_length=1, blank=True, null=True)
estagio = models.CharField(max_length=50, blank=True, null=True)
data_imagem = models.DateTimeField(blank=True, null=True)
geom = models.GeometryField(blank=True, null=True)
veg_sec = models.CharField(max_length=100, blank=True, null=True)
dominio_pi = models.CharField(max_length=255, blank=True, null=True)
dominio_us = models.CharField(max_length=255, blank=True, null=True)
dominio_ti = models.CharField(max_length=255, blank=True, null=True)
dominio_ap = models.CharField(max_length=255, blank=True, null=True)
dominio_as = models.CharField(max_length=255, blank=True, null=True)
dominio_fp = models.CharField(max_length=255, blank=True, null=True)
class Meta:
try:
db_table = '%s\".\"alerta' % settings.SCHEMA
managed = False
except AttributeError:
pass | 50.850746 | 117 | 0.715057 | 2,378 | 17,035 | 4.975189 | 0.10513 | 0.132364 | 0.191193 | 0.235652 | 0.881836 | 0.873214 | 0.864339 | 0.858254 | 0.827403 | 0.824106 | 0 | 0.024272 | 0.163193 | 17,035 | 335 | 118 | 50.850746 | 0.805682 | 0.037981 | 0 | 0.783394 | 1 | 0 | 0.019971 | 0.013314 | 0 | 0 | 0 | 0 | 0 | 1 | 0.032491 | false | 0.032491 | 0.01444 | 0.025271 | 0.815884 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
08b6644228af778db9585bcca30c4666626baf3d | 418 | py | Python | terrascript/provider/fakewebservices.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 507 | 2017-07-26T02:58:38.000Z | 2022-01-21T12:35:13.000Z | terrascript/provider/fakewebservices.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 135 | 2017-07-20T12:01:59.000Z | 2021-10-04T22:25:40.000Z | terrascript/provider/fakewebservices.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 81 | 2018-02-20T17:55:28.000Z | 2022-01-31T07:08:40.000Z | # terrascript/provider/fakewebservices.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:16:12 UTC)
#
# For imports without namespace, e.g.
#
# >>> import terrascript.provider.fakewebservices
#
# instead of
#
# >>> import terrascript.provider.hashicorp.fakewebservices
#
# This is only available for 'official' and 'partner' providers.
from terrascript.provider.hashicorp.fakewebservices import *
| 27.866667 | 73 | 0.767943 | 49 | 418 | 6.55102 | 0.714286 | 0.23676 | 0.211838 | 0.267913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032698 | 0.12201 | 418 | 14 | 74 | 29.857143 | 0.841962 | 0.791866 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
3ebf2201c30fbf06d422b2f7840bac93bee8c3bf | 293 | py | Python | python3/arr-mathematics/sol.py | wahoyuz/HackerRank | f342de4b95c97458b11dfa1fe3dca52f176f627e | [
"MIT"
] | null | null | null | python3/arr-mathematics/sol.py | wahoyuz/HackerRank | f342de4b95c97458b11dfa1fe3dca52f176f627e | [
"MIT"
] | null | null | null | python3/arr-mathematics/sol.py | wahoyuz/HackerRank | f342de4b95c97458b11dfa1fe3dca52f176f627e | [
"MIT"
] | null | null | null | import numpy
if __name__=="__main__":
N,M=map(int, input().split())
A=numpy.array([list(map(int, input().split())) for n in range(N)])
B=numpy.array([list(map(int, input().split())) for n in range(N)])
print(A + B)
print(A - B)
print(A * B)
print(A // B)
print(A % B)
print(A ** B) | 20.928571 | 67 | 0.59727 | 55 | 293 | 3.036364 | 0.345455 | 0.215569 | 0.251497 | 0.359281 | 0.754491 | 0.754491 | 0.754491 | 0.754491 | 0.754491 | 0.754491 | 0 | 0 | 0.16041 | 293 | 14 | 68 | 20.928571 | 0.678862 | 0 | 0 | 0 | 0 | 0 | 0.027211 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.090909 | 0.545455 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
f5cd2e8e8a32ca9b09ded0821d84218d80b4b94f | 160 | py | Python | chinese/admin.py | DiegoBrian/Flashcards | 1d80dbe2943ce33964095ea856f251ab5c1725d2 | [
"MIT"
] | 1 | 2018-11-21T11:17:51.000Z | 2018-11-21T11:17:51.000Z | chinese/admin.py | DiegoBrian/Flashcards | 1d80dbe2943ce33964095ea856f251ab5c1725d2 | [
"MIT"
] | 15 | 2018-12-07T10:50:14.000Z | 2022-03-11T23:33:32.000Z | chinese/admin.py | DiegoBrian/Flashcards | 1d80dbe2943ce33964095ea856f251ab5c1725d2 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import *
admin.site.register(Sentence)
admin.site.register(User_Sentence)
admin.site.register(User_TimeSettings) | 22.857143 | 38 | 0.83125 | 22 | 160 | 5.954545 | 0.5 | 0.206107 | 0.389313 | 0.381679 | 0.442748 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075 | 160 | 7 | 38 | 22.857143 | 0.885135 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.4 | 0 | 0.4 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
eb190c811a6551b0e7f3f91852c72fc8296bbfdb | 9,320 | py | Python | ubelt/tests/test_format.py | russelldj/ubelt | c82ae8c8ead66f0c406d28d680430f7df00bb32b | [
"Apache-2.0"
] | null | null | null | ubelt/tests/test_format.py | russelldj/ubelt | c82ae8c8ead66f0c406d28d680430f7df00bb32b | [
"Apache-2.0"
] | null | null | null | ubelt/tests/test_format.py | russelldj/ubelt | c82ae8c8ead66f0c406d28d680430f7df00bb32b | [
"Apache-2.0"
] | null | null | null | import ubelt as ub
def test_newlines():
import ubelt as ub
dict_ = {
'k1': [[1, 2, 3], [4, 5, 6]],
'k2': [[1, 2, 3], [4, 5, 6]],
}
assert ub.repr2(dict_, nl=1) != ub.repr2(dict_, nl=2)
assert ub.repr2(dict_, nl=2) != ub.repr2(dict_, nl=3)
assert ub.repr2(dict_, nl=3) == ub.repr2(dict_, nl=4)
assert ub.repr2(dict_, nl=1) == ub.codeblock(
'''
{
'k1': [[1, 2, 3], [4, 5, 6]],
'k2': [[1, 2, 3], [4, 5, 6]],
}
''')
assert ub.repr2(dict_, nl=2) == ub.codeblock(
'''
{
'k1': [
[1, 2, 3],
[4, 5, 6],
],
'k2': [
[1, 2, 3],
[4, 5, 6],
],
}
''')
def test_compact_brace():
import ubelt as ub
def _nest(d, w):
if d == 0:
return {}
else:
return {'n{}'.format(d): _nest(d - 1, w + 1),
'mm{}'.format(d): _nest(d - 1, w + 1)}
dict_ = _nest(d=3, w=1)
result = ub.repr2(dict_, nl=4, precision=2, compact_brace=0)
print(result)
assert result == ub.codeblock(
'''
{
'mm3': {
'mm2': {
'mm1': {},
'n1': {},
},
'n2': {
'mm1': {},
'n1': {},
},
},
'n3': {
'mm2': {
'mm1': {},
'n1': {},
},
'n2': {
'mm1': {},
'n1': {},
},
},
}
''')
result = ub.repr2(dict_, nl=4, precision=2, compact_brace=1)
print(result)
assert result == ub.codeblock(
'''
{'mm3': {'mm2': {'mm1': {},
'n1': {},},
'n2': {'mm1': {},
'n1': {},},},
'n3': {'mm2': {'mm1': {},
'n1': {},},
'n2': {'mm1': {},
'n1': {},},},}
''')
def test_empty():
import ubelt as ub
assert ub.repr2(list()) == '[]'
assert ub.repr2(dict()) == '{}'
assert ub.repr2(set()) == '{}'
assert ub.repr2(tuple()) == '()'
assert ub.repr2(dict(), explicit=1) == 'dict()'
    # Even when nobr is on, still include braces when the input is empty
assert ub.repr2(list(), nobr=1) == '[]'
assert ub.repr2(dict(), nobr=1) == '{}'
assert ub.repr2(set(), nobr=1) == '{}'
assert ub.repr2(tuple(), nobr=1) == '()'
assert ub.repr2(dict(), nobr=1, explicit=1) == 'dict()'
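def test_nobr_nonempty():
    # Hypothetical companion check, not in the original suite: for non-empty
    # containers, nobr=1 is expected to drop the outer braces entirely.
    import ubelt as ub
    text = ub.repr2([1, 2], nobr=1, nl=0)
    assert '[' not in text and ']' not in text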
def test_list_of_numpy():
import numpy as np
import ubelt as ub
data = [
np.zeros((3, 3), dtype=np.int32),
np.zeros((3, 10), dtype=np.int32),
np.zeros((3, 20), dtype=np.int32),
np.zeros((3, 30), dtype=np.int32),
]
string = ub.repr2(data, nl=2)
print(string)
assert repr(data) == repr(eval(string)), 'should produce eval-able code'
assert string == ub.codeblock(
'''
[
np.array([[0, 0, 0],
[0, 0, 0],
[0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
]
''')
string = ub.repr2(data, max_line_width=10000, nl=2)
print(string)
assert string == ub.codeblock(
'''
[
np.array([[0, 0, 0],
[0, 0, 0],
[0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
]
''')
string = ub.repr2(data, nl=1)
print(string)
assert string == ub.codeblock(
'''
[
np.array([[0, 0, 0],[0, 0, 0],[0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
]
'''
)
string = ub.repr2(data, nl=0)
print(string)
assert string == ub.codeblock(
'''
[np.array([[0, 0, 0],[0, 0, 0],[0, 0, 0]], dtype=np.int32), np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32), np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32), np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0, 0, 0, 0, 0, 0, 0, 0, 0],[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32)]
'''
)
def test_dict_of_numpy():
import numpy as np
data = ub.odict(zip(
['one', 'two', 'three', 'four'],
[
np.zeros((3, 3), dtype=np.int32),
np.zeros((3, 10), dtype=np.int32),
np.zeros((3, 20), dtype=np.int32),
np.zeros((3, 30), dtype=np.int32),
]))
string = ub.repr2(data, nl=2)
print(string)
assert string == ub.codeblock(
'''
{
'one': np.array([[0, 0, 0],
[0, 0, 0],
[0, 0, 0]], dtype=np.int32),
'two': np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
'three': np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
'four': np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.int32),
}
''')
def test_numpy_scalar_precision():
import numpy as np
text = ub.repr2(np.float32(3.333333), precision=2)
assert text == '3.33'
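def test_precision_in_container():
    # Hypothetical companion check, not in the original suite: precision should
    # also apply to numpy scalars nested inside containers.
    import numpy as np
    import ubelt as ub
    text = ub.repr2([np.float32(3.333333)], precision=2, nl=0)
    assert '3.33' in text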
def test_repr2_tuple_keys():
data = {
('one', 'two'): 100,
('three', 'four'): 200,
}
string = ub.repr2(data)
print(string)
    assert string == ub.codeblock(
'''
{
(
'one',
'two',
): 100,
(
'three',
'four',
): 200,
}
''')
data = {
('one', 'two'): 100,
('three', 'four'): 200,
}
string = ub.repr2(data, sk=1)
print(string)
    assert string == ub.codeblock(
'''
{
('one', 'two'): 100,
('three', 'four'): 200,
}
''')
| 37.131474 | 700 | 0.342918 | 1,565 | 9,320 | 2.019169 | 0.060064 | 0.585443 | 0.859177 | 1.120253 | 0.822152 | 0.782595 | 0.768671 | 0.74462 | 0.721203 | 0.721203 | 0 | 0.223174 | 0.424034 | 9,320 | 250 | 701 | 37.28 | 0.365499 | 0.007082 | 0 | 0.403846 | 0 | 0 | 0.037049 | 0 | 0 | 0 | 0 | 0 | 0.230769 | 1 | 0.076923 | false | 0 | 0.076923 | 0 | 0.173077 | 0.086538 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
eb3adceb9ae1eda4fa45afab96b1823e9f19f47f | 235 | py | Python | descriptive_fruit/generator.py | cncplyr/descriptive_fruit | 5893087899141d96dbc1b79f7ebd270f906dcce8 | [
"Apache-2.0"
] | 1 | 2017-11-06T14:00:29.000Z | 2017-11-06T14:00:29.000Z | descriptive_fruit/generator.py | cncplyr/descriptive_fruit | 5893087899141d96dbc1b79f7ebd270f906dcce8 | [
"Apache-2.0"
] | 1 | 2017-11-30T09:50:57.000Z | 2017-11-30T09:50:57.000Z | descriptive_fruit/generator.py | cncplyr/descriptive_fruit | 5893087899141d96dbc1b79f7ebd270f906dcce8 | [
"Apache-2.0"
] | null | null | null | from random import choice
from descriptive_fruit.adjective import ADJECTIVES
from descriptive_fruit.fruit import FRUITS
def generate(separator=' '):
return choice(ADJECTIVES) + separator + choice(FRUITS).replace(' ', separator)
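# Illustrative usage (hypothetical output; actual words depend on ADJECTIVES and
# FRUITS): generate('-') might return e.g. 'juicy-dragon-fruit', since spaces
# inside multi-word fruit names are also replaced by the separator.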
| 26.111111 | 82 | 0.787234 | 27 | 235 | 6.777778 | 0.518519 | 0.163934 | 0.218579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12766 | 235 | 8 | 83 | 29.375 | 0.892683 | 0 | 0 | 0 | 1 | 0 | 0.008511 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.6 | 0.2 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
de1d9a9e45a9503e323fb6214edee1143b1ed62e | 31,760 | py | Python | sagan/layers.py | DeepTrickLab/Self-Attention-GAN | a6fa4f0d7916e0721c9797adec1ab5d9c6b76636 | [
"MIT"
] | 2 | 2020-09-18T03:45:24.000Z | 2021-11-18T11:01:52.000Z | sagan/layers.py | DeepTrickLab/Self-Attention-GAN | a6fa4f0d7916e0721c9797adec1ab5d9c6b76636 | [
"MIT"
] | null | null | null | sagan/layers.py | DeepTrickLab/Self-Attention-GAN | a6fa4f0d7916e0721c9797adec1ab5d9c6b76636 | [
"MIT"
] | null | null | null | import tensorflow as tf
from tensorflow.keras import layers
class SpectralNormalization(tf.keras.layers.Wrapper):
"""This wrapper reparameterizes a layer by decoupling the weight's
magnitude and direction.
This speeds up convergence by improving the
conditioning of the optimization problem.
Weight Normalization: A Simple Reparameterization to Accelerate
Training of Deep Neural Networks: https://arxiv.org/abs/1602.07868
Tim Salimans, Diederik P. Kingma (2016)
WeightNormalization wrapper works for keras and tf layers.
```python
net = WeightNormalization(
tf.keras.layers.Conv2D(2, 2, activation='relu'),
input_shape=(32, 32, 3),
data_init=True)(x)
net = WeightNormalization(
tf.keras.layers.Conv2D(16, 5, activation='relu'),
data_init=True)(net)
net = WeightNormalization(
tf.keras.layers.Dense(120, activation='relu'),
data_init=True)(net)
net = WeightNormalization(
tf.keras.layers.Dense(n_classes),
data_init=True)(net)
```
Arguments:
layer: a layer instance.
data_init: If `True` use data dependent variable initialization
Raises:
ValueError: If not initialized with a `Layer` instance.
ValueError: If `Layer` does not contain a `kernel` of weights
NotImplementedError: If `data_init` is True and running graph execution
"""
def __init__(self, layer, data_init=True, **kwargs):
super(SpectralNormalization, self).__init__(layer, **kwargs)
self.data_init = data_init
self._track_trackable(layer, name='layer')
self._init_critical_section = tf.CriticalSection(name='init_mutex')
self.is_rnn = isinstance(self.layer, tf.keras.layers.RNN)
if self.data_init and self.is_rnn:
logging.warning(
"WeightNormalization: Using `data_init=True` with RNNs "
"is advised against by the paper. Use `data_init=False`.")
def build(self, input_shape):
"""Build `Layer`"""
input_shape = tf.TensorShape(input_shape)
self.input_spec = tf.keras.layers.InputSpec(
shape=[None] + input_shape[1:])
if not self.layer.built:
self.layer.build(input_shape)
kernel_layer = self.layer.cell if self.is_rnn else self.layer
if not hasattr(kernel_layer, 'kernel'):
raise ValueError('`WeightNormalization` must wrap a layer that'
' contains a `kernel` for weights')
if self.is_rnn:
kernel = kernel_layer.recurrent_kernel
else:
kernel = kernel_layer.kernel
# The kernel's filter or unit dimension is -1
self.layer_depth = int(kernel.shape[-1])
self.kernel_norm_axes = list(range(kernel.shape.rank - 1))
self.g = self.add_weight(
name='g',
shape=(self.layer_depth,),
initializer='ones',
dtype=kernel.dtype,
trainable=True,
synchronization=tf.VariableSynchronization.AUTO,
aggregation=tf.compat.v1.VariableAggregation.MEAN
)
self.v = kernel
self._initialized = self.add_weight(
name='initialized',
shape=None,
initializer='zeros',
dtype=tf.dtypes.bool,
trainable=False)
if self.data_init:
# Used for data initialization in self._data_dep_init.
with tf.name_scope('data_dep_init'):
layer_config = tf.keras.layers.serialize(self.layer)
layer_config['config']['trainable'] = False
self._naked_clone_layer = tf.keras.layers.deserialize(
layer_config)
self._naked_clone_layer.build(input_shape)
self._naked_clone_layer.set_weights(self.layer.get_weights())
if not self.is_rnn:
self._naked_clone_layer.activation = None
self.built = True
def call(self, inputs):
"""Call `Layer`"""
def _do_nothing():
return tf.identity(self.g)
def _update_weights():
# Ensure we read `self.g` after _update_weights.
with tf.control_dependencies(self._initialize_weights(inputs)):
return tf.identity(self.g)
g = self._init_critical_section.execute(lambda: tf.cond(
self._initialized, _do_nothing, _update_weights))
with tf.name_scope('compute_weights'):
# Replace kernel by normalized weight variable.
kernel = tf.nn.l2_normalize(self.v, axis=self.kernel_norm_axes) * g
if self.is_rnn:
self.layer.cell.recurrent_kernel = kernel
update_kernel = tf.identity(self.layer.cell.recurrent_kernel)
else:
self.layer.kernel = kernel
update_kernel = tf.identity(self.layer.kernel)
# Ensure we calculate result after updating kernel.
with tf.control_dependencies([update_kernel]):
outputs = self.layer(inputs)
return outputs
def compute_output_shape(self, input_shape):
return tf.TensorShape(
self.layer.compute_output_shape(input_shape).as_list())
def _initialize_weights(self, inputs):
"""Initialize weight g.
The initial value of g could either from the initial value in v,
or by the input value if self.data_init is True.
"""
with tf.control_dependencies([
tf.debugging.assert_equal( # pylint: disable=bad-continuation
self._initialized,
False,
message='The layer has been initialized.')
]):
if self.data_init:
assign_tensors = self._data_dep_init(inputs)
else:
assign_tensors = self._init_norm()
assign_tensors.append(self._initialized.assign(True))
return assign_tensors
def _init_norm(self):
"""Set the weight g with the norm of the weight vector."""
with tf.name_scope('init_norm'):
v_flat = tf.reshape(self.v, [-1, self.layer_depth])
v_norm = tf.linalg.norm(v_flat, axis=0)
g_tensor = self.g.assign(tf.reshape(v_norm, (self.layer_depth,)))
return [g_tensor]
def _data_dep_init(self, inputs):
"""Data dependent initialization."""
with tf.name_scope('data_dep_init'):
#print(type(self.g))
#print(dir(self.g))
#print(self.g.__class__)
#print(self.g.__name__)
# Generate data dependent init values
x_init = self._naked_clone_layer(inputs)
data_norm_axes = list(range(x_init.shape.rank - 1))
m_init, v_init = tf.nn.moments(x_init, data_norm_axes)
scale_init = 1. / tf.math.sqrt(v_init + 1e-10)
# RNNs have fused kernels that are tiled
# Repeat scale_init to match the shape of fused kernel
# Note: This is only to support the operation,
# the paper advises against RNN+data_dep_init
if scale_init.shape[0] != self.g.shape[0]:
rep = int(self.g.shape[0] / scale_init.shape[0])
scale_init = tf.tile(scale_init, [rep])
# Assign data dependent init values
g_tensor = self.g.assign(self.g * scale_init)
if hasattr(self.layer, 'bias') and self.layer.bias is not None:
bias_tensor = self.layer.bias.assign(-m_init * scale_init)
return [g_tensor, bias_tensor]
else:
return [g_tensor]
def get_config(self):
config = {'data_init': self.data_init}
        base_config = super(SpectralNormalization, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def remove(self):
kernel = tf.Variable(
tf.nn.l2_normalize(self.v, axis=self.kernel_norm_axes) * self.g,
name='recurrent_kernel' if self.is_rnn else 'kernel')
if self.is_rnn:
self.layer.cell.recurrent_kernel = kernel
else:
self.layer.kernel = kernel
return self.layer
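# Usage sketch (illustrative, not part of the original module; names below are
# assumptions): the wrapper goes around a standard Keras layer and builds lazily
# on first call.
#   sn_conv = SpectralNormalization(tf.keras.layers.Conv2D(64, 3, padding='same'))
#   y = sn_conv(tf.random.normal([8, 32, 32, 3]))  # -> shape (8, 32, 32, 64)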
"""class SpectralNormalization(tf.keras.layers.Wrapper):
\"""This wrapper is modified from
https://github.com/tensorflow/addons/blob/v0.7.1/tensorflow_addons/layers/wrappers.py
Arguments:
layer: a layer instance.
data_init: If `True` use data dependent variable initialization
Raises:
ValueError: If not initialized with a `Layer` instance.
ValueError: If `Layer` does not contain a `kernel` of weights
NotImplementedError: If `data_init` is True and running graph execution
\"""
def __init__(self, layer, data_init=True, **kwargs):
super(SpectralNormalization, self).__init__(layer, **kwargs)
self.data_init = data_init
self._track_trackable(layer, name='layer')
self._init_critical_section = tf.CriticalSection(name='init_mutex')
self.is_rnn = isinstance(self.layer, tf.keras.layers.RNN)
if self.data_init and self.is_rnn:
logging.warning(
"WeightNormalization: Using `data_init=True` with RNNs "
"is advised against by the paper. Use `data_init=False`.")
def build(self, input_shape):
\"""Build `Layer`\"""
input_shape = tf.TensorShape(input_shape)
self.input_spec = tf.keras.layers.InputSpec(
shape=[None] + input_shape[1:])
if not self.layer.built:
self.layer.build(input_shape)
kernel_layer = self.layer.cell if self.is_rnn else self.layer
if not hasattr(kernel_layer, 'kernel'):
raise ValueError('`WeightNormalization` must wrap a layer that'
' contains a `kernel` for weights')
if self.is_rnn:
kernel = kernel_layer.recurrent_kernel
else:
kernel = kernel_layer.kernel
# The kernel's filter or unit dimension is -1
self.layer_depth = int(kernel.shape[-1])
self.temporal_dim = int(tf.reshape(kernel, [-1, self.layer_depth]).shape[0])
self.kernel_norm_axes = list(range(kernel.shape.rank - 1))
self._u = self.add_weight(
name='u',
shape=(1,self.layer_depth),
initializer=tf.keras.initializers.GlorotNormal,
dtype=kernel.dtype,
trainable=True)
self._v = self.add_weight(
name='g',
shape=(1,self.temporal_dim),
initializer=tf.keras.initializers.GlorotNormal,
dtype=kernel.dtype,
trainable=True)
self._u = tf.math.l2_normalize(self._u, axis=1)
self._v = tf.math.l2_normalize(self._v, axis=1)
self.v = kernel
\"""self.g = self.add_weight(
name='g',
shape=(self.layer_depth,),
initializer='ones',
dtype=kernel.dtype,
trainable=True)
self._initialized = self.add_weight(
name='initialized',
shape=None,
initializer='zeros',
dtype=tf.dtypes.bool,
trainable=False)
if self.data_init:
# Used for data initialization in self._data_dep_init.
with tf.name_scope('data_dep_init'):
layer_config = tf.keras.layers.serialize(self.layer)
layer_config['config']['trainable'] = False
self._naked_clone_layer = tf.keras.layers.deserialize(
layer_config)
self._naked_clone_layer.build(input_shape)
self._naked_clone_layer.set_weights(self.layer.get_weights())
if not self.is_rnn:
self._naked_clone_layer.activation = None\"""
self.built = True
def call(self, inputs):
\"""Call `Layer`\"""
\"""def _do_nothing():
return tf.identity(self.g)
def _update_weights():
# Ensure we read `self.g` after _update_weights.
with tf.control_dependencies(self._initialize_weights(inputs)):
return tf.identity(self.g)
g = self._init_critical_section.execute(lambda: tf.cond(
self._initialized, _do_nothing, _update_weights))\"""
with tf.name_scope('compute_weights'):
# Replace kernel by spectrally normalized weight.
#with tf.init_scope():
kernel = self.spectral_normalize()
if self.is_rnn:
self.layer.cell.recurrent_kernel = kernel
update_kernel = tf.identity(self.layer.cell.recurrent_kernel)
else:
self.layer.kernel = kernel
update_kernel = tf.identity(self.layer.kernel)
# Ensure we calculate result after updating kernel.
with tf.control_dependencies([update_kernel]):
outputs = self.layer(inputs)
return outputs
def spectral_normalize(self):
kernel_mat = tf.reshape(self.v, [self.layer_depth, self.temporal_dim])
self._v = tf.math.l2_normalize(tf.matmul(self._u, kernel_mat), axis=1)
update_v = tf.identity(self._v)
with tf.control_dependencies([update_v]):
self._u = tf.math.l2_normalize(tf.matmul(self._v, tf.transpose(kernel_mat)), axis=1)
update_u = tf.identity(self._u)
with tf.control_dependencies([update_u]):
sigma = tf.reduce_sum(tf.matmul(self._u, kernel_mat) * self._v)
return self.v / sigma
def compute_output_shape(self, input_shape):
return tf.TensorShape(
self.layer.compute_output_shape(input_shape).as_list())
\"""def _initialize_weights(self, inputs):
#Initialize weight g.
#The initial value of g could either from the initial value in v,
#or by the input value if self.data_init is True.
with tf.control_dependencies([
tf.debugging.assert_equal( # pylint: disable=bad-continuation
self._initialized,
False,
message='The layer has been initialized.')
]):
if self.data_init:
assign_tensors = self._data_dep_init(inputs)
else:
assign_tensors = self._init_norm()
assign_tensors.append(self._initialized.assign(True))
return assign_tensors
def _init_norm(self):
#Set the weight g with the norm of the weight vector.
with tf.name_scope('init_norm'):
v_flat = tf.reshape(self.v, [-1, self.layer_depth])
v_norm = tf.linalg.norm(v_flat, axis=0)
g_tensor = self.g.assign(tf.reshape(v_norm, (self.layer_depth,)))
return [g_tensor]
def _data_dep_init(self, inputs):
#Data dependent initialization.
with tf.name_scope('data_dep_init'):
# Generate data dependent init values
x_init = self._naked_clone_layer(inputs)
data_norm_axes = list(range(x_init.shape.rank - 1))
m_init, v_init = tf.nn.moments(x_init, data_norm_axes)
scale_init = 1. / tf.math.sqrt(v_init + 1e-10)
# RNNs have fused kernels that are tiled
# Repeat scale_init to match the shape of fused kernel
# Note: This is only to support the operation,
# the paper advises against RNN+data_dep_init
if scale_init.shape[0] != self.g.shape[0]:
rep = int(self.g.shape[0] / scale_init.shape[0])
scale_init = tf.tile(scale_init, [rep])
# Assign data dependent init values
g_tensor = self.g.assign(self.g * scale_init)
if hasattr(self.layer, 'bias') and self.layer.bias is not None:
bias_tensor = self.layer.bias.assign(-m_init * scale_init)
return [g_tensor, bias_tensor]
else:
return [g_tensor]\"""
def get_config(self):
config = {'data_init': self.data_init}
base_config = super(WeightNormalization, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
def remove(self):
kernel = tf.Variable(
tf.nn.l2_normalize(self.v, axis=self.kernel_norm_axes) * self.g,
name='recurrent_kernel' if self.is_rnn else 'kernel')
if self.is_rnn:
self.layer.cell.recurrent_kernel = kernel
else:
self.layer.kernel = kernel
return self.layer
\"""
def l2normalize(v, eps=1e-12):
return tf.math.divide(v,(tf.norm(v) + eps))
class SpectralNormalization(layers.Layer):
\""" Paper: https://openreview.net/forum?id=B1QRgziT-
source: https://github.com/pfnet-research/sngan_projection
\"""
def __init__(self, module, name="weights", Ip=1, factor=None):
super(SpectralNormalization, self).__init__()
self.module = module
self.weight_name = name
if not Ip >= 1:
raise ValueError("The number of power iterations should be positive integer")
self.Ip = Ip
self.factor = factor
def _check_param(self):
try:
u = getattr(self, "u")
v = getattr(self, "v")
return True
except AttributeError:
return False
def _make_param(self):
W = getattr(self.module, self.weight_name)[0]
height = W.shape[-1]
width = tf.reshape(W, shape=(height, -1)).shape[1]
u = tf.random.normal(shape=[1, height])
v = tf.random.normal(shape=[1, width])
self.u = l2normalize(u)
self.v = l2normalize(v)
def build(self, input_shape):
self.module.build(input_shape)
if not self._check_param():
self._make_param()
def call(self, x, training=None):
if training:
self.update_uv()
return self.module.call(x)
# # @tf.function
def update_uv(self):
\""" Spectrally Normalized Weight
\"""
W = getattr(self.module, self.weight_name)[0]
with tf.init_scope():
W_mat = tf.reshape(W, [W.shape[-1], -1])
for _ in range(self.Ip):
self.v = l2normalize(tf.matmul(self.u, W_mat))
self.u = l2normalize(tf.matmul(self.v, tf.transpose(W_mat)))
sigma = tf.reduce_sum(tf.matmul(self.u, W_mat) * self.v)
if self.factor:
sigma = sigma / self.factor
W.assign(W / sigma)
"""
class SNConv2D(tf.keras.layers.Conv2D):
"""Paper: https://openreview.net/forum?id=B1QRgziT-
source: https://github.com/pfnet-research/sngan_projection
"""
def __init__(self,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
data_format=None,
dilation_rate=(1, 1),
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
Ip=1,
factor=None,
input_shape=None,
**kwargs):
super(SNConv2D, self).__init__(
filters,
kernel_size,
strides=strides,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
**kwargs)
self.training = None
self.Ip = Ip
self.factor = factor
def _check_param(self):
try:
u = getattr(self, "u")
v = getattr(self, "v")
return True
except AttributeError:
return False
def _make_param(self):
height = self.w.shape[-1]
width = tf.reshape(self.w, shape=(height, -1)).shape[1]
u = tf.random.normal(shape=[1, height])
v = tf.random.normal(shape=[1, width])
self.u = l2normalize(u)
self.v = l2normalize(v)
def build(self, input_shape):
super(SNConv2D, self).build(input_shape)
self.w = self.add_weight(
name='sn_conv2d_kernel',
shape=self.kernel.shape,
dtype=tf.float32,
initializer='glorot_uniform',
#regularizer=None,
trainable=True,
#constraint=None,
#partitioner=None,
#use_resource=None,
synchronization=tf.VariableSynchronization.AUTO,
aggregation=tf.compat.v1.VariableAggregation.MEAN)
if not self._check_param():
self._make_param()
# @tf.function
def call(self, x, training=None):
"""Applies the convolution layer.
Args:
x (tensor): Input image.
Returns:
tensor: Output of the convolution.
"""
if training:
self.update_wuv()
out = tf.nn.conv2d(
x, self.w, strides=self.strides, padding='SAME')
if self.bias is not None:
out += self.bias
return out
#@tf.function
def update_wuv(self):
with tf.init_scope():
W_mat = tf.reshape(self.w, [self.w.shape[-1], -1])
for _ in range(self.Ip):
self.v = l2normalize(tf.matmul(self.u, W_mat))
self.u = l2normalize(tf.matmul(self.v, tf.transpose(W_mat)))
sigma = tf.reduce_sum(tf.matmul(self.u, W_mat) * self.v)
if self.factor:
sigma = sigma / self.factor
self.w.assign(self.w / sigma)
class SNConv2DTranspose(tf.keras.layers.Conv2DTranspose):
"""Paper: https://openreview.net/forum?id=B1QRgziT-
source: https://github.com/pfnet-research/sngan_projection
"""
def __init__(self,
filters,
kernel_size,
strides=(1, 1),
padding='valid',
output_padding=None,
data_format=None,
dilation_rate=(1, 1),
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
Ip=1,
factor=None,
input_shape=None,
**kwargs):
super(SNConv2DTranspose, self).__init__(
filters,
kernel_size,
strides=strides,
output_padding=output_padding,
padding=padding,
data_format=data_format,
dilation_rate=dilation_rate,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
**kwargs)
self.Ip = Ip
self.factor = factor
self.training = None
def _check_param(self):
try:
u = getattr(self, "u")
v = getattr(self, "v")
return True
except AttributeError:
return False
def _make_param(self):
height = self.w.shape[-1]
width = tf.reshape(self.w, shape=(height, -1)).shape[1]
u = tf.random.normal(shape=[1, height])
v = tf.random.normal(shape=[1, width])
self.u = l2normalize(u)
self.v = l2normalize(v)
def build(self, input_shape):
super(SNConv2DTranspose, self).build(input_shape)
self.w = self.add_weight(
name='sn_conv2dTranspose_kernel',
shape=self.kernel.shape,
dtype=tf.float32,
initializer='glorot_uniform',
#regularizer=None,
trainable=True,
#constraint=None,
#partitioner=None,
#use_resource=None,
synchronization=tf.VariableSynchronization.AUTO,
aggregation=tf.compat.v1.VariableAggregation.MEAN)
if not self._check_param():
self._make_param()
# @tf.function
def call(self, x, training=None):
        b, h, w, c = x.get_shape().as_list()  # assumes a static batch size; b must be concrete for output_shape below
if training:
self.update_wuv()
if self.padding.lower() == 'same':
nh = h * self.strides[0]
nw = w * self.strides[1]
else:
nh = h + (h - 1) * self.strides[0] + self.w.shape[0] - 1
nw = w + (w - 1) * self.strides[1] + self.w.shape[1] - 1
out = tf.nn.conv2d_transpose(x, self.w, output_shape=[b, nh, nw, self.w.shape[-2]],
strides=self.strides, padding=self.padding.upper())
if self.bias is not None:
out += self.bias
return out
def update_wuv(self):
"""
Spectrally Normalized Weight
"""
with tf.init_scope():
W_mat = tf.reshape(self.w, [self.w.shape[-1], -1])
for _ in range(self.Ip):
self.v = l2normalize(tf.matmul(self.u, W_mat))
self.u = l2normalize(tf.matmul(self.v, tf.transpose(W_mat)))
sigma = tf.reduce_sum(tf.matmul(self.u, W_mat) * self.v)
if self.factor:
sigma = sigma / self.factor
self.w.assign(self.w / sigma)
class SNDense(tf.keras.layers.Dense):
"""Paper: https://openreview.net/forum?id=B1QRgziT-
source: https://github.com/pfnet-research/sngan_projection
"""
def __init__(self,
units,
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
Ip=1,
factor=None,
**kwargs):
super(SNDense, self).__init__(units,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
kernel_constraint=kernel_constraint,
bias_constraint=bias_constraint,
**kwargs)
self.Ip = Ip
self.factor = factor
def _check_param(self):
try:
u = getattr(self, "u")
v = getattr(self, "v")
return True
except AttributeError:
return False
def _make_param(self):
self.w = self.add_weight(
name='sn_dense_kernel',
shape=self.weights[0].shape,
dtype=tf.float32,
initializer='glorot_uniform',
#regularizer=None,
trainable=True,
#constraint=None,
#partitioner=None,
#use_resource=None,
synchronization=tf.VariableSynchronization.AUTO,
aggregation=tf.compat.v1.VariableAggregation.MEAN)
W = self.weights[0]
height = W.shape[-1]
width = tf.reshape(W, shape=(height, -1)).shape[1]
u = tf.random.normal(shape=[1, height])
v = tf.random.normal(shape=[1, width])
self.u = l2normalize(u)
self.v = l2normalize(v)
def build(self, input_shape):
super(SNDense, self).build(input_shape)
if not self._check_param():
self._make_param()
# @tf.function
def call(self, x, training=None):
if training:
with tf.init_scope():
self.update_wuv()
out = tf.matmul(x, self.w)
if self.use_bias:
out += self.bias
return out
def update_wuv(self):
W_mat = tf.reshape(self.w, [self.w.shape[-1], -1])
for _ in range(self.Ip):
self.v = l2normalize(tf.matmul(self.u, W_mat))
self.u = l2normalize(tf.matmul(self.v, tf.transpose(W_mat)))
sigma = tf.reduce_sum(tf.matmul(self.u, W_mat) * self.v)
if self.factor:
sigma = sigma / self.factor
self.w.assign(self.w / sigma)
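# Power-iteration sanity sketch (illustrative, not in the original file): the
# u/v updates used in update_wuv converge so that u @ W @ v^T estimates the
# largest singular value of W.
#   W = tf.random.normal([64, 32])
#   u = l2normalize(tf.random.normal([1, 64]))
#   for _ in range(20):
#       v = l2normalize(tf.matmul(u, W))
#       u = l2normalize(tf.matmul(v, tf.transpose(W)))
#   sigma_est = tf.reduce_sum(tf.matmul(u, W) * v)
#   # sigma_est should approach tf.reduce_max(tf.linalg.svd(W, compute_uv=False))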
class AttentionLayer(layers.Layer):
def __init__(self):
super(AttentionLayer, self).__init__()
def build(self, input_shape):
# to scale attention
self.sigma = self.add_weight(shape=(),
initializer='zero',
trainable=True,
name='sigma')
b, w, h, c = input_shape.as_list()
self.conv = []
self.conv.append(layers.Conv2D(c//8, 1, 1))
self.conv.append(layers.Conv2D(c//8, 1, 1))
# self.conv.append(layers.Conv2D(c//2, 1, 1))
self.conv.append(layers.Conv2D(c, 1, 1))
for i, conv in enumerate(self.conv):
#if i==len(self.conv)-1:
# conv.build([b,w,h,c//2])
#else:
conv.build(input_shape)
def call(self, inputs, training=None):
b, w, h, c = inputs.shape.as_list()
location_num = w * h
downsample_num = location_num // 4
query = self.conv[0](inputs)
query = tf.reshape(query, [-1, location_num, c//8])
key = self.conv[1](inputs)
key = layers.MaxPool2D(2,2)(key)
key = tf.reshape(key, [-1, downsample_num, c//8])
key = tf.transpose(key, [0, 2, 1])
atten = tf.matmul(query, key)
atten = tf.nn.softmax(atten, axis=-1) # [location_num, downsample_num]
value = self.conv[2](inputs)
value = layers.MaxPool2D(2,2)(value)
value = tf.reshape(value, [-1, downsample_num, c])
atten_g = tf.matmul(atten, value) # [location_num, c]
atten_g = tf.reshape(atten_g, [-1, w, h, c])
# atten_g = self.conv[3](atten_g)
# return layers.add([(1-self.sigma) * inputs, self.sigma * atten_g])
return layers.add([inputs, self.sigma * atten_g])
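# Illustrative usage (assumption, not from the original file): the layer is
# shape-preserving, so it can be inserted between convolutional blocks.
#   attn = AttentionLayer()
#   out = attn(tf.random.normal([4, 16, 16, 64]))  # out.shape == (4, 16, 16, 64)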
| 36.380298 | 96 | 0.564893 | 3,671 | 31,760 | 4.702261 | 0.100245 | 0.02659 | 0.012803 | 0.006372 | 0.843877 | 0.824296 | 0.813521 | 0.800371 | 0.78247 | 0.779458 | 0 | 0.010201 | 0.330195 | 31,760 | 872 | 97 | 36.422018 | 0.80125 | 0.09556 | 0 | 0.773228 | 0 | 0.022047 | 0.179908 | 0.050388 | 0 | 0 | 0 | 0 | 0.00315 | 1 | 0.050394 | false | 0 | 0.00315 | 0.00315 | 0.116535 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
de348776d36fb77294e59b9465e3801d9072da8f | 12,972 | py | Python | simulator/simulator-topologies-fault-tolerance/latency/latency.py | giordano-lucas/DeAI | d389010e371473d0e1262176d30ceb36acef7c5a | [
"Apache-2.0"
] | 44 | 2020-06-25T07:35:39.000Z | 2022-02-18T12:29:45.000Z | simulator/simulator-topologies-fault-tolerance/latency/latency.py | giordano-lucas/DeAI | d389010e371473d0e1262176d30ceb36acef7c5a | [
"Apache-2.0"
] | 152 | 2020-06-23T23:30:15.000Z | 2022-02-25T10:22:30.000Z | simulator/simulator-topologies-fault-tolerance/latency/latency.py | giordano-lucas/DeAI | d389010e371473d0e1262176d30ceb36acef7c5a | [
"Apache-2.0"
] | 11 | 2020-08-11T21:19:49.000Z | 2022-01-30T17:15:31.000Z | import numpy as np
import torch
import sys
sys.path.append('..')
from utils import model_init, optimizer_init, client_update, diffuse_params, average_models, evaluate, create_mixing_matrix
def run_latency(train_loader, test_loader, comm_matrix, num_rounds, epochs, num_clients,
latency_nodes, net='net', optimizer='sgd', lr=0.1):
"""
Runs a decentralized optimization algorithm for the given learning rate for a
    number of rounds, over some network. Some nodes send their weights with a one-round
latency, for the entire execution. Outputs the accuracies and returns them.
Params:
train_loader (array): the list of all train datasets, one per client
test_loader (array): the list of test datasets, one per client
    comm_matrix (numpy.array): the communication matrix modeling the network
num_rounds (int): the number of data exchanges between nodes
epochs (int): the number of optimization steps between each communication (minimum 1)
num_clients (int): the number of clients in the network
latency_nodes (array): the list of delayed nodes
net (string): the neural network framework we use
optimizer (string): the chosen optimizer, SGD by default
    lr (double): the learning rate for the optimization algorithm
Returns:
global_model (nn.Module): the final global neural network averaging all the clients
client_models (array of Net): the list of all the final client neural networks
accs (array): the corresponding accuracies, with the same shape as lrs
"""
accs = []
global_model, client_models = model_init(num_clients, net)
opt = optimizer_init(client_models, lr, optimizer)
loss, test_loss, acc = 0.0, 0.0, 0.0
for r in range(num_rounds):
        old_client_models = client_models  # NOTE: aliases, not copies, the models; after client_update the "stale" weights used below are already updated
# client update
for i in range(num_clients):
loss += client_update(client_models[i], opt[i], train_loader[i], epoch=epochs)
# diffuse params
diffuse_params_latency(client_models, comm_matrix, latency_nodes)
if (r > 0):
diffuse_params_latency(old_client_models, comm_matrix, np.setdiff1d(np.array(range(num_clients)), latency_nodes))
average_models(global_model, client_models)
test_loss, acc = evaluate(global_model, test_loader)
print('%d-th round' % r)
print('average train loss %0.3g | test loss %0.3g | test acc: %0.3f' % (loss / num_clients, test_loss, acc))
accs.append(acc)
return global_model, client_models, accs
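# Illustrative driver (assumption: the data loaders and constants come from the
# caller, e.g. via the utils helpers imported above):
#   W = create_mixing_matrix('ring', 8)
#   delayed = np.array(nodes_latency(num_nodes=8, num_delay=2))
#   model, clients, accs = run_latency(train_loaders, test_loader, W,
#                                      num_rounds=20, epochs=1, num_clients=8,
#                                      latency_nodes=delayed)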
def run_latency_changing_topo(train_loader, test_loader, num_rounds, epochs, num_clients,
latency_nodes, net='net', optimizer='sgd', lr=0.1):
"""
Runs a decentralized optimization algorithm for the given learning rate for a
    number of rounds, over some network. Some nodes send their weights with a one-round
latency, for the entire execution. The network topology evolves over time. Outputs
the accuracies and returns them.
Params:
train_loader (array): the list of all train datasets, one per client
test_loader (array): the list of test datasets, one per client
num_rounds (int): the number of data exchanges between nodes
epochs (int): the number of optimization steps between each communication (minimum 1)
num_clients (int): the number of clients in the network
latency_nodes (array): the list of delayed nodes
net (string): the neural network framework we use
optimizer (string): the chosen optimizer, SGD by default
    lr (double): the learning rate for the optimization algorithm
Returns:
global_model (nn.Module): the final global neural network averaging all the clients
client_models (array of Net): the list of all the final client neural networks
accs (array): the corresponding accuracies, with the same shape as lrs
"""
accs = []
global_model, client_models = model_init(num_clients, net)
opt = optimizer_init(client_models, lr, optimizer)
topos = ['centralized', 'ring', 'grid']
topo = np.random.choice(topos)
comm_matrix = create_mixing_matrix(topo, num_clients)
loss, test_loss, acc = 0.0, 0.0, 0.0
for r in range(num_rounds):
        old_client_models = client_models  # aliases the live models (see note in run_latency)
old_topo = topo
old_comm_matrix = comm_matrix
topo = np.random.choice(topos)
# client update
for i in range(num_clients):
loss += client_update(client_models[i], opt[i], train_loader[i], epoch=epochs)
# diffuse params
diffuse_params_latency(client_models, comm_matrix, latency_nodes)
if (r > 0):
diffuse_params_latency(old_client_models, old_comm_matrix, np.setdiff1d(np.array(range(num_clients)), latency_nodes))
print("old topo: {}, new topo: {}".format(old_topo, topo))
average_models(global_model, client_models)
test_loss, acc = evaluate(global_model, test_loader)
print('%d-th round' % r)
print('average train loss %0.3g | test loss %0.3g | test acc: %0.3f' % (loss / num_clients, test_loss, acc))
accs.append(acc)
return global_model, client_models, accs
def run_latency_per_round(train_loader, test_loader, comm_matrix, num_rounds, epochs, num_clients,
latency_nodes, latency_rounds, net='net', optimizer='sgd', lr=0.1):
"""
Runs a decentralized optimization algorithm for the given learning rate for a
    number of rounds, over some network. Some nodes send their weights with a one-round
latency, only during specific rounds. Outputs the accuracies and returns them.
Params:
train_loader (array): the list of all train datasets, one per client
test_loader (array): the list of test datasets, one per client
    comm_matrix (numpy.array): the communication matrix modeling the network
num_rounds (int): the number of data exchanges between nodes
epochs (int): the number of optimization steps between each communication (minimum 1)
num_clients (int): the number of clients in the network
latency_nodes (array): the list of delayed nodes
latency_rounds (array): the rounds at which latency will occur across the network
net (string): the neural network framework we use
optimizer (string): the chosen optimizer, SGD by default
    lr (double): the learning rate for the optimization algorithm
Returns:
global_model (nn.Module): the final global neural network averaging all the clients
client_models (array of Net): the list of all the final client neural networks
accs (array): the corresponding accuracies, with the same shape as lrs
"""
accs = []
global_model, client_models = model_init(num_clients, net)
opt = optimizer_init(client_models, lr, optimizer)
loss, test_loss, acc = 0.0, 0.0, 0.0
for r in range(num_rounds):
        old_client_models = client_models  # aliases the live models (see note in run_latency)
# client update
for i in range(num_clients):
loss += client_update(client_models[i], opt[i], train_loader[i], epoch=epochs)
# diffuse params
if (r in latency_rounds):
diffuse_params_latency(client_models, comm_matrix, latency_nodes)
print("round {}, delay".format(r))
        elif (r in latency_rounds + 1):  # assumes latency_rounds is a numpy array (a plain list would raise TypeError here)
diffuse_params(client_models, comm_matrix)
diffuse_params_latency(old_client_models, comm_matrix, np.setdiff1d(np.array(range(num_clients)), latency_nodes))
print("round {}, delay recovery".format(r))
else:
diffuse_params(client_models, comm_matrix)
print("round {}, normal".format(r))
average_models(global_model, client_models)
test_loss, acc = evaluate(global_model, test_loader)
print('%d-th round' % r)
print('average train loss %0.3g | test loss %0.3g | test acc: %0.3f' % (loss / num_clients, test_loss, acc))
accs.append(acc)
return global_model, client_models, accs
def run_latency_per_round_changing_topo(train_loader, test_loader, num_rounds, epochs, num_clients,
latency_nodes, latency_rounds, net='net', optimizer='sgd', lr=0.1):
"""
Runs a decentralized optimization algorithm for the given learning rate for a
    number of rounds, over some network. Some nodes send their weights with a one-round
    latency, only during specific rounds, while the network topology evolves over time.
    Outputs the accuracies and returns them.
Params:
train_loader (array): the list of all train datasets, one per client
test_loader (array): the list of test datasets, one per client
num_rounds (int): the number of data exchanges between nodes
epochs (int): the number of optimization steps between each communication (minimum 1)
num_clients (int): the number of clients in the network
latency_nodes (array): the list of delayed nodes
latency_rounds (array): the rounds at which latency will occur across the network
net (string): the neural network framework we use
optimizer (string): the chosen optimizer, SGD by default
    lr (double): the learning rate for the optimization algorithm
Returns:
global_model (nn.Module): the final global neural network averaging all the clients
client_models (array of Net): the list of all the final client neural networks
accs (array): the corresponding accuracies, with the same shape as lrs
"""
accs = []
global_model, client_models = model_init(num_clients, net)
opt = optimizer_init(client_models, lr, optimizer)
topos = ['centralized', 'ring', 'grid']
topo = np.random.choice(topos)
comm_matrix = create_mixing_matrix(topo, num_clients)
loss, test_loss, acc = 0.0, 0.0, 0.0
for r in range(num_rounds):
        old_client_models = client_models  # aliases the live models (see note in run_latency)
old_topo = topo
old_comm_matrix = comm_matrix
        topo = np.random.choice(topos)
        # rebuild the mixing matrix for the newly drawn topology; without this line
        # the "changing topo" was purely cosmetic (only the printed label changed)
        comm_matrix = create_mixing_matrix(topo, num_clients)
# client update
for i in range(num_clients):
loss += client_update(client_models[i], opt[i], train_loader[i], epoch=epochs)
# diffuse params
if (r in latency_rounds):
diffuse_params_latency(client_models, comm_matrix, latency_nodes)
print("round {}, delay".format(r))
elif (r in latency_rounds + 1):
diffuse_params(client_models, comm_matrix)
diffuse_params_latency(old_client_models, old_comm_matrix, np.setdiff1d(np.array(range(num_clients)), latency_nodes))
print("round {}, delay recovery".format(r))
else:
diffuse_params(client_models, comm_matrix)
print("round {}, normal".format(r))
print("old topo: {}, new topo: {}".format(old_topo, topo))
average_models(global_model, client_models)
test_loss, acc = evaluate(global_model, test_loader)
print('%d-th round' % r)
print('average train loss %0.3g | test loss %0.3g | test acc: %0.3f' % (loss / num_clients, test_loss, acc))
accs.append(acc)
return global_model, client_models, accs
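# Minimal usage sketch (editor's addition; the loader names are placeholders and
# model_init / create_mixing_matrix / client_update are assumed to be defined
# earlier in this module):
#
#   lat_nodes = nodes_latency(num_nodes=8, num_delay=2)
#   lat_rounds = np.array([5, 10, 15])
#   gm, cms, accs = run_latency_per_round_changing_topo(
#       train_loaders, test_loader, num_rounds=20, epochs=1, num_clients=8,
#       latency_nodes=lat_nodes, latency_rounds=lat_rounds)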
def diffuse_params_latency(client_models, communication_matrix, latency_nodes):
"""
    Diffuses the client models to their neighbours, except for nodes with
    latency: a delayed node does not broadcast its weights this round.
Params:
client_models (array): the list of all the client neural networks
communication_matrix (numpy.array): the weighted matrix defining the links between clients
latency_nodes (array): the list of nodes with latency
"""
if client_models:
client_state_dicts = [model.state_dict() for model in client_models]
keys = client_state_dicts[0].keys()
for model, weights in zip(client_models, communication_matrix):
neighbors = np.nonzero(weights)[0]
working_neigh = np.setdiff1d(neighbors, latency_nodes)
if len(working_neigh) != 0:
model.load_state_dict(
{
key: torch.stack(
[weights[j]*client_state_dicts[j][key] for j in working_neigh],
dim=0,
).sum(0) / weights.sum()
for key in keys
}
)
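# Editor's note: the divisor above is the full row sum `weights.sum()`, not the
# sum over `working_neigh` only, so the weight mass of delayed neighbours is not
# renormalised away; it appears to be re-injected one round later by the
# "delay recovery" branch of the training loops above.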
def nodes_latency(num_nodes, num_delay):
"""
Chooses a number of nodes among the set of clients. The chosen ones now
have latency, i.e. their weights are transmitted with some delay.
Params:
num_nodes (int): the number of clients
num_delay (int): the number of latency nodes to choose
Returns:
lat_nodes (array): the list of latency nodes
"""
assert num_delay < num_nodes
lat_nodes = []
for i in range(num_delay):
k = np.random.choice(num_nodes)
while (k in lat_nodes):
k = np.random.choice(num_nodes)
lat_nodes.append(k)
return lat_nodes
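# Editor's note: an equivalent, more idiomatic draw without the rejection loop
# would be (a sketch, relying only on numpy):
#
#   lat_nodes = list(np.random.choice(num_nodes, size=num_delay, replace=False))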
| 46.163701 | 129 | 0.68062 | 1,796 | 12,972 | 4.748886 | 0.106347 | 0.070348 | 0.020049 | 0.024622 | 0.872083 | 0.859655 | 0.851214 | 0.851214 | 0.851214 | 0.851214 | 0 | 0.007485 | 0.237897 | 12,972 | 280 | 130 | 46.328571 | 0.85525 | 0.431468 | 0 | 0.755556 | 0 | 0.02963 | 0.073181 | 0 | 0 | 0 | 0 | 0 | 0.007407 | 1 | 0.044444 | false | 0 | 0.02963 | 0 | 0.111111 | 0.118519 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
de42a4745b9f9fa1208d30a412a288871079d383 | 62 | py | Python | setu_viewer/models/__init__.py | chenx6/setu-server | 4119fd1c7a19c96158174d8c37c376082ad19222 | [
"MIT"
] | null | null | null | setu_viewer/models/__init__.py | chenx6/setu-server | 4119fd1c7a19c96158174d8c37c376082ad19222 | [
"MIT"
] | null | null | null | setu_viewer/models/__init__.py | chenx6/setu-server | 4119fd1c7a19c96158174d8c37c376082ad19222 | [
"MIT"
] | null | null | null | from .base import db
def init_app(app):
db.init_app(app) | 12.4 | 20 | 0.693548 | 12 | 62 | 3.416667 | 0.583333 | 0.341463 | 0.487805 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.193548 | 62 | 5 | 21 | 12.4 | 0.82 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
ded3695ff95ec50a38893c062bd0cfc32470b66e | 32,666 | py | Python | src/blind/02_longest_substring_without_repeating_characters/02_longest_substring_without_repeating_characters.py | ihabbou/blind-must-do | 19ee182a2e0692e2501a717d47f155ec7d859f7a | [
"MIT"
] | null | null | null | src/blind/02_longest_substring_without_repeating_characters/02_longest_substring_without_repeating_characters.py | ihabbou/blind-must-do | 19ee182a2e0692e2501a717d47f155ec7d859f7a | [
"MIT"
] | null | null | null | src/blind/02_longest_substring_without_repeating_characters/02_longest_substring_without_repeating_characters.py | ihabbou/blind-must-do | 19ee182a2e0692e2501a717d47f155ec7d859f7a | [
"MIT"
] | null | null | null | # %%
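# Longest substring without repeating characters (editor's summary): the
# sliding-window version below keeps the last index of every character seen and,
# on a repeat, advances the left boundary past the previous occurrence, so each
# index is processed only once.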
def lengthOfLongestSubstring(s: str) -> int:
longest = 0
    # `left` marks the index just *before* the current window; start at -1 so a
    # fully-unique prefix is counted correctly (with `left = 0` the function
    # returns 2 for "abc" instead of 3).
    left = -1
lastSeen = dict()
for right, char in enumerate(s):
loc = lastSeen.get(char, -1)
if loc >= left:
left = loc
lastSeen[char] = right
newlen = right - left
longest = max(longest, newlen)
return longest
    # NOTE (editor): everything below this point is unreachable — the function
    # always returns above. It looks like an earlier brute-force draft that was
    # left in place when the sliding-window version was added.
    slen = len(s)-1
def unique_chars_str(ss): return len(ss) == len(set(ss))
while (slen > 1):
sub_set = set()
for start in range(len(s) - slen + 1):
sub = s[start: start + slen]
sub_set.add(sub)
if len(list(filter(unique_chars_str, sub_set))) != 0:
return slen
slen -= 1
return min(1, len(s))
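# Worked trace for s = "pwwkew" (editor's addition): at right=2 the second 'w'
# appears, so left jumps to lastSeen['w'] = 1 and the half-open window
# (left, right] = "w" has length right - left = 1; the maximum of 3 is then
# reached on "wke" and again on "kew".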
# %% test
inputs = [
"abcabcbb",
"bbbbb",
"pwwkew",
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ 
abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~ abcdefghijklmnopqrstuvwxyzABCD"
]
outputs = [
3, 1, 3, 95
]
for s, expected in zip(inputs, outputs):
print(expected)
result = lengthOfLongestSubstring(s)
assert result == expected, f"Expected {expected}, got {result}"
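# Complexity note (editor's addition): the sliding-window pass is O(n) time and
# O(min(n, |alphabet|)) extra space, whereas the abandoned brute-force draft is
# roughly O(n^3) and would be impractical on the ~30 kB stress input above.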
| 593.927273 | 31,658 | 0.646299 | 454 | 32,666 | 45.768722 | 0.11674 | 1.939458 | 2.900236 | 3.855046 | 0.972713 | 0.972713 | 0.972713 | 0.972713 | 0.972713 | 0.972713 | 0 | 0.10229 | 0.020174 | 32,666 | 54 | 31,659 | 604.925926 | 0.546912 | 0.000306 | 0 | 0 | 0 | 0 | 0.641534 | 0.633296 | 0 | 0 | 0 | 0 | 0.027778 | 1 | 0.055556 | false | 0 | 0 | 0.027778 | 0.138889 | 0.027778 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 |
9d11829672a1cf793563275dee88fb8e9bc06906 | 170 | py | Python | osp/wrappers/simple_simulation/__init__.py | simphony/wrapper-development | 1d310ca782feb3b8acc2a8c275cbfb3eb8646071 | [
"BSD-3-Clause"
] | null | null | null | osp/wrappers/simple_simulation/__init__.py | simphony/wrapper-development | 1d310ca782feb3b8acc2a8c275cbfb3eb8646071 | [
"BSD-3-Clause"
] | 1 | 2020-11-30T10:44:09.000Z | 2021-04-06T09:17:50.000Z | osp/wrappers/simple_simulation/__init__.py | simphony/wrapper-development | 1d310ca782feb3b8acc2a8c275cbfb3eb8646071 | [
"BSD-3-Clause"
] | 1 | 2021-08-10T13:32:05.000Z | 2021-08-10T13:32:05.000Z | from osp.wrappers.simple_simulation.simulation_engine import SimulationEngine
from osp.wrappers.simple_simulation.simple_simulation_session import SimpleSimulationSession | 85 | 92 | 0.923529 | 19 | 170 | 8 | 0.526316 | 0.315789 | 0.197368 | 0.276316 | 0.407895 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.041176 | 170 | 2 | 92 | 85 | 0.932515 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
9d17a2bfc7b420f14812d41b24f05ed9fd7961b8 | 112 | py | Python | metasearch/tests/unit_test/__init__.py | suzanagi/materials-researchactivity-uoa-2020-public-metasearch-mosaicsearch_publication | 37553698e6f778b313922dca23c4ed40530d8f31 | [
"MIT"
] | null | null | null | metasearch/tests/unit_test/__init__.py | suzanagi/materials-researchactivity-uoa-2020-public-metasearch-mosaicsearch_publication | 37553698e6f778b313922dca23c4ed40530d8f31 | [
"MIT"
] | null | null | null | metasearch/tests/unit_test/__init__.py | suzanagi/materials-researchactivity-uoa-2020-public-metasearch-mosaicsearch_publication | 37553698e6f778b313922dca23c4ed40530d8f31 | [
"MIT"
] | null | null | null | from metasearch.tests.unit_test.model import ResultItemModelTests
from metasearch.tests.unit_test.view import *
| 37.333333 | 65 | 0.866071 | 15 | 112 | 6.333333 | 0.6 | 0.294737 | 0.4 | 0.484211 | 0.568421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071429 | 112 | 2 | 66 | 56 | 0.913462 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
9d1c27db305f53ca9c02ba62906d4103c206e87f | 20,248 | py | Python | gym-control/plot.py | AI-secure/rl-perturbed-reward | c894ca5dcdeadd0a0907770bb093b703092e1da1 | [
"MIT"
] | 19 | 2020-01-18T02:27:23.000Z | 2022-02-17T03:40:43.000Z | gym-control/plot.py | AI-secure/rl-perturbed-reward | c894ca5dcdeadd0a0907770bb093b703092e1da1 | [
"MIT"
] | null | null | null | gym-control/plot.py | AI-secure/rl-perturbed-reward | c894ca5dcdeadd0a0907770bb093b703092e1da1 | [
"MIT"
] | 1 | 2021-04-27T05:25:22.000Z | 2021-04-27T05:25:22.000Z | import argparse
import pandas
import os
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
sns.set_color_codes()
parser = argparse.ArgumentParser()
parser.add_argument('--log_dir', type=str, default="logs/dqn_cartpole",
                    help='The path of the log directory [default: logs/dqn_cartpole]')
# store_true instead of type=bool: argparse's type=bool would treat any
# non-empty string (even "False") as True
parser.add_argument('--all', action='store_true',
                    help='Plot all the curves (different error rates) [default: False]')
parser.add_argument('--weight', type=float, default=0.2,
help='Weight of noise [default: 0.2]')
FLAGS = parser.parse_args()
LOG_DIR = FLAGS.log_dir
WEIGHT = FLAGS.weight
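# Example invocations (editor's sketch; the log directories are illustrative —
# the dispatch below keys on substrings such as "dqn"/"cartpole" in --log_dir):
#   python plot.py --log_dir logs/dqn_cartpole --weight 0.4
#   python plot.py --log_dir logs/ddpg_pendulum --all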
def smooth(y, weight=0.6):
last = y[0]
smoothed = []
for point in y:
smoothed_val = last * weight + (1 - weight) * point
smoothed.append(smoothed_val)
last = smoothed_val
return smoothed
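# `smooth` is a standard exponential moving average (the same scheme as
# TensorBoard's smoothing slider):
#   smoothed[t] = weight * smoothed[t-1] + (1 - weight) * y[t]
# e.g. smooth([0, 10], weight=0.6) -> [0, 4.0].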
def plot_qlearn_cartpole_all():
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))['0']
plt.plot(smooth(list(history_normal)), linewidth=1.5, c=sns.color_palette()[0], label="normal")
plt.plot(list(history_normal), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
cnt = 0
for err in [0.2, 0.4, 0.6, 0.8]:
history_noisy = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(err)), "noisy.csv"))['0']
history_surrogate = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(err)), "surrogate.csv"))['0']
plt.plot(smooth(list(history_noisy)), linewidth=1.5, c=sns.color_palette()[cnt+1], label="noisy (" + str(err) + ")")
plt.plot(list(history_noisy), alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+1])
plt.plot(smooth(list(history_surrogate)), linewidth=1.5, c=sns.color_palette()[cnt+2], label="surrogate (" + str(err) + ")")
plt.plot(list(history_surrogate), alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+2])
cnt += 2
plt.ylabel('steps per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (steps)')
plt.legend(loc='best')
plt.savefig(os.path.join(LOG_DIR, "CartPole-v0-reward-all (Q-Learning).png"))
def plot_qlearn_cartpole(weight=0.2):
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))['0']
history_noisy = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(weight)), "noisy.csv"))['0']
history_surrogate = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(weight)), "surrogate.csv"))['0']
plt.plot(smooth(list(history_normal)), linewidth=1.5, c=sns.color_palette()[0])
plt.plot(smooth(list(history_noisy)), linewidth=1.5, c=sns.color_palette()[1])
plt.plot(smooth(list(history_surrogate)), linewidth=1.5, c=sns.color_palette()[2])
plt.plot(list(history_normal), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
plt.plot(list(history_noisy), alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(list(history_surrogate), alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('steps per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (steps-' + str(weight) + ")")
plt.legend(['normal', 'noisy', 'surrogate'], loc='best')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "CartPole-v0-steps-" + str(weight) + " (Q-Learning).png"))
def plot_dqn_cartpole_all():
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
plt.plot(smooth(list(history_normal['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[0], label="normal")
plt.plot(list(history_normal['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
cnt = 0
for err in [0.2, 0.4, 0.5]:
history_noisy = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(err)), "noisy.csv"))
history_surrogate = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(err)), "surrogate.csv"))
plt.plot(smooth(list(history_noisy['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[cnt+1], label="noisy (" + str(err) + ")")
plt.plot(list(history_noisy['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+1])
plt.plot(smooth(list(history_surrogate['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[cnt+2], label="surrogate (" + str(err) + ")")
plt.plot(list(history_surrogate['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+2])
cnt += 2
plt.ylabel('steps per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (steps)')
plt.legend(loc='best')
plt.savefig(os.path.join(LOG_DIR, "CartPole-v0-reward-all (DQN).png"))
def plot_dqn_cartpole(weight=0.2):
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
history_noisy = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(weight)), "noisy.csv"))
history_surrogate = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(weight)), "surrogate.csv"))
plt.plot(smooth(list(history_normal['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[0])
plt.plot(smooth(list(history_noisy['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[1])
plt.plot(smooth(list(history_surrogate['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[2])
plt.plot(list(history_normal['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
plt.plot(list(history_noisy['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(list(history_surrogate['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('steps per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (steps-' + str(weight) + ")")
plt.legend(['normal', 'noisy', 'surrogate'], loc='best')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "CartPole-v0-steps-" + str(weight) + " (DQN).png"))
plt.clf()
plt.plot(smooth(list(history_normal['episode_reward'])), linewidth=1.5, c=sns.color_palette()[0])
plt.plot(smooth(list(history_noisy['episode_reward'])), linewidth=1.5, c=sns.color_palette()[1])
plt.plot(smooth(list(history_surrogate['episode_reward'])), linewidth=1.5, c=sns.color_palette()[2])
plt.plot(list(history_normal['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
plt.plot(list(history_noisy['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(list(history_surrogate['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('reward per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (reward-' + str(weight) + ")")
plt.legend(['normal', 'noisy', 'surrogate'], loc='upper right')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "CartPole-v0-reward-" + str(weight) + " (DQN).png"))
def plot_sarsa_cartpole_all():
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
plt.plot(smooth(list(history_normal['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[0], label="normal")
plt.plot(list(history_normal['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
cnt = 0
for err in [0.2, 0.4, 0.5]:
history_noisy = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(err)), "noisy.csv"))
history_surrogate = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(err)), "surrogate.csv"))
plt.plot(smooth(list(history_noisy['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[cnt+1], label="noisy (" + str(err) + ")")
plt.plot(list(history_noisy['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+1])
plt.plot(smooth(list(history_surrogate['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[cnt+2], label="surrogate (" + str(err) + ")")
plt.plot(list(history_surrogate['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+2])
cnt += 2
plt.ylabel('steps per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (steps)')
plt.legend(loc='best')
plt.savefig(os.path.join(LOG_DIR, "CartPole-v0-steps-all (SARSA).png"))
def plot_sarsa_cartpole(weight=0.2):
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
history_noisy = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(weight)), "noisy.csv"))
history_surrogate = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(weight)), "surrogate.csv"))
plt.plot(smooth(list(history_normal['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[0])
plt.plot(smooth(list(history_noisy['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[1])
plt.plot(smooth(list(history_surrogate['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[2])
plt.plot(list(history_normal['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
plt.plot(list(history_noisy['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(list(history_surrogate['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('steps per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (steps-' + str(weight) + ")")
plt.legend(['normal', 'noisy', 'surrogate'], loc='best')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "CartPole-v0-steps-" + str(weight) + " (SARSA).png"))
plt.clf()
plt.plot(smooth(list(history_normal['episode_reward'])), linewidth=1.5, c=sns.color_palette()[0])
plt.plot(smooth(list(history_noisy['episode_reward'])), linewidth=1.5, c=sns.color_palette()[1])
plt.plot(smooth(list(history_surrogate['episode_reward'])), linewidth=1.5, c=sns.color_palette()[2])
plt.plot(list(history_normal['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
plt.plot(list(history_noisy['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(list(history_surrogate['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('reward per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (reward-' + str(weight) + ")")
plt.legend(['normal', 'noisy', 'surrogate'], loc='upper right')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "CartPole-v0-reward-" + str(weight) + " (SARSA).png"))
def plot_cem_cartpole_all():
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
plt.plot(smooth(list(history_normal['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[0], label="normal")
plt.plot(list(history_normal['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
cnt = 0
for err in [0.2, 0.4, 0.5]:
history_noisy = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(err)), "noisy.csv"))
history_surrogate = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(err)), "surrogate.csv"))
plt.plot(smooth(list(history_noisy['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[cnt+1], label="noisy (" + str(err) + ")")
plt.plot(list(history_noisy['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+1])
plt.plot(smooth(list(history_surrogate['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[cnt+2], label="surrogate (" + str(err) + ")")
plt.plot(list(history_surrogate['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+2])
cnt += 2
plt.ylabel('steps per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (steps)')
plt.legend(loc='best')
plt.savefig(os.path.join(LOG_DIR, "CartPole-v0-reward-all (CEM).png"))
def plot_cem_cartpole(weight=0.2):
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
history_noisy = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(weight)), "noisy.csv"))
history_surrogate = pandas.read_csv(os.path.join(os.path.join(LOG_DIR, str(weight)), "surrogate.csv"))
plt.plot(smooth(list(history_normal['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[0])
plt.plot(smooth(list(history_noisy['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[1])
plt.plot(smooth(list(history_surrogate['nb_episode_steps'])), linewidth=1.5, c=sns.color_palette()[2])
plt.plot(list(history_normal['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
plt.plot(list(history_noisy['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(list(history_surrogate['nb_episode_steps']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('steps per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (steps-' + str(weight) + ")")
plt.legend(['normal', 'noisy', 'surrogate'], loc='best')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "CartPole-v0-steps-" + str(weight) + " (CEM).png"))
plt.clf()
plt.plot(smooth(list(history_normal['episode_reward'])), linewidth=1.5, c=sns.color_palette()[0])
plt.plot(smooth(list(history_noisy['episode_reward'])), linewidth=1.5, c=sns.color_palette()[1])
plt.plot(smooth(list(history_surrogate['episode_reward'])), linewidth=1.5, c=sns.color_palette()[2])
plt.plot(list(history_normal['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
plt.plot(list(history_noisy['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(list(history_surrogate['episode_reward']), alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('reward per episode')
plt.xlabel('episode')
plt.title('CartPole-v0 (reward-' + str(weight) + ")")
plt.legend(['normal', 'noisy', 'surrogate'], loc='upper right')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "CartPole-v0-reward-" + str(weight) + " (CEM).png"))
def plot_ddpg_pendulum_all():
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
plt.plot(smooth(list(history_normal['episode_reward'] / 200.0)), linewidth=1.5, c=sns.color_palette()[0], label="normal")
plt.plot(list(history_normal['episode_reward'] / 200.0), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
cnt = 0
for err in [0.2, 0.4, 0.5]:
reward_noisy = list(np.loadtxt(os.path.join(os.path.join(LOG_DIR, str(err)), "noisy_reward")))
reward_surrogate = list(np.loadtxt(os.path.join(os.path.join(LOG_DIR, str(err)), "surrogate_reward")))
plt.plot(smooth(reward_noisy), linewidth=1.5, c=sns.color_palette()[cnt+1], label="noisy (" + str(err) + ")")
plt.plot(reward_noisy, alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+1])
plt.plot(smooth(reward_surrogate), linewidth=1.5, c=sns.color_palette()[cnt+2], label="surrogate (" + str(err) + ")")
plt.plot(reward_surrogate, alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+2])
cnt += 2
plt.ylabel('reward per episode')
plt.xlabel('episode')
plt.title('Pendulum-v0 (reward)')
plt.legend(loc='best')
# plt.show()
plt.savefig(os.path.join(LOG_DIR, "Pendulum-v0-reward-all (DDPG).png"))
def plot_ddpg_pendulum(weight=0.2):
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
plt.plot(smooth(list(history_normal['episode_reward'] / 200.0)), linewidth=1.5, c=sns.color_palette()[0], label="normal")
plt.plot(list(history_normal['episode_reward'] / 200.0), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
reward_noisy = list(np.loadtxt(os.path.join(os.path.join(LOG_DIR, str(weight)), "noisy_reward")))
reward_surrogate = list(np.loadtxt(os.path.join(os.path.join(LOG_DIR, str(weight)), "surrogate_reward")))
plt.plot(smooth(reward_noisy), linewidth=1.5, c=sns.color_palette()[1], label="noisy")
plt.plot(reward_noisy, alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(smooth(reward_surrogate), linewidth=1.5, c=sns.color_palette()[2], label="surrogate")
plt.plot(reward_surrogate, alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('reward per episode')
plt.xlabel('episode')
plt.title('Pendulum-v0 (reward-' + str(weight) + ")")
plt.legend(loc='best')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "Pendulum-v0-reward-" + str(weight) + " (DDPG).png"))
def plot_naf_pendulum_all():
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
plt.plot(smooth(list(history_normal['episode_reward'] / 2.0)), linewidth=1.5, c=sns.color_palette()[0], label="normal")
plt.plot(list(history_normal['episode_reward'] / 2.0), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
cnt = 0
for err in [0.2, 0.4, 0.5]:
reward_noisy = list(np.loadtxt(os.path.join(os.path.join(LOG_DIR, str(err)), "noisy_reward")))
reward_surrogate = list(np.loadtxt(os.path.join(os.path.join(LOG_DIR, str(err)), "surrogate_reward")))
plt.plot(smooth(reward_noisy), linewidth=1.5, c=sns.color_palette()[cnt+1], label="noisy (" + str(err) + ")")
plt.plot(reward_noisy, alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+1])
plt.plot(smooth(reward_surrogate), linewidth=1.5, c=sns.color_palette()[cnt+2], label="surrogate (" + str(err) + ")")
plt.plot(reward_surrogate, alpha=0.4, linewidth=0.8, c=sns.color_palette()[cnt+2])
cnt += 2
plt.ylabel('reward per episode')
plt.xlabel('episode')
plt.title('Pendulum-v0 (reward)')
plt.legend(loc='best')
# plt.show()
plt.savefig(os.path.join(LOG_DIR, "Pendulum-v0-reward-all (NAF).png"))
def plot_naf_pendulum(weight=0.2):
history_normal = pandas.read_csv(os.path.join(LOG_DIR, "normal.csv"))
plt.plot(smooth(list(history_normal['episode_reward'] / 2.0)), linewidth=1.5, c=sns.color_palette()[0], label="normal")
plt.plot(list(history_normal['episode_reward'] / 2.0), alpha=0.4, linewidth=0.8, c=sns.color_palette()[0])
reward_noisy = list(np.loadtxt(os.path.join(os.path.join(LOG_DIR, str(weight)), "noisy_reward")))
reward_surrogate = list(np.loadtxt(os.path.join(os.path.join(LOG_DIR, str(weight)), "surrogate_reward")))
plt.plot(smooth(reward_noisy), linewidth=1.5, c=sns.color_palette()[1], label="noisy")
plt.plot(reward_noisy, alpha=0.4, linewidth=0.8, c=sns.color_palette()[1])
plt.plot(smooth(reward_surrogate), linewidth=1.5, c=sns.color_palette()[2], label="surrogate")
plt.plot(reward_surrogate, alpha=0.4, linewidth=0.8, c=sns.color_palette()[2])
plt.ylabel('reward per episode')
plt.xlabel('episode')
plt.title('Pendulum-v0 (reward-' + str(weight) + ")")
plt.legend(loc='best')
# plt.show()
plt.savefig(os.path.join(os.path.join(LOG_DIR, str(weight)), "Pendulum-v0-reward-" + str(weight) + " (NAF).png"))
def plot():
if "qlearn" in LOG_DIR and "cartpole" in LOG_DIR:
plot_qlearn_cartpole(weight=WEIGHT)
elif "dqn" in LOG_DIR and "cartpole" in LOG_DIR:
plot_dqn_cartpole(weight=WEIGHT)
elif "sarsa" in LOG_DIR and "cartpole" in LOG_DIR:
plot_sarsa_cartpole(weight=WEIGHT)
elif "cem" in LOG_DIR and "cartpole" in LOG_DIR:
plot_cem_cartpole(weight=WEIGHT)
elif "ddpg" in LOG_DIR and "pendulum" in LOG_DIR:
plot_ddpg_pendulum(weight=WEIGHT)
elif "naf" in LOG_DIR and "pendulum" in LOG_DIR:
plot_naf_pendulum(weight=WEIGHT)
else:
raise NotImplementedError
def plot_all():
if "qlearn" in LOG_DIR and "cartpole" in LOG_DIR:
plot_qlearn_cartpole_all()
elif "dqn" in LOG_DIR and "cartpole" in LOG_DIR:
plot_dqn_cartpole_all()
elif "sarsa" in LOG_DIR and "cartpole" in LOG_DIR:
plot_sarsa_cartpole_all()
elif "cem" in LOG_DIR and "cartpole" in LOG_DIR:
plot_cem_cartpole_all()
elif "ddpg" in LOG_DIR and "pendulum" in LOG_DIR:
plot_ddpg_pendulum_all()
elif "naf" in LOG_DIR and "pendulum" in LOG_DIR:
plot_naf_pendulum_all()
else:
raise NotImplementedError
if __name__ == "__main__":
if FLAGS.all:
plot_all()
else:
plot()
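# Editorial sketch (not part of the original script): the helpers above imply
# a LOG_DIR layout like the one below, inferred from the os.path.join calls;
# treat the concrete names as assumptions.
#
#   <LOG_DIR>/                      # e.g. logs/ddpg_pendulum
#       normal.csv                  # training history of the noise-free run
#       <weight>/noisy_reward       # np.savetxt-style trace per error level
#       <weight>/surrogate_reward   # weight in {0.2, 0.4, 0.5}
#
# plot() renders the single error level given by WEIGHT; plot_all() overlays
# every level in one figure alongside the normal baseline.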
| 54.724324 | 152 | 0.6774 | 3,211 | 20,248 | 4.113049 | 0.035503 | 0.047702 | 0.061331 | 0.109033 | 0.935489 | 0.923071 | 0.923071 | 0.923071 | 0.923071 | 0.923071 | 0 | 0.02782 | 0.130136 | 20,248 | 369 | 153 | 54.872629 | 0.722024 | 0.005927 | 0 | 0.708475 | 0 | 0 | 0.160618 | 0.006512 | 0 | 0 | 0 | 0 | 0 | 1 | 0.050847 | false | 0 | 0.020339 | 0 | 0.074576 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9d22fc0b20574ed690587203238d9851d4ea7aa2 | 175 | py | Python | django_backend/forms/__init__.py | holg/django_backend | 6cef76a378664e6621619862e6db476788a58992 | [
"BSD-3-Clause"
] | 3 | 2015-09-10T07:10:49.000Z | 2021-03-16T07:17:58.000Z | django_backend/forms/__init__.py | holg/django_backend | 6cef76a378664e6621619862e6db476788a58992 | [
"BSD-3-Clause"
] | 10 | 2015-09-09T13:40:24.000Z | 2021-02-27T09:12:23.000Z | django_backend/forms/__init__.py | holg/django_backend | 6cef76a378664e6621619862e6db476788a58992 | [
"BSD-3-Clause"
] | 5 | 2016-06-12T08:20:38.000Z | 2021-02-27T09:02:30.000Z | from .fields import * # noqa
from .filterforms import * # noqa
from .forms import * # noqa
from .relation_list_fields import * # noqa
from .selectrelated import * # noqa
| 29.166667 | 43 | 0.714286 | 22 | 175 | 5.590909 | 0.409091 | 0.406504 | 0.455285 | 0.325203 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 175 | 5 | 44 | 35 | 0.878571 | 0.137143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
df9dc8ab4cac73880d49af00d03973097c99a46a | 44,783 | py | Python | tests/unit/dns/rackspace/test_services.py | satroutr/poppy | 27417f86854d9e0a04726acc263ef0a2ce9f8f6e | [
"Apache-2.0"
] | 3 | 2017-07-05T20:09:59.000Z | 2018-11-27T22:02:57.000Z | tests/unit/dns/rackspace/test_services.py | satroutr/poppy | 27417f86854d9e0a04726acc263ef0a2ce9f8f6e | [
"Apache-2.0"
] | 24 | 2017-04-18T15:14:04.000Z | 2019-03-20T19:09:07.000Z | tests/unit/dns/rackspace/test_services.py | satroutr/poppy | 27417f86854d9e0a04726acc263ef0a2ce9f8f6e | [
"Apache-2.0"
] | 8 | 2017-04-03T13:24:27.000Z | 2021-11-08T20:28:10.000Z | # Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
import ddt
import mock
from oslo_config import cfg
import pyrax.exceptions as exc
from poppy.dns.rackspace import driver
from poppy.model.helpers import domain
from poppy.model import log_delivery
from poppy.model import service
from tests.unit import base
RACKSPACE_OPTIONS = [
cfg.StrOpt('username', default='',
help='Keystone Username'),
cfg.StrOpt('api_key', default='',
help='Keystone API Key'),
cfg.BoolOpt('sharding_enabled', default=True,
help='Enable Sharding?'),
cfg.IntOpt('num_shards', default=500, help='Number of Shards to use'),
cfg.IntOpt('records_limit', default=400,
help='Number of records per domain.'),
cfg.StrOpt('shard_prefix', default='cdn',
help='The shard prefix to use'),
cfg.StrOpt('url', default='mycdn.com',
help='The url for customers to CNAME to'),
cfg.StrOpt('email', help='The email to be provided to Rackspace DNS for '
'creating subdomains'),
cfg.StrOpt('auth_endpoint', default='',
help='Authentication end point for DNS'),
cfg.IntOpt('timeout', default=30, help='DNS response timeout'),
cfg.IntOpt('delay', default=1, help='DNS retry delay'),
]
RACKSPACE_GROUP = 'drivers:dns:rackspace'
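# Illustrative config sketch (editorial addition): with oslo_config, the
# options above are read from the RACKSPACE_GROUP section of a configuration
# file. The values below are placeholders, not defaults shipped with poppy.
#
#   [drivers:dns:rackspace]
#   username = my_rax_user
#   api_key = my_rax_api_key
#   sharding_enabled = True
#   num_shards = 500
#   shard_prefix = cdn
#   url = mycdn.com
#   email = dns-admin@example.com
#   timeout = 30
#   delay = 1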
@ddt.ddt
class TestServicesCreate(base.TestCase):
def setUp(self):
super(TestServicesCreate, self).setUp()
pyrax_cloud_dns_patcher = mock.patch('pyrax.cloud_dns')
pyrax_cloud_dns_patcher.start()
self.addCleanup(pyrax_cloud_dns_patcher.stop)
pyrax_set_credentials_patcher = mock.patch('pyrax.set_credentials')
pyrax_set_credentials_patcher.start()
self.addCleanup(pyrax_set_credentials_patcher.stop)
pyrax_set_setting_patcher = mock.patch('pyrax.set_setting')
pyrax_set_setting_patcher.start()
self.addCleanup(pyrax_set_setting_patcher.stop)
rs_options_patcher = mock.patch.object(
driver,
'RACKSPACE_OPTIONS',
new=RACKSPACE_OPTIONS
)
rs_options_patcher.start()
self.addCleanup(rs_options_patcher.stop)
provider = driver.DNSProvider(self.conf)
self.client = mock.Mock()
self.controller = provider.services_controller
self.controller.client = self.client
def test_create_with_no_links(self):
responders = [{
'Akamai': {
'id': str(uuid.uuid4()),
'links': []
},
'Fastly': {
'id': str(uuid.uuid4()),
'links': []
}
}]
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
dns_details = self.controller.create(responders)
for responder in responders:
for provider_name in responder:
self.assertEqual([], dns_details[provider_name]['access_urls'])
def test_create_with_provider_error(self):
responders = [{
'Akamai': {
'error': 'Create service failed with Akamai',
'error_detail': 'Error details'
},
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'blog.mocksite.com',
'href': u'blog.mocksite.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'test.mocksite.com',
'href': u'test.mocksite.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
dns_details = self.controller.create(responders)
for responder in responders:
for provider_name in responder:
self.assertIsNotNone(dns_details[provider_name]['error'])
self.assertIsNotNone(
dns_details[provider_name]['error_detail'])
def test_create_with_subdomain_not_found_exception(self):
domain_names = [u'blog.mocksite.com', u'test.mocksite.com']
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'blog.mocksite.com',
'href': u'blog.mocksite.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'test.mocksite.com',
'href': u'test.mocksite.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
self.client.find = mock.Mock(
side_effect=exc.NotFound('Subdomain not found'))
dns_details = self.controller.create(responders)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_name in domain_names:
self.assertIsNotNone(
access_urls_map[provider_name][domain_name])
def test_create_with_generic_exception(self):
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'blog.mocksite.com',
'href': u'blog.mocksite.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'test.mocksite.com',
'href': u'test.mocksite.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
subdomain = mock.Mock()
subdomain.add_records = mock.Mock(
side_effect=exc.NotFound('Subdomain not found'))
self.client.find = mock.Mock(return_value=subdomain)
dns_details = self.controller.create(responders)
for responder in responders:
for provider_name in responder:
self.assertIsNotNone(dns_details[provider_name]['error'])
self.assertIsNotNone(
dns_details[provider_name]['error_detail'])
def test_create(self):
domain_names = [u'blog.mocksite.com', u'test.mocksite.com']
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'blog.mocksite.com',
'href': u'blog.mocksite.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'test.mocksite.com',
'href': u'test.mocksite.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'href': 'https://cloudfiles.rackspace/CONTAINER/OBJ',
'rel': 'log_delivery'
},
{
'domain': u'shared.mocksite.com',
'href': u'test.mocksite.com.global.prod.fastly.net',
'certificate': 'shared',
'rel': 'access_url'
},
]}
}]
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
dns_details = self.controller.create(responders)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_name in domain_names:
self.assertIsNotNone(
access_urls_map[provider_name][domain_name])
@ddt.ddt
class TestServicesDelete(base.TestCase):
def setUp(self):
super(TestServicesDelete, self).setUp()
pyrax_cloud_dns_patcher = mock.patch('pyrax.cloud_dns')
pyrax_cloud_dns_patcher.start()
self.addCleanup(pyrax_cloud_dns_patcher.stop)
pyrax_set_credentials_patcher = mock.patch('pyrax.set_credentials')
pyrax_set_credentials_patcher.start()
self.addCleanup(pyrax_set_credentials_patcher.stop)
pyrax_set_setting_patcher = mock.patch('pyrax.set_setting')
pyrax_set_setting_patcher.start()
self.addCleanup(pyrax_set_setting_patcher.stop)
rs_options_patcher = mock.patch.object(
driver,
'RACKSPACE_OPTIONS',
new=RACKSPACE_OPTIONS
)
rs_options_patcher.start()
self.addCleanup(rs_options_patcher.stop)
provider = driver.DNSProvider(self.conf)
self.client = mock.Mock()
self.controller = provider.services_controller
self.controller.client = self.client
def test_delete_with_exception_subdomain_not_found(self):
akamai_access_urls = [
{
u'provider_url': u'mycdn.com.v2.mdc.edgesuite.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.com.cdn80.mycdn.com'
}
]
fastly_access_urls = [
{
u'provider_url': u'mocksite.com.global.fastly.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.cdn80.mycdn.com'
}
]
akamai_details = mock.Mock()
akamai_details.access_urls = akamai_access_urls
fastly_details = mock.Mock()
fastly_details.access_urls = fastly_access_urls
provider_details = {
'Akamai': akamai_details,
'Fastly': fastly_details
}
self.client.find = mock.Mock(
side_effect=exc.NotFound('Subdomain not found'))
dns_responder = self.controller.delete(provider_details)
for provider_name in provider_details:
self.assertIsNotNone(dns_responder[provider_name]['error'])
self.assertIsNotNone(dns_responder[provider_name]['error_detail'])
self.assertIsNotNone(
dns_responder[provider_name]['error_class']
)
def test_delete_with_generic_exception(self):
akamai_access_urls = [
{
u'provider_url': u'mycdn.com.v2.mdc.edgesuite.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.com.cdn80.mycdn.com'
}
]
fastly_access_urls = [
{
u'provider_url': u'mocksite.com.global.fastly.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.cdn80.mycdn.com'
}
]
akamai_details = mock.Mock()
akamai_details.access_urls = akamai_access_urls
fastly_details = mock.Mock()
fastly_details.access_urls = fastly_access_urls
provider_details = {
'Akamai': akamai_details,
'Fastly': fastly_details
}
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
self.client.search_records = mock.Mock(
side_effect=Exception('Generic exception'))
dns_responder = self.controller.delete(provider_details)
for provider_name in provider_details:
self.assertIsNotNone(dns_responder[provider_name]['error'])
self.assertIsNotNone(dns_responder[provider_name]['error_detail'])
self.assertIsNotNone(
dns_responder[provider_name]['error_class']
)
def test_delete_no_records_found(self):
akamai_access_urls = [
{
u'provider_url': u'mycdn.com.v2.mdc.edgesuite.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.com.cdn80.mycdn.com'
}
]
fastly_access_urls = [
{
u'provider_url': u'mocksite.com.global.fastly.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.cdn80.mycdn.com'
}
]
akamai_details = mock.Mock()
akamai_details.access_urls = akamai_access_urls
fastly_details = mock.Mock()
fastly_details.access_urls = fastly_access_urls
provider_details = {
'Akamai': akamai_details,
'Fastly': fastly_details
}
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
self.client.search_records = mock.Mock(return_value=[])
dns_responder = self.controller.delete(provider_details)
for provider_name in provider_details:
self.assertEqual({}, dns_responder[provider_name])
def test_delete_with_more_than_one_record_found(self):
akamai_access_urls = [
{
u'provider_url': u'mycdn.com.v2.mdc.edgesuite.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.com.cdn80.mycdn.com'
}
]
fastly_access_urls = [
{
u'provider_url': u'mocksite.com.global.fastly.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.cdn80.mycdn.com'
},
{
u'provider_url': u'test.com.global.fastly.net',
u'domain': u'mocksite.com'
}
]
akamai_details = mock.Mock()
akamai_details.access_urls = akamai_access_urls
fastly_details = mock.Mock()
fastly_details.access_urls = fastly_access_urls
provider_details = {
'Akamai': akamai_details,
'Fastly': fastly_details
}
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
records = [mock.Mock(), mock.Mock()]
self.client.search_records = mock.Mock(return_value=records)
dns_responder = self.controller.delete(provider_details)
for provider_name in provider_details:
self.assertIsNotNone(dns_responder[provider_name]['error'])
self.assertIsNotNone(dns_responder[provider_name]['error_detail'])
def test_delete_with_delete_exception(self):
akamai_access_urls = [
{
u'provider_url': u'mycdn.com.v2.mdc.edgesuite.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.com.cdn80.mycdn.com'
}
]
fastly_access_urls = [
{
u'provider_url': u'mocksite.com.global.fastly.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.cdn80.mycdn.com'
}
]
akamai_details = mock.Mock()
akamai_details.access_urls = akamai_access_urls
fastly_details = mock.Mock()
fastly_details.access_urls = fastly_access_urls
provider_details = {
'Akamai': akamai_details,
'Fastly': fastly_details
}
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
record = mock.Mock()
record.delete = mock.Mock(
side_effect=exc.NotFound('Generic exception'))
self.client.search_records = mock.Mock(return_value=[record])
dns_responder = self.controller.delete(provider_details)
for provider_name in provider_details:
self.assertIsNotNone(dns_responder[provider_name]['error'])
self.assertIsNotNone(dns_responder[provider_name]['error_detail'])
self.assertIsNotNone(
dns_responder[provider_name]['error_class']
)
def test_delete(self):
akamai_access_urls = [
{
u'provider_url': u'mycdn.com.v2.mdc.edgesuite.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.com.cdn80.mycdn.com'
}
]
fastly_access_urls = [
{
u'provider_url': u'mocksite.com.global.fastly.net',
u'domain': u'mocksite.com',
u'operator_url': u'mocksite.cdn80.mycdn.com'
}
]
akamai_details = mock.Mock()
akamai_details.access_urls = akamai_access_urls
fastly_details = mock.Mock()
fastly_details.access_urls = fastly_access_urls
provider_details = {
'Akamai': akamai_details,
'Fastly': fastly_details
}
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
record = mock.Mock()
self.client.search_records = mock.Mock(return_value=[record])
dns_responder = self.controller.delete(provider_details)
for provider_name in provider_details:
self.assertEqual({}, dns_responder[provider_name])
@ddt.ddt
class TestServicesUpdate(base.TestCase):
def setUp(self):
super(TestServicesUpdate, self).setUp()
pyrax_cloud_dns_patcher = mock.patch('pyrax.cloud_dns')
pyrax_cloud_dns_patcher.start()
self.addCleanup(pyrax_cloud_dns_patcher.stop)
pyrax_set_credentials_patcher = mock.patch('pyrax.set_credentials')
pyrax_set_credentials_patcher.start()
self.addCleanup(pyrax_set_credentials_patcher.stop)
pyrax_set_setting_patcher = mock.patch('pyrax.set_setting')
pyrax_set_setting_patcher.start()
self.addCleanup(pyrax_set_setting_patcher.stop)
rs_options_patcher = mock.patch.object(
driver,
'RACKSPACE_OPTIONS',
new=RACKSPACE_OPTIONS
)
rs_options_patcher.start()
self.addCleanup(rs_options_patcher.stop)
self.client = mock.Mock()
provider = driver.DNSProvider(self.conf)
self.controller = provider.services_controller
self.controller.client = self.client
self.domains_old = [domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com')]
self.origins_old = []
fastly_access_urls_old = [
{
u'provider_url': u'test.domain.com.global.prod.fastly.net',
u'domain': u'test.domain.com',
u'operator_url': u'test.domain.com.cdn80.mycdn.com'
},
{
u'provider_url': u'blog.domain.com.global.prod.fastly.net',
u'domain': u'blog.domain.com',
u'operator_url': u'blog.domain.com.cdn80.mycdn.com'
},
{
"log_delivery": [
{
"internalURL": "https://internal.storage.com",
"publicURL": "https://external.storage.com"
}
]
}
]
fastly_provider_details_old = mock.Mock()
fastly_provider_details_old.access_urls = fastly_access_urls_old
provider_details_old = {
'Fastly': fastly_provider_details_old
}
self.service_old = service.Service(service_id=uuid.uuid4(),
name='myservice',
domains=self.domains_old,
origins=self.origins_old,
flavor_id='standard')
self.service_old.provider_details = provider_details_old
def test_update_add_domains_with_dns_exception(self):
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
client = mock.Mock()
client.find = mock.Mock(
side_effect=Exception('DNS Exception'))
self.controller.client = client
domains_new = [domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com'),
domain.Domain('pictures.domain.com')]
service_updates = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'pictures.domain.com',
'href': u'pictures.domain.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
dns_details = self.controller.update(self.service_old,
service_updates,
responders)
for responder in responders:
for provider_name in responder:
self.assertIsNotNone(dns_details[provider_name]['error'])
self.assertIsNotNone(
dns_details[provider_name]['error_detail'])
self.assertIsNotNone(
dns_details[provider_name]['error_class']
)
def test_update_add_domains_with_no_domains_in_update(self):
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
client = mock.Mock()
self.controller.client = client
service_updates = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=[],
origins=[],
flavor_id='standard'
)
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'pictures.domain.com',
'href': u'pictures.domain.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
dns_details = self.controller.update(
self.service_old,
service_updates,
responders
)
access_urls_map = {}
for provider_name in self.service_old.provider_details:
provider_detail = self.service_old.provider_details[provider_name]
access_urls = provider_detail.access_urls
access_urls_map[provider_name] = {'access_urls': access_urls}
self.assertEqual(access_urls_map, dns_details)
def test_update_remove_domains_provider_error(self):
domains_new = [domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com'),
domain.Domain('pictures.domain.com')]
service_new = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'error': 'Create service failed'
}
}]
dns_details = self.controller.update(self.service_old,
service_new,
responders)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
if 'operator_url' in access_urls:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_old in self.domains_old:
self.assertIsNotNone(
access_urls_map[provider_name][domain_old.domain])
def test_update_remove_domains_with_subdomain_not_found_exception(self):
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
client = mock.Mock()
client.find = mock.Mock(
side_effect=exc.NotFound('Subdomain not found'))
records = [mock.Mock(), mock.Mock()]
client.search_records = mock.Mock(return_value=records)
self.controller.client = client
domains_new = [domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com')]
service_updates = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
dns_details = self.controller.update(self.service_old,
service_updates,
responders)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
if 'operator_url' in access_urls:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_new in domains_new:
self.assertIsNotNone(
access_urls_map[provider_name][domain_new.domain])
def test_update_remove_domains(self):
domains_new = [domain.Domain('test.domain.com')]
service_updates = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
dns_details = self.controller.update(self.service_old,
service_updates,
responders)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_new in domains_new:
self.assertIsNotNone(
access_urls_map[provider_name][domain_new.domain])
def test_update_same_domains(self):
service_updates = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=self.domains_old,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
dns_details = self.controller.update(self.service_old,
service_updates,
responders)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
if 'operator_url' in access_urls:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_old in self.domains_old:
self.assertIsNotNone(
access_urls_map[provider_name][domain_old.domain])
def test_update_add_domains(self):
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
domains_new = [domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com'),
domain.Domain('pictures.domain.com')]
service_new = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'pictures.domain.com',
'href': u'pictures.domain.com.global.prod.fastly.net',
'rel': 'access_url',
'certificate': 'san',
'old_operator_url': 'old.operator.url.cdn99.mycdn.com'
}
]}
}]
dns_details = self.controller.update(self.service_old,
service_new,
responders)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_new in domains_new:
self.assertIsNotNone(
access_urls_map[provider_name][domain_new.domain])
def test_update_add_domains_http_to_https_upgrade(self):
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
domains_new = [
domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com')
]
self.service_old.domains = domains_new
service_new = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url',
'certificate': 'san',
'old_operator_url': 'old.operator.url.cdn99.mycdn.com'
}
]}
}]
dns_details = self.controller.update(
self.service_old,
service_new,
responders
)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_new in domains_new:
self.assertIsNotNone(
access_urls_map[provider_name][domain_new.domain])
@mock.patch('re.match')
def test_update_add_domains_https_upgrade_regex_exception(self, re_mock):
re_mock.return_value.groups.return_value = (None,)
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
domains_new = [
domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com')
]
self.service_old.domains = domains_new
service_new = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url',
'certificate': 'san',
'old_operator_url': 'old.operator.url.cdn99.mycdn.com'
}
]}
}]
dns_details = self.controller.update(
self.service_old,
service_new,
responders
)
self.assertTrue('error' in dns_details['Fastly'])
self.assertTrue('error_detail' in dns_details['Fastly'])
self.assertTrue('error_class' in dns_details['Fastly'])
self.assertTrue('ValueError' in dns_details['Fastly']['error_class'])
def test_update_add_domains_https_upgrade_create_cname_record(self):
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
subdomain.find_record.side_effect = exc.DomainRecordNotFound(
"Mock -- couldn't find cname record."
)
self.client.find = mock.Mock(return_value=subdomain)
domains_new = [
domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com')
]
self.service_old.domains = domains_new
service_new = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard')
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url',
'certificate': 'san',
'old_operator_url': 'old.operator.url.cdn99.mycdn.com'
}
]}
}]
dns_details = self.controller.update(
self.service_old,
service_new,
responders
)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
for responder in responders:
for provider_name in responder:
for domain_new in domains_new:
self.assertIsNotNone(
access_urls_map[provider_name][domain_new.domain])
def test_update_add_domains_keeps_log_delivery(self):
subdomain = mock.Mock()
subdomain.add_records = mock.Mock()
self.client.find = mock.Mock(return_value=subdomain)
domains_new = [domain.Domain('test.domain.com'),
domain.Domain('blog.domain.com'),
domain.Domain('pictures.domain.com')]
service_new = service.Service(
service_id=self.service_old.service_id,
name='myservice',
domains=domains_new,
origins=[],
flavor_id='standard',
log_delivery=log_delivery.LogDelivery(enabled=True)
)
self.service_old.log_delivery = log_delivery.LogDelivery(enabled=True)
responders = [{
'Fastly': {
'id': str(uuid.uuid4()),
'links': [
{
'domain': u'test.domain.com',
'href': u'test.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'blog.domain.com',
'href': u'blog.domain.com.global.prod.fastly.net',
'rel': 'access_url'
},
{
'domain': u'pictures.domain.com',
'href': u'pictures.domain.com.global.prod.fastly.net',
'rel': 'access_url'
}
]}
}]
dns_details = self.controller.update(self.service_old,
service_new,
responders)
access_urls_map = {}
for provider_name in dns_details:
access_urls_map[provider_name] = {}
access_urls_list = dns_details[provider_name]['access_urls']
for access_urls in access_urls_list:
if 'operator_url' in access_urls:
access_urls_map[provider_name][access_urls['domain']] = (
access_urls['operator_url'])
if 'log_delivery' in access_urls:
for ld_url in access_urls['log_delivery']:
self.assertIsNotNone(ld_url['internalURL'])
self.assertIsNotNone(ld_url['publicURL'])
for responder in responders:
for provider_name in responder:
for domain_new in domains_new:
self.assertIsNotNone(
access_urls_map[provider_name][domain_new.domain])
def test_gather_cname_links_positive(self):
cname_links = self.controller.gather_cname_links(self.service_old)
# TODO(isaacm): Add assertions on the returned object
self.assertIsNotNone(cname_links)
def test_enable_positive(self):
responder_enable = self.controller.enable(self.service_old)
# TODO(isaacm): Add assertions on the returned object
self.assertIsNotNone(responder_enable)
def test_disable_positive(self):
responder_disable = self.controller.disable(self.service_old)
# TODO(isaacm): Add assertions on the returned object
self.assertIsNotNone(responder_disable)
def test_is_shard_full_shard_not_found(self):
self.client.find.side_effect = exc.NotFound(404)
self.assertTrue(self.controller.is_shard_full('shard_name'))
def test_is_shard_full_false(self):
find_mock = mock.Mock()
find_mock.list_records.return_value = range(100)
self.client.find.return_value = find_mock
self.client.list_records_next_page.side_effect = exc.NoMoreResults
self.assertFalse(self.controller.is_shard_full('shard_name'))
def test_is_shard_full_true(self):
find_mock = mock.Mock()
find_mock.list_records.return_value = range(600)
self.client.find.return_value = find_mock
self.client.list_records_next_page.side_effect = exc.NoMoreResults
self.assertTrue(self.controller.is_shard_full('shard_name'))
def test_is_shard_full_paginate_true(self):
find_mock = mock.Mock()
find_mock.list_records.return_value = range(300)
self.client.find.return_value = find_mock
self.client.list_records_next_page.side_effect = [
range(300),
exc.NoMoreResults,
]
self.assertTrue(self.controller.is_shard_full('shard_name'))
| 37.44398 | 79 | 0.533305 | 4,446 | 44,783 | 5.123482 | 0.061404 | 0.067167 | 0.02454 | 0.028359 | 0.86549 | 0.854691 | 0.83195 | 0.813995 | 0.809298 | 0.798894 | 0 | 0.003234 | 0.364692 | 44,783 | 1,195 | 80 | 37.475314 | 0.797406 | 0.015877 | 0 | 0.720976 | 0 | 0 | 0.156036 | 0.052163 | 0 | 0 | 0 | 0.000837 | 0.043902 | 1 | 0.03122 | false | 0 | 0.009756 | 0 | 0.043902 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5f27f43884fce687470a5e727a23465a2a2a3ef2 | 147 | py | Python | examples/hex/ex3.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | [
"MIT"
] | null | null | null | examples/hex/ex3.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | [
"MIT"
] | null | null | null | examples/hex/ex3.py | mcorne/python-by-example | 15339c0909c84b51075587a6a66391100971c033 | [
"MIT"
] | null | null | null | print('%#x' % 255, '%x' % 255, '%X' % 255)
print(format(255, '#x'), format(255, 'x'), format(255, 'X'))
print(f'{255:#x}', f'{255:x}', f'{255:X}')
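# All three lines above print the same three values: 0xff ff FF
# ('#' adds the 0x prefix; 'x'/'X' select lower-/upper-case hex digits.)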
| 36.75 | 60 | 0.489796 | 27 | 147 | 2.666667 | 0.185185 | 0.444444 | 0.416667 | 0.222222 | 0.625 | 0.625 | 0 | 0 | 0 | 0 | 0 | 0.209302 | 0.122449 | 147 | 3 | 61 | 49 | 0.348837 | 0 | 0 | 0 | 0 | 0 | 0.22449 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
a065ee8b24c553119b40cb3d0cce2fc958533eb0 | 2,261 | py | Python | src/profiles/models.py | salemzii/ChopFast | 95ea88387ecfdb56bd643970b69425b1a1c6f388 | [
"MIT"
] | null | null | null | src/profiles/models.py | salemzii/ChopFast | 95ea88387ecfdb56bd643970b69425b1a1c6f388 | [
"MIT"
] | null | null | null | src/profiles/models.py | salemzii/ChopFast | 95ea88387ecfdb56bd643970b69425b1a1c6f388 | [
"MIT"
] | null | null | null | from django.db import models
from PIL import Image
from django.contrib.auth.models import User
import uuid
class Customer(models.Model):
id = models.UUIDField(
default=uuid.uuid4,
primary_key=True,
editable=False
)
user = models.OneToOneField(User, on_delete=models.CASCADE)
image = models.ImageField(default='default.jpg', blank=True, upload_to='profile_pics')
address = models.CharField(max_length=75)
phone_number = models.IntegerField(default=0000, null=True, blank=True)
def __str__(self):
# the f-string is already interpolated; the old .format(self) call was a
# no-op that could raise if a username contained '{' or '}'
return f"{self.user.username}'s profile."
class Rider(models.Model):
id = models.UUIDField(
default=uuid.uuid4,
primary_key=True,
editable=False
)
user = models.OneToOneField(User, on_delete=models.CASCADE)
image = models.ImageField(default='default.jpg', upload_to='profile_pics')
address = models.CharField(max_length=75)
is_active = models.BooleanField(default=False)
phone_number = models.IntegerField(default=0000, null=True, blank=True)
def __str__(self):
return f"{self.user.username}'s profile."
class Staff(models.Model):
id = models.UUIDField(
default=uuid.uuid4,
primary_key=True,
editable=False
)
user = models.OneToOneField(User, on_delete=models.CASCADE)
image = models.ImageField(default='default.jpg', upload_to='profile_pics')
address = models.CharField(max_length=75)
is_active = models.BooleanField(default=False)
phone_number = models.IntegerField(default=0000, null=True, blank=True)
def __str__(self):
return f"{self.user.username}'s profile."
class Supplier(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
name = models.CharField(max_length=120, unique=True)
address = models.CharField(max_length=220)
phone_number = models.IntegerField(default=0000, null=True, blank=True)
created_date = models.DateField(auto_now_add=True)
def __str__(self):
return f"{self.user.username}'s profile."
# Create your models here.
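# Editorial note: the PIL import at the top is unused in this file as written;
# it usually accompanies a save() override that downsizes uploaded avatars.
# A minimal sketch of that pattern (the 300 px cap is an assumption):
#
#   def save(self, *args, **kwargs):
#       super().save(*args, **kwargs)
#       img = Image.open(self.image.path)
#       if img.height > 300 or img.width > 300:
#           img.thumbnail((300, 300))
#           img.save(self.image.path)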
| 31.84507 | 91 | 0.698806 | 283 | 2,261 | 5.431095 | 0.254417 | 0.029278 | 0.058556 | 0.078074 | 0.843852 | 0.823683 | 0.823683 | 0.823683 | 0.792453 | 0.792453 | 0 | 0.016885 | 0.18797 | 2,261 | 70 | 92 | 32.3 | 0.820261 | 0.010615 | 0 | 0.722222 | 0 | 0 | 0.086353 | 0.039374 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.074074 | 0 | 0.703704 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
264b0ecae9b9970237b7924c1887171bc8093929 | 8,319 | py | Python | tracklib/filter/eof.py | xueyuelei/tracklib | d33912baf1bebd1605d5e9c8dfc31484c96628cc | [
"MIT"
] | 5 | 2020-03-04T11:36:19.000Z | 2020-06-21T16:49:45.000Z | tracklib/filter/eof.py | xueyuelei/tracklib | d33912baf1bebd1605d5e9c8dfc31484c96628cc | [
"MIT"
] | null | null | null | tracklib/filter/eof.py | xueyuelei/tracklib | d33912baf1bebd1605d5e9c8dfc31484c96628cc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Extended object tracker
REFERENCE:
[1].
'''
from __future__ import division, absolute_import, print_function
__all__ = ['KochEOFilter', 'FeldmannEOFilter', 'LanEOFilter']
import numpy as np
import scipy.linalg as lg
from .base import EOFilterBase
class KochEOFilter(EOFilterBase):
'''
Extended object filter using the Koch (random matrix) approach
'''
def __init__(self, F, H, D, interval, tau, dim=2):
self._F = F.copy()
self._H = H.copy()
self._D = D.copy()
self._at = np.exp(-interval / tau) # attenuation factor
self._dim = dim
self._init = False    # set to True by init(); guards predict()/correct()
def init(self, state, cov, df, extension):
self._df = df
self._scale = extension * (df - self._dim - 1)
self._single_cov = cov.copy()
self._state = state.copy()
self._cov = np.kron(extension, cov)
self._ext = extension.copy()
self._init = True
def predict(self):
if not self._init:
raise RuntimeError('filter must be initialized with init() before use')
# predict inverse wishart parameters
df = self._df
self._df = self._at * self._df
w = (self._df - self._dim - 1) / (df - self._dim - 1)
self._scale = w * self._scale
# predict joint state
self._ext = self._scale / (self._df - self._dim - 1) * 2
self._single_cov = self._F @ self._single_cov @ self._F.T + self._D
self._single_cov = (self._single_cov + self._single_cov.T) / 2
df_tilde = self._df + len(self._state) // self._dim + len(self._state)
self._cov = np.kron(self._ext, self._single_cov) / (df_tilde - 2)
F_tilde = np.kron(np.eye(self._dim), self._F)
self._state = np.dot(F_tilde, self._state)
return self._state, self._cov, self._ext
def correct(self, zs):
if not self._init:
raise RuntimeError('filter must be initialized with init() before use')
n = len(zs)
z_mean = np.mean(zs, axis=0)
eps = z_mean - np.dot(np.kron(np.eye(self._dim), self._H), self._state)
z_center = zs - z_mean
Z = np.dot(z_center.T, z_center)
S = self._H @ self._single_cov @ self._H.T + 1 / n
S = (S + S.T) / 2
S_inv = lg.inv(S)
K = self._single_cov @ self._H.T @ S_inv
N = S_inv * np.outer(eps, eps)
# correct inverse wishart parameters
self._df += n
self._scale += N + Z
# correct joint state
self._ext = self._scale / (self._df - self._dim - 1) * 2
self._single_cov -= K @ S @ K.T
self._single_cov = (self._single_cov + self._single_cov.T) / 2
df_tilde = self._df + len(self._state) // self._dim + len(self._state)
self._cov = np.kron(self._ext, self._single_cov) / (df_tilde - 2)
K_tilde = np.kron(np.eye(self._dim), K)
self._state += np.dot(K_tilde, eps)
return self._state, self._cov, self._ext
def distance(self, zs, **kwargs):
return super().distance(zs, **kwargs)
def likelihood(self, zs, **kwargs):
return super().likelihood(zs, **kwargs)
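# Usage sketch for KochEOFilter (editorial addition; the matrices are
# assumptions for a 2-D nearly-constant-velocity target, any F, H, D with
# compatible shapes will do):
#
#   import numpy as np
#   T = 1.0
#   F = np.array([[1.0, T], [0.0, 1.0]])   # per-axis kinematic transition
#   H = np.array([[1.0, 0.0]])             # position-only measurement
#   D = 0.01 * np.eye(2)                   # per-axis process noise
#   eof = KochEOFilter(F, H, D, interval=T, tau=10.0)
#   eof.init(state=np.zeros(4), cov=np.eye(2), df=10.0, extension=np.eye(2))
#   state, cov, ext = eof.predict()
#   state, cov, ext = eof.correct(zs)      # zs: (n, 2) array of detections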
class FeldmannEOFilter(EOFilterBase):
'''
Extended object filter using the Feldmann (random matrix) approach
'''
def __init__(self, F, H, Q, R, interval, tau, dim=2):
self._F = F.copy()
self._H = H.copy()
self._Q = Q.copy()
self._R = R.copy()
self._at = np.exp(-interval / tau) # attenuation
self._dim = dim
self._init = False
def init(self, state, cov, df, extension):
self._df = df - self._dim - 1
self._state = state.copy()
self._cov = cov.copy()
self._ext = extension.copy()
self._init = True
def predict(self):
if not self._init:
raise RuntimeError('filter must be initialized with init() before use')
self._state = np.dot(self._F, self._state)
self._cov = self._F @ self._cov @ self._F.T + self._Q
self._cov = (self._cov + self._cov.T) / 2
self._ext = self._ext    # extension prediction is the identity in this model
self._df = 2 + self._at * (self._df - 2)
return self._state, self._cov, self._ext
def correct(self, zs):
if not self._init:
raise RuntimeError('filter must be initialized with init() before use')
n = len(zs)
z_mean = np.mean(zs, axis=0)
eps = z_mean - np.dot(self._H, self._state)
z_center = zs - z_mean
Z = np.dot(z_center.T, z_center)
Y = self._ext / 4 + self._R
S = self._H @ self._cov @ self._H.T + Y / n
S = (S + S.T) / 2
X_chol = lg.cholesky(self._ext, lower=True)
S_chol = lg.inv(lg.cholesky(S, lower=True))
Y_chol = lg.inv(lg.cholesky(Y, lower=True))
N = np.outer(eps, eps)
N_hat = X_chol @ S_chol @ N @ S_chol.T @ X_chol.T
Z_hat = X_chol @ Y_chol @ Z @ Y_chol.T @ X_chol.T
df = self._df
self._df += n
self._ext = (df * self._ext + N_hat + Z_hat) / self._df
K = self._cov @ self._H.T @ lg.inv(S)
self._state += K @ eps
self._cov -= K @ S @ K.T
self._cov = (self._cov + self._cov.T) / 2
return self._state, self._cov, self._ext
def distance(self, zs, **kwargs):
return super().distance(zs, **kwargs)
def likelihood(self, zs, **kwargs):
return super().likelihood(zs, **kwargs)
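# Editorial note: unlike KochEOFilter, FeldmannEOFilter carries an explicit
# sensor-noise term R, so the innovation covariance in correct() above is
#   S = H P H' + (X / 4 + R) / n
# and the extension update blends the old estimate with the scaled
# innovation and spread terms,
#   X+ = (df * X + N_hat + Z_hat) / (df + n),
# which is exactly what the code computes.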
class LanEOFilter(EOFilterBase):
'''
Extended object filter using the Lan (random matrix) approach
'''
def __init__(self, F, H, D, R, delta, dim=2):
self._F = F.copy()
self._H = H.copy()
self._D = D.copy()
self._R = R.copy()
self._delta = delta
self._dim = dim
self._init = False
def init(self, state, cov, df, extension):
self._df = df
self._scale = extension * (df - 2 * self._dim - 2)
self._single_cov = cov.copy()
self._state = state.copy()
self._cov = np.kron(extension, cov)
self._ext = extension.copy()
self._init = True
def predict(self):
if not self._init:
raise RuntimeError('filter must be initialized with init() before use')
# predict inverse wishart parameters
lamb = self._df - 2 * self._dim - 2
self._df = 2 * self._delta * (lamb + 1) * (lamb - 1) * (lamb - 2) / lamb**2 / (lamb + self._delta) + 2 * self._dim + 4
self._scale = (self._df - 2 * self._dim - 2) / lamb * self._scale
# predict joint state
self._ext = self._scale / (self._df - 2 * self._dim - 2)
self._single_cov = self._F @ self._single_cov @ self._F.T + self._D
self._single_cov = (self._single_cov + self._single_cov.T) / 2
self._cov = np.kron(self._ext, self._single_cov)
F_tilde = np.kron(np.eye(self._dim), self._F)
self._state = np.dot(F_tilde, self._state)
return self._state, self._cov, self._ext
def correct(self, zs):
if not self._init:
raise RuntimeError('filter must be initialized with init() before use')
n = len(zs)
z_mean = np.mean(zs, axis=0)
eps = z_mean - np.dot(np.kron(np.eye(self._dim), self._H), self._state)
z_center = zs - z_mean
Z = np.dot(z_center.T, z_center)
B = lg.cholesky(self._ext / 4 + self._R, lower=True) @ lg.inv(lg.cholesky(self._ext, lower=True))
B_inv = lg.inv(B)
S = self._H @ self._single_cov @ self._H.T + lg.det(B)**(2 / self._dim) / n
S = (S + S.T) / 2
S_inv = lg.inv(S)
K = self._single_cov @ self._H.T @ S_inv
N = S_inv * np.outer(eps, eps)
# correct inverse wishart parameters
self._df += n
self._scale += N + B_inv @ Z @ B_inv.T
# correct joint state
self._ext = self._scale / (self._df - 2 * self._dim - 2)
self._single_cov -= K @ S @ K.T
self._single_cov = (self._single_cov + self._single_cov.T) / 2
self._cov = np.kron(self._ext, self._single_cov)
K_tilde = np.kron(np.eye(self._dim), K)
self._state += np.dot(K_tilde, eps)
return self._state, self._cov, self._ext
def distance(self, zs, **kwargs):
return super().distance(zs, **kwargs)
def likelihood(self, zs, **kwargs):
return super().likelihood(zs, **kwargs) | 34.094262 | 126 | 0.57002 | 1,217 | 8,319 | 3.635168 | 0.091208 | 0.050633 | 0.082278 | 0.061483 | 0.859855 | 0.810805 | 0.750678 | 0.736438 | 0.703888 | 0.688969 | 0 | 0.008482 | 0.291381 | 8,319 | 244 | 127 | 34.094262 | 0.741985 | 0.056978 | 0 | 0.730994 | 0 | 0 | 0.042786 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0 | 0.023392 | 0.035088 | 0.216374 | 0.005848 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
cd8e57ac75b8fc835d98e3e37b98001007a129ed | 64 | py | Python | far_ws/src/follow_ahead_rl/gym-gazeboros_ac/gym_gazeboros_ac/envs/__init__.py | Evoiis/Robot-Follow-Ahead-with-Obstacle-Avoidance | 72a407eafc7cdebf0639314c4f4ad0dd6902e6e8 | [
"Unlicense"
] | null | null | null | far_ws/src/follow_ahead_rl/gym-gazeboros_ac/gym_gazeboros_ac/envs/__init__.py | Evoiis/Robot-Follow-Ahead-with-Obstacle-Avoidance | 72a407eafc7cdebf0639314c4f4ad0dd6902e6e8 | [
"Unlicense"
] | 5 | 2021-03-26T01:30:13.000Z | 2021-04-22T22:19:03.000Z | far_ws/src/follow_ahead_rl/gym-gazeboros_ac/gym_gazeboros_ac/envs/__init__.py | Evoiis/Robot-Follow-Ahead-with-Obstacle-Avoidance | 72a407eafc7cdebf0639314c4f4ad0dd6902e6e8 | [
"Unlicense"
] | 1 | 2021-05-05T00:57:43.000Z | 2021-05-05T00:57:43.000Z | from gym_gazeboros_ac.envs.gym_gazeboros_ac import GazeborosEnv
| 32 | 63 | 0.90625 | 10 | 64 | 5.4 | 0.7 | 0.444444 | 0.518519 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0625 | 64 | 1 | 64 | 64 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
26d5e1ed72788d66763e45cc45bd38bc17536e23 | 204 | py | Python | cfpq_data/grammars/__init__.py | viabzalov/CFPQ_Data | 67239c876897d04ba2f4ef88a75fd4a38a494efa | [
"Apache-2.0"
] | 8 | 2020-03-30T17:47:31.000Z | 2022-01-27T13:36:39.000Z | cfpq_data/grammars/__init__.py | viabzalov/CFPQ_Data | 67239c876897d04ba2f4ef88a75fd4a38a494efa | [
"Apache-2.0"
] | 27 | 2019-10-21T09:31:08.000Z | 2021-11-07T03:19:15.000Z | cfpq_data/grammars/__init__.py | viabzalov/CFPQ_Data | 67239c876897d04ba2f4ef88a75fd4a38a494efa | [
"Apache-2.0"
] | 14 | 2019-10-18T12:49:47.000Z | 2021-08-03T14:20:17.000Z | from cfpq_data.grammars.rsm import *
from cfpq_data.grammars.converters import *
from cfpq_data.grammars.readwrite import *
from cfpq_data.grammars.utils import *
from cfpq_data.grammars.samples import *
| 34 | 43 | 0.828431 | 30 | 204 | 5.466667 | 0.333333 | 0.243902 | 0.365854 | 0.609756 | 0.634146 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098039 | 204 | 5 | 44 | 40.8 | 0.891304 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f808b8c639083159139ce82f99d8c0d34a5eed1c | 90 | py | Python | irl/common/utils/__init__.py | uidilr/deepirl_chainer | 45f6134fe457bdae1484e4847ab0701f39940faa | [
"MIT"
] | 16 | 2019-06-25T11:54:38.000Z | 2022-02-13T15:14:40.000Z | irl/common/utils/__init__.py | uidilr/deepirl_chainer | 45f6134fe457bdae1484e4847ab0701f39940faa | [
"MIT"
] | 4 | 2019-07-17T15:17:25.000Z | 2020-09-03T12:12:16.000Z | irl/common/utils/__init__.py | uidilr/deepirl_chainer | 45f6134fe457bdae1484e4847ab0701f39940faa | [
"MIT"
] | 3 | 2019-07-17T16:45:07.000Z | 2020-12-15T16:52:26.000Z | from irl.common.utils.get_states_actions_next_states import get_states_actions_next_states | 90 | 90 | 0.933333 | 15 | 90 | 5.066667 | 0.6 | 0.236842 | 0.421053 | 0.526316 | 0.684211 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033333 | 90 | 1 | 90 | 90 | 0.873563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
f854c848f0ceea8af78f78036033e62f2cd92a68 | 8,157 | py | Python | tools/97_addons/webhook-python/webhook/webhookapp/functions.py | niklaushirt/aiops-install-awx-33 | a062656b39ffa6c37b4aa510e79a811de2d0a3c0 | [
"IBM-pibs"
] | null | null | null | tools/97_addons/webhook-python/webhook/webhookapp/functions.py | niklaushirt/aiops-install-awx-33 | a062656b39ffa6c37b4aa510e79a811de2d0a3c0 | [
"IBM-pibs"
] | null | null | null | tools/97_addons/webhook-python/webhook/webhookapp/functions.py | niklaushirt/aiops-install-awx-33 | a062656b39ffa6c37b4aa510e79a811de2d0a3c0 | [
"IBM-pibs"
] | null | null | null | import requests
from requests.auth import HTTPBasicAuth
import json
import datetime
import random
import os
ITERATE_ELEMENT=os.environ.get('ITERATE_ELEMENT')
DEBUG=os.environ.get('WEBHOOK_DEBUG')
EVENT_MAPPING=os.environ.get('EVENT_MAPPING')
EVENT_TEMPLATE=os.environ.get('EVENT_TEMPLATE')
print (' ------------------------------------------------------------------------------------------------')
print (' 📛 Webhook functions module loaded')
# ----------------------------------------------------------------------------------------------------------------------------------------------------
# INJECT EVENTS IN ARRAY
# ----------------------------------------------------------------------------------------------------------------------------------------------------
def injectEvents(DATALAYER_ROUTE,DATALAYER_USER,DATALAYER_PWD,REQUEST,DEBUG):
print('')
print (' ------------------------------------------------------------------------------------------------')
print (' 📛 Inject Events')
body_unicode = REQUEST.body.decode('utf-8')
body = json.loads(body_unicode)
if DEBUG=='true':
print('**************************************************************************************')
print('**************************************************************************************')
print('DEBUG PAYLOAD')
print('')
print(str(body))
print('**************************************************************************************')
print('DEBUG EVENT_TEMPLATE')
print('')
print(str(EVENT_TEMPLATE))
print('**************************************************************************************')
print('DEBUG EVENT_MAPPING')
print('')
print(str(EVENT_MAPPING))
print('**************************************************************************************')
print('**************************************************************************************')
events = body[ITERATE_ELEMENT]
for event in events:
payload=EVENT_TEMPLATE
mappingelements=EVENT_MAPPING.split(';')
for line in mappingelements:
line=line.strip()
elements=line.split(',')
if DEBUG=='true':
print('Mapping Line:'+str(line))
actInputKey = elements[0].strip()
actOutputKey = elements[1].strip()
if actInputKey in event:
actValue = str(event[actInputKey]).strip()
if DEBUG=='true':
print(' 📥 actInputKey:'+str(actInputKey))
print(' 💾 actOutputKey:'+str(actOutputKey))
print(' ✅ actValue:'+str(actValue))
payload=payload.replace('@@'+str(actOutputKey),actValue)
else:
if DEBUG=='true':
print(' ❗ Input field missing - Setting empty:'+str(actOutputKey))
if 'EXPIRY' in actOutputKey:
payload=payload.replace('@@'+str(actOutputKey),'600000')
elif 'override_with_date' in actInputKey:
timestamp = datetime.datetime.utcnow()    # UTC, since the format appends 'Z'
MY_TIMESTAMP_FORMATTED = timestamp.strftime("%Y-%m-%dT%H:%M:%S.000Z")
payload=payload.replace('@@'+str(actOutputKey),str(MY_TIMESTAMP_FORMATTED))
else:
payload=payload.replace('@@'+str(actOutputKey),'')
if DEBUG=='true':
print ('PAYLOAD FINAL'+str(payload))
#timestamp = str(datetime.datetime.now())
#+%Y-%m-%dT%H:%M:%S
url = 'https://'+DATALAYER_ROUTE+'/irdatalayer.aiops.io/active/v1/events'
auth=HTTPBasicAuth(DATALAYER_USER, DATALAYER_PWD)
headers = {'Content-Type': 'application/json', 'Accept-Charset': 'UTF-8', 'x-username' : 'admin', 'x-subscription-id' : 'cfd95b7e-3bc7-4006-a4a8-a73a79c71255'}
response = requests.post(url, data=str(payload), headers=headers, auth=auth)#, verify=False)
print (' RESULT:'+str(response.content))
print ('')
print ('')
print ('')
#print(events)
print (' ✅ Inject Events')
print (' ------------------------------------------------------------------------------------------------')
print ('')
print ('')
print ('')
return 'OK'
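# Illustrative configuration sketch (editorial; the concrete field names are
# assumptions, only the ';'/',' separators and the @@ prefix come from the
# parsing above):
#
#   ITERATE_ELEMENT="alerts"
#   EVENT_MAPPING="severity,SEVERITY; description,SUMMARY; host,RESOURCE"
#   EVENT_TEMPLATE='{"severity": "@@SEVERITY", "summary": "@@SUMMARY", "resource": "@@RESOURCE"}'
#
# Each 'input,output' pair copies payload[input] over the @@output
# placeholder; missing inputs are blanked, except that EXPIRY fields default
# to 600000 and override_with_date inputs substitute the current timestamp.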
# ----------------------------------------------------------------------------------------------------------------------------------------------------
# INJECT SingleEVENTS
# ----------------------------------------------------------------------------------------------------------------------------------------------------
def injectEventsSingle(DATALAYER_ROUTE,DATALAYER_USER,DATALAYER_PWD,REQUEST,DEBUG):
print('')
print (' ------------------------------------------------------------------------------------------------')
print (' 📛 Inject Events Single')
body_unicode = REQUEST.body.decode('utf-8')
body = json.loads(body_unicode)
if DEBUG=='true':
print('**************************************************************************************')
print('**************************************************************************************')
print('DEBUG PAYLOAD')
print('')
print(str(body))
print('**************************************************************************************')
print('DEBUG EVENT_TEMPLATE')
print('')
print(str(EVENT_TEMPLATE))
print('**************************************************************************************')
print('DEBUG EVENT_MAPPING')
print('')
print(str(EVENT_MAPPING))
print('**************************************************************************************')
print('**************************************************************************************')
payload=EVENT_TEMPLATE
event = body
mappingelements=EVENT_MAPPING.split(';')
for line in mappingelements:
line=line.strip()
elements=line.split(',')
if DEBUG=='true':
print('Mapping Line:'+str(line))
actInputKey = elements[0].strip()
actOutputKey = elements[1].strip()
if actInputKey in event:
actValue = str(event[actInputKey]).strip()
if DEBUG=='true':
print(' 📥 actInputKey:'+str(actInputKey))
print(' 💾 actOutputKey:'+str(actOutputKey))
print(' ✅ actValue:'+str(actValue))
payload=payload.replace('@@'+str(actOutputKey),actValue)
else:
if DEBUG=='true':
print(' ❗ Input field missing - Setting empty:'+str(actOutputKey))
            if 'EXPIRY' in actOutputKey:
                # fall back to a default expiry of 600000 ms (10 minutes)
                payload = payload.replace('@@' + str(actOutputKey), '600000')
            elif 'override_with_date' in actInputKey:
                # substitute the current timestamp in ISO-8601 format
                timestamp = datetime.datetime.now()
                MY_TIMESTAMP_FORMATTED = timestamp.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                payload = payload.replace('@@' + str(actOutputKey), str(MY_TIMESTAMP_FORMATTED))
            else:
                payload = payload.replace('@@' + str(actOutputKey), '')
    if DEBUG == 'true':
        print('PAYLOAD FINAL: ' + str(payload))
url = 'https://'+DATALAYER_ROUTE+'/irdatalayer.aiops.io/active/v1/events'
auth=HTTPBasicAuth(DATALAYER_USER, DATALAYER_PWD)
headers = {'Content-Type': 'application/json', 'Accept-Charset': 'UTF-8', 'x-username' : 'admin', 'x-subscription-id' : 'cfd95b7e-3bc7-4006-a4a8-a73a79c71255'}
    # verify=False can be passed to requests.post to skip TLS verification for self-signed certificates
    response = requests.post(url, data=str(payload), headers=headers, auth=auth)
print (' RESULT:'+str(response.content))
print ('')
print ('')
print ('')
    print(' ✅ Inject Events Single')
print (' ------------------------------------------------------------------------------------------------')
print ('')
print ('')
print ('')
return 'OK'
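# ----------------------------------------------------------------------------------------------------------------------------------------------------
# NOTE: injectEvents and injectEventsSingle share the same mapping logic; a
# shared helper could remove the duplication. A minimal sketch (illustrative
# only; the name buildPayload is an assumption, not part of the original code):
# ----------------------------------------------------------------------------------------------------------------------------------------------------
def buildPayload(event, template, mapping):
    payload = template
    for line in mapping.split(';'):
        elements = line.strip().split(',')
        actInputKey = elements[0].strip()
        actOutputKey = elements[1].strip()
        if actInputKey in event:
            payload = payload.replace('@@' + actOutputKey, str(event[actInputKey]).strip())
        elif 'EXPIRY' in actOutputKey:
            payload = payload.replace('@@' + actOutputKey, '600000')
        elif 'override_with_date' in actInputKey:
            now = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.000Z")
            payload = payload.replace('@@' + actOutputKey, now)
        else:
            payload = payload.replace('@@' + actOutputKey, '')
    return payload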
| 39.597087 | 167 | 0.414613 | 615 | 8,157 | 5.442276 | 0.188618 | 0.098596 | 0.05378 | 0.047804 | 0.874813 | 0.874813 | 0.874813 | 0.874813 | 0.874813 | 0.874813 | 0 | 0.01062 | 0.21503 | 8,157 | 205 | 168 | 39.790244 | 0.510073 | 0.099301 | 0 | 0.875862 | 0 | 0 | 0.345891 | 0.232596 | 0 | 0 | 0 | 0 | 0 | 1 | 0.013793 | false | 0 | 0.041379 | 0 | 0.068966 | 0.468966 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
f8b684072e405127024e3106ddedd40820aee116 | 13581 | py | Python | backend/tests/test_validation.py | ScilifelabDataCentre/project_portal | a274e9e8ac2f92972a240154afd73e6137bec9db | ["BSD-3-Clause"] | 2 | 2021-03-26T11:50:27.000Z | 2022-02-24T20:18:44.000Z | backend/tests/test_validation.py | ScilifelabDataCentre/project_portal | a274e9e8ac2f92972a240154afd73e6137bec9db | ["BSD-3-Clause"] | 120 | 2020-03-19T21:35:57.000Z | 2022-03-11T19:06:58.000Z | backend/tests/test_validation.py | ScilifelabDataCentre/Data-Tracker | 978f518ff91e0d7689b63d18fc280b8ef283c294 | ["BSD-3-Clause"] | null | null | null | """Tests for validation functions."""
import uuid
import pytest
# avoid pylint errors because of fixtures
# pylint: disable = redefined-outer-name, unused-import
from helpers import mdb
import validate
def test_validate_affiliation():
"""Confirm that only valid strings are accepted."""
validator = validate.VALIDATION_MAPPER["affiliation"]
assert validator("Test")
assert validator("")
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(["asd"])
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
def test_validate_auth_ids():
"""Confirm that only valid lists of strings are accepted."""
validator = validate.VALIDATION_MAPPER["auth_ids"]
assert validator([])
assert validator(["Test"])
assert validator(["Test", "Test 2"])
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator("asd")
with pytest.raises(ValueError):
validator([1, 2, 3, 4])
with pytest.raises(ValueError):
validator(4.5)
def test_validate_authors(mdb):
"""Confirm that only valid users are accepted."""
validator = validate.VALIDATION_MAPPER["authors"]
test_users = [str(entry["_id"]) for entry in mdb["users"].aggregate([{"$sample": {"size": 5}}])]
assert validator([], db=mdb)
assert validator(test_users, db=mdb)
assert validator(test_users[:1], db=mdb)
with pytest.raises(ValueError):
validator(test_users[0], db=mdb)
with pytest.raises(ValueError):
validator([str(uuid.uuid4()) for _ in range(4)], db=mdb)
with pytest.raises(ValueError):
validator(5, db=mdb)
with pytest.raises(ValueError):
validator("asd", db=mdb)
with pytest.raises(ValueError):
validator([1, 2, 3, 4], db=mdb)
with pytest.raises(ValueError):
validator(4.5, db=mdb)
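# A minimal sketch of the db-backed user-list check the tests above imply
# (illustrative only; the real implementation lives in validate.py, and the
# collection/field names here are assumptions based on the fixtures):
def _sketch_validate_user_list(data, db=None) -> bool:
    """Raise ValueError unless data is a list of uuids of existing users."""
    if not isinstance(data, list):
        raise ValueError("Must be a list")
    for entry in data:
        try:
            user_uuid = uuid.UUID(entry)
        except (ValueError, TypeError, AttributeError) as err:
            raise ValueError(f"Not a valid uuid: {entry}") from err
        if not db["users"].find_one({"_id": user_uuid}):
            raise ValueError(f"User not found: {entry}")
    return True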
def test_validate_contact():
"""Confirm that only valid strings are accepted."""
validator = validate.VALIDATION_MAPPER["contact"]
assert validator("Test")
assert validator("")
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(["asd"])
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
def test_validate_datasets(mdb):
"""Confirm that only valid users are accepted."""
validator = validate.VALIDATION_MAPPER["datasets"]
test_datasets = [
str(entry["_id"]) for entry in mdb["datasets"].aggregate([{"$sample": {"size": 5}}])
]
assert validator([], db=mdb)
assert validator(test_datasets, db=mdb)
assert validator(test_datasets[:1], db=mdb)
with pytest.raises(ValueError):
validator(test_datasets[0], db=mdb)
with pytest.raises(ValueError):
validator([str(uuid.uuid4()) for _ in range(4)], db=mdb)
with pytest.raises(ValueError):
validator(["not_an_uuid"], db=mdb)
with pytest.raises(ValueError):
validator(5, db=mdb)
with pytest.raises(ValueError):
validator("asd", db=mdb)
with pytest.raises(ValueError):
validator([1, 2, 3, 4], db=mdb)
with pytest.raises(ValueError):
validator(4.5, db=mdb)
def test_validate_description():
"""Confirm that only valid strings are accepted."""
validator = validate.VALIDATION_MAPPER["description"]
assert validator("Test")
assert validator("")
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(["asd"])
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
def test_validate_editors(mdb):
"""Confirm that only valid users are accepted."""
validator = validate.VALIDATION_MAPPER["editors"]
test_users = [str(entry["_id"]) for entry in mdb["users"].aggregate([{"$sample": {"size": 5}}])]
assert validator(test_users, db=mdb)
assert validator(test_users[:1], db=mdb)
with pytest.raises(ValueError):
validator(test_users[0], db=mdb)
with pytest.raises(ValueError):
validator([str(uuid.uuid4()) for _ in range(4)], db=mdb)
with pytest.raises(ValueError):
validator(["invalid_uuid"], db=mdb)
with pytest.raises(ValueError):
validator(5, db=mdb)
with pytest.raises(ValueError):
validator("asd", db=mdb)
with pytest.raises(ValueError):
validator([1, 2, 3, 4], db=mdb)
with pytest.raises(ValueError):
validator(4.5, db=mdb)
def test_validate_email():
"""Confirm that "only" valid emails are accepted."""
validator = validate.VALIDATION_MAPPER["email"]
assert validator("")
assert validator("test@example.com")
assert validator("test.name@sub.example.com")
with pytest.raises(ValueError):
validator("test@localhost")
with pytest.raises(ValueError):
validator("test@localhost@localhost.com")
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator("asd")
with pytest.raises(ValueError):
validator([1, 2, 3, 4])
with pytest.raises(ValueError):
validator(4.5)
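# A minimal sketch consistent with the email contract tested above
# (illustrative only; the regex is an assumption, not the project's actual rule):
def _sketch_validate_email(data) -> bool:
    """Accept the empty string or a single user@domain.tld address."""
    import re

    if not isinstance(data, str):
        raise ValueError("Must be a string")
    if data and not re.fullmatch(r"[^@\s]+@[^@\s]+\.[^@\s]+", data):
        raise ValueError("Not a valid email address")
    return True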
def test_validate_field():
"""Confirm that the correct validation is run."""
validator = validate.validate_field
assert validator("permissions", ["DATA_EDIT"], testing=True)
assert validator("name", "Test", testing=True)
assert not validator("permissions", "DATA_EDIT", testing=True)
assert not validator("bad_key", [], testing=True)
def test_validate_generators(mdb):
"""Confirm that only valid users are accepted."""
validator = validate.VALIDATION_MAPPER["editors"]
test_users = [str(entry["_id"]) for entry in mdb["users"].aggregate([{"$sample": {"size": 5}}])]
assert validator([], db=mdb)
assert validator(test_users, db=mdb)
assert validator(test_users[:1], db=mdb)
with pytest.raises(ValueError):
validator(test_users[0], db=mdb)
with pytest.raises(ValueError):
validator([str(uuid.uuid4()) for _ in range(4)], db=mdb)
with pytest.raises(ValueError):
validator(["invalid_uuid"], db=mdb)
with pytest.raises(ValueError):
validator(5, db=mdb)
with pytest.raises(ValueError):
validator("asd", db=mdb)
with pytest.raises(ValueError):
validator([1, 2, 3, 4], db=mdb)
with pytest.raises(ValueError):
validator(4.5, db=mdb)
def test_validate_name():
"""Confirm that only valid strings are accepted."""
validator = validate.VALIDATION_MAPPER["name"]
assert validator("Test")
assert validator("Test Name")
    with pytest.raises(ValueError):
        validator("")
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(["asd"])
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
def test_validate_orcid():
"""Confirm that only valid orcids are accepted."""
validator = validate.VALIDATION_MAPPER["orcid"]
assert validator("0123-4567-8901-2345")
assert validator("9999-9999-9999-9999")
with pytest.raises(ValueError):
validator({})
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(["asd"])
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
with pytest.raises(ValueError):
validator("999F-9999-9999-9999")
with pytest.raises(ValueError):
validator("1234-")
with pytest.raises(ValueError):
validator("1234-6789")
def test_validate_organisation(mdb):
"""Confirm that only valid users are accepted."""
validator = validate.VALIDATION_MAPPER["organisation"]
test_users = [str(entry["_id"]) for entry in mdb["users"].aggregate([{"$sample": {"size": 5}}])]
assert validator("", db=mdb)
assert validator(test_users[0], db=mdb)
assert validator(test_users[4], db=mdb)
with pytest.raises(ValueError):
validator(test_users, db=mdb)
with pytest.raises(ValueError):
validator(test_users[:1], db=mdb)
with pytest.raises(ValueError):
validator([str(uuid.uuid4()) for _ in range(4)], db=mdb)
with pytest.raises(ValueError):
validator(str(uuid.uuid4()), db=mdb)
with pytest.raises(ValueError):
validator(5, db=mdb)
with pytest.raises(ValueError):
validator("asd", db=mdb)
with pytest.raises(ValueError):
validator([1, 2, 3, 4], db=mdb)
with pytest.raises(ValueError):
validator(4.5, db=mdb)
def test_validate_permissions():
"""Confirm that only valid permission lists are accepted."""
validator = validate.VALIDATION_MAPPER["permissions"]
assert validator(["DATA_EDIT"])
assert validator(["DATA_EDIT", "USER_MANAGEMENT"])
assert validator([])
with pytest.raises(ValueError):
validator(["DATA_EDIT", "USER_MANAGEMENT", "DATA_EDIT"])
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator([1, 2, 3])
with pytest.raises(ValueError):
validator(["DATA_EDIT", 2, 3])
with pytest.raises(ValueError):
validator("DATA_EDIT")
with pytest.raises(ValueError):
validator({})
with pytest.raises(ValueError):
validator(["BAD_PERMISSION"])
with pytest.raises(ValueError):
validator(("DATA_EDIT",))
with pytest.raises(ValueError):
validator(4.5)
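# A sketch of the permission check the test implies (illustrative; the set of
# known permission names is an assumption based on the values used above and
# is likely incomplete):
def _sketch_validate_permissions(data) -> bool:
    """Accept a duplicate-free list drawn from the known permissions."""
    known = {"DATA_EDIT", "USER_MANAGEMENT"}
    if not isinstance(data, list):
        raise ValueError("Must be a list")
    if len(data) != len(set(data)):
        raise ValueError("Duplicate permissions")
    for entry in data:
        if entry not in known:
            raise ValueError(f"Unknown permission: {entry}")
    return True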
def test_validate_properties():
"""Confirm that only valid key:value pairs are accepted."""
validator = validate.VALIDATION_MAPPER["properties"]
assert validator({})
assert validator({"key": "value"})
assert validator({"long key": "long value"})
assert validator(
{
"key": "value",
"key2": "value2",
"key3": "value3",
"key4": "value4",
"long key": "long value",
}
)
    with pytest.raises(ValueError):
        validator({"ke": "value"})
    with pytest.raises(ValueError):
        validator({"key": "va"})
    with pytest.raises(ValueError):
        validator({"key": " value"})
    with pytest.raises(ValueError):
        validator({"key": "value "})
    with pytest.raises(ValueError):
        validator({" key": "value"})
    with pytest.raises(ValueError):
        validator({"key ": "value"})
    with pytest.raises(ValueError):
        validator({1: "value"})
    with pytest.raises(ValueError):
        validator({"key": 1})
    with pytest.raises(ValueError):
        validator(["tag"])
    with pytest.raises(ValueError):
        validator("")
    with pytest.raises(ValueError):
        validator([])
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
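# A sketch consistent with the key/value rules above (illustrative; the
# 3-character minimum and whitespace rules are inferred from the tests):
def _sketch_validate_properties(data) -> bool:
    """Accept a dict of trimmed strings, each at least 3 characters long."""
    if not isinstance(data, dict):
        raise ValueError("Must be a dict")
    for key, value in data.items():
        if not isinstance(key, str) or not isinstance(value, str):
            raise ValueError("Keys and values must be strings")
        if key != key.strip() or value != value.strip():
            raise ValueError("No leading or trailing whitespace allowed")
        if len(key) < 3 or len(value) < 3:
            raise ValueError("Keys and values must have at least 3 characters")
    return True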
def test_validate_tags():
"""Confirm that only valid tags are accepted."""
validator = validate.VALIDATION_MAPPER["tags"]
assert validator([])
assert validator(["test"])
assert validator(["test", "test2"])
    with pytest.raises(ValueError):
        validator({})
    with pytest.raises(ValueError):
        validator([""])
    with pytest.raises(ValueError):
        validator([" tag"])
    with pytest.raises(ValueError):
        validator(["tag "])
    with pytest.raises(ValueError):
        validator(["ta"])
    with pytest.raises(ValueError):
        validator([0])
    with pytest.raises(ValueError):
        validator([0, 1, 2, 3])
    with pytest.raises(ValueError):
        validator("")
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
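# A sketch of the tag rules implied above (illustrative): a list of trimmed
# strings, each at least 3 characters long.
def _sketch_validate_tags(data) -> bool:
    if not isinstance(data, list):
        raise ValueError("Must be a list")
    for tag in data:
        if not isinstance(tag, str) or tag != tag.strip() or len(tag) < 3:
            raise ValueError(f"Bad tag: {tag}")
    return True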
def test_validate_title():
"""Confirm that only valid strings are accepted."""
validator = validate.VALIDATION_MAPPER["title"]
assert validator("Test")
assert validator("Test With more WORdS")
    with pytest.raises(ValueError):
        validator("")
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(["asd"])
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
def test_validate_url():
"""Confirm that urls start with http(s)://."""
validator = validate.VALIDATION_MAPPER["url"]
assert validator("")
assert validator("https://www.example.com/folder")
assert validator("http://www.example.com/folder")
assert validator("http://localhost")
with pytest.raises(ValueError):
validator("RandomTexthttps://www.example.com/folder")
with pytest.raises(ValueError):
validator("http:/")
with pytest.raises(ValueError):
validator("https:/")
with pytest.raises(ValueError):
validator("ftp://localhost")
with pytest.raises(ValueError):
validator("Test With more WORdS")
with pytest.raises(ValueError):
validator(5)
with pytest.raises(ValueError):
validator(["asd"])
with pytest.raises(ValueError):
validator(("asd",))
with pytest.raises(ValueError):
validator(4.5)
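# A sketch matching the URL contract above (illustrative only):
def _sketch_validate_url(data) -> bool:
    """Accept the empty string or anything starting with http(s)://."""
    if not isinstance(data, str):
        raise ValueError("Must be a string")
    if data and not data.startswith(("http://", "https://")):
        raise ValueError("URL must start with http:// or https://")
    return True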
| 31.4375 | 100 | 0.646123 | 1,555 | 13,581 | 5.576206 | 0.082958 | 0.136086 | 0.217737 | 0.353823 | 0.8608 | 0.83589 | 0.780417 | 0.714681 | 0.684119 | 0.658286 | 0 | 0.017259 | 0.210736 | 13,581 | 431 | 101 | 31.510441 | 0.791678 | 0.070687 | 0 | 0.695015 | 0 | 0 | 0.082482 | 0.004232 | 0 | 0 | 0 | 0 | 0.208211 | 1 | 0.052786 | false | 0 | 0.01173 | 0 | 0.064516 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |