hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5cdbdbb33c2b728a97f8f9e70353204ac6883d30
| 10,874
|
py
|
Python
|
tests/test_python_comparison.py
|
pjaytycy/improcflow
|
4f9e40432436221690573b863c5fd8ab49bd9ac5
|
[
"MIT"
] | 1
|
2021-06-22T07:39:12.000Z
|
2021-06-22T07:39:12.000Z
|
tests/test_python_comparison.py
|
pjaytycy/improcflow
|
4f9e40432436221690573b863c5fd8ab49bd9ac5
|
[
"MIT"
] | 1
|
2018-02-08T20:50:53.000Z
|
2018-02-25T14:23:56.000Z
|
tests/test_python_comparison.py
|
pjaytycy/improcflow
|
4f9e40432436221690573b863c5fd8ab49bd9ac5
|
[
"MIT"
] | null | null | null |
import unittest
from django.test import TestCase
from improcflow.logic import *
class PythonComparisonTests(TestCase):
def test_equal_integers(self):
element_input_1 = InputData()
element_input_2 = InputData()
element_equal = PythonIsEqualTo()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input_1)
flow.add_element(element_input_2)
flow.add_element(element_equal)
flow.add_element(element_output)
flow.connect(element_input_1.data, element_equal.left)
flow.connect(element_input_2.data, element_equal.right)
flow.connect(element_equal.result, element_output.data)
element_input_1.set_value(3)
element_input_2.set_value(5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(3)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(-5)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(-8)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(0)
element_input_2.set_value(0)
flow.run()
self.assertTrue(element_output.result())
def test_inequal_integers(self):
element_input_1 = InputData()
element_input_2 = InputData()
element_not_equal = PythonIsNotEqualTo()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input_1)
flow.add_element(element_input_2)
flow.add_element(element_not_equal)
flow.add_element(element_output)
flow.connect(element_input_1.data, element_not_equal.left)
flow.connect(element_input_2.data, element_not_equal.right)
flow.connect(element_not_equal.result, element_output.data)
element_input_1.set_value(3)
element_input_2.set_value(5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(3)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(-5)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(-8)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(0)
element_input_2.set_value(0)
flow.run()
self.assertFalse(element_output.result())
def test_greater_with_integers(self):
element_input_1 = InputData()
element_input_2 = InputData()
element_greater = PythonIsGreaterThan()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input_1)
flow.add_element(element_input_2)
flow.add_element(element_greater)
flow.add_element(element_output)
flow.connect(element_input_1.data, element_greater.left)
flow.connect(element_input_2.data, element_greater.right)
flow.connect(element_greater.result, element_output.data)
element_input_1.set_value(3)
element_input_2.set_value(5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(3)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(-5)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(-8)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(0)
element_input_2.set_value(0)
flow.run()
self.assertFalse(element_output.result())
def test_less_with_integers(self):
element_input_1 = InputData()
element_input_2 = InputData()
element_less = PythonIsLessThan()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input_1)
flow.add_element(element_input_2)
flow.add_element(element_less)
flow.add_element(element_output)
flow.connect(element_input_1.data, element_less.left)
flow.connect(element_input_2.data, element_less.right)
flow.connect(element_less.result, element_output.data)
element_input_1.set_value(3)
element_input_2.set_value(5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(3)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(-5)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(-8)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(0)
element_input_2.set_value(0)
flow.run()
self.assertFalse(element_output.result())
def test_not_less_than_with_integers(self):
element_input_1 = InputData()
element_input_2 = InputData()
element_not_less = PythonIsNotLessThan()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input_1)
flow.add_element(element_input_2)
flow.add_element(element_not_less)
flow.add_element(element_output)
flow.connect(element_input_1.data, element_not_less.left)
flow.connect(element_input_2.data, element_not_less.right)
flow.connect(element_not_less.result, element_output.data)
element_input_1.set_value(3)
element_input_2.set_value(5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(3)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(-5)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(-8)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(0)
element_input_2.set_value(0)
flow.run()
self.assertTrue(element_output.result())
def test_not_greater_than_with_integers(self):
element_input_1 = InputData()
element_input_2 = InputData()
element_not_greater = PythonIsNotGreaterThan()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input_1)
flow.add_element(element_input_2)
flow.add_element(element_not_greater)
flow.add_element(element_output)
flow.connect(element_input_1.data, element_not_greater.left)
flow.connect(element_input_2.data, element_not_greater.right)
flow.connect(element_not_greater.result, element_output.data)
element_input_1.set_value(3)
element_input_2.set_value(5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(3)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(3)
element_input_2.set_value(-5)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(-5)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(-8)
element_input_2.set_value(-5)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(0)
element_input_2.set_value(0)
flow.run()
self.assertTrue(element_output.result())
class PythonLogialTests(TestCase):
def test_and_with_booleans(self):
element_input_1 = InputData()
element_input_2 = InputData()
element_and = PythonAnd()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input_1)
flow.add_element(element_input_2)
flow.add_element(element_and)
flow.add_element(element_output)
flow.connect(element_input_1.data, element_and.left)
flow.connect(element_input_2.data, element_and.right)
flow.connect(element_and.result, element_output.data)
element_input_1.set_value(True)
element_input_2.set_value(True)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(True)
element_input_2.set_value(False)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(False)
element_input_2.set_value(True)
flow.run()
self.assertFalse(element_output.result())
element_input_1.set_value(False)
element_input_2.set_value(False)
flow.run()
self.assertFalse(element_output.result())
def test_or_with_booleans(self):
element_input_1 = InputData()
element_input_2 = InputData()
element_or = PythonOr()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input_1)
flow.add_element(element_input_2)
flow.add_element(element_or)
flow.add_element(element_output)
flow.connect(element_input_1.data, element_or.left)
flow.connect(element_input_2.data, element_or.right)
flow.connect(element_or.result, element_output.data)
element_input_1.set_value(True)
element_input_2.set_value(True)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(True)
element_input_2.set_value(False)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(False)
element_input_2.set_value(True)
flow.run()
self.assertTrue(element_output.result())
element_input_1.set_value(False)
element_input_2.set_value(False)
flow.run()
self.assertFalse(element_output.result())
def test_not_with_boolean(self):
element_input = InputData()
element_not = PythonNot()
element_output = OutputData()
flow = Flow()
flow.add_element(element_input)
flow.add_element(element_not)
flow.add_element(element_output)
flow.connect(element_input.data, element_not.input)
flow.connect(element_not.result, element_output.data)
element_input.set_value(True)
flow.run()
self.assertFalse(element_output.result())
element_input.set_value(False)
flow.run()
self.assertTrue(element_output.result())
| 29.231183
| 65
| 0.724113
| 1,502
| 10,874
| 4.86751
| 0.039281
| 0.231432
| 0.120914
| 0.096293
| 0.930516
| 0.902749
| 0.897552
| 0.897552
| 0.886883
| 0.842429
| 0
| 0.023068
| 0.170774
| 10,874
| 371
| 66
| 29.309973
| 0.787734
| 0
| 0
| 0.79402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152824
| 1
| 0.0299
| false
| 0
| 0.009967
| 0
| 0.046512
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7a252b3430c0ef985019d3b67f8fa67d9531d6b6
| 4,974
|
py
|
Python
|
pip_services3_expressions-3.3.4/test/csv/test_CsvTokenizer.py
|
pip-services3-python/pip-services3-expressions-python
|
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
|
[
"MIT"
] | null | null | null |
pip_services3_expressions-3.3.4/test/csv/test_CsvTokenizer.py
|
pip-services3-python/pip-services3-expressions-python
|
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
|
[
"MIT"
] | null | null | null |
pip_services3_expressions-3.3.4/test/csv/test_CsvTokenizer.py
|
pip-services3-python/pip-services3-expressions-python
|
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from pip_services3_expressions.csv.CsvTokenizer import CsvTokenizer
from pip_services3_expressions.tokenizers.Token import Token
from pip_services3_expressions.tokenizers.TokenType import TokenType
from test.tokenizers.TokenizerFixture import TokenizerFixture
class TestCsvTokenizer:
def test_tokenizer_with_default_parameters(self):
token_string = "\n\r\"John \"\"Da Man\"\"\",Repici,120 Jefferson St.,Riverside, NJ,08075\r\n" \
+ "Stephen,Tyler,\"7452 Terrace \"\"At the Plaza\"\" road\",SomeTown,SD, 91234\r" \
+ ",Blankman,,SomeTown, SD, 00298\n"
expected_tokens = [
Token(TokenType.Eol, "\n\r", 0, 0),
Token(TokenType.Quoted, "\"John \"\"Da Man\"\"\"", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "Repici", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "120 Jefferson St.", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "Riverside", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, " NJ", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "08075", 0, 0), Token(TokenType.Eol, "\r\n", 0, 0),
Token(TokenType.Word, "Stephen", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "Tyler", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Quoted, "\"7452 Terrace \"\"At the Plaza\"\" road\"", 0, 0),
Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "SomeTown", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "SD", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, " 91234", 0, 0), Token(TokenType.Eol, "\r", 0, 0),
Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "Blankman", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, "SomeTown", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, " SD", 0, 0), Token(TokenType.Symbol, ",", 0, 0),
Token(TokenType.Word, " 00298", 0, 0), Token(TokenType.Eol, "\n", 0, 0)
]
tokenizer = CsvTokenizer()
tokenizer.skip_eof = True
token_list = tokenizer.tokenize_buffer(token_string)
TokenizerFixture.assert_are_equals_token_lists(expected_tokens, token_list)
def test_tokenizer_with_overriden_parameters(self):
token_string = "\n\r\'John, \'\'Da Man\'\'\'\tRepici\t120 Jefferson St.\tRiverside\t NJ\t08075\r\n" \
+ "Stephen\t\"Tyler\"\t\'7452 \t\nTerrace \'\'At the Plaza\'\' road\'\tSomeTown\tSD\t 91234\r" \
+ "\tBlankman\t\tSomeTown \'xxx\t\'\t SD\t 00298\n"
expected_tokens = [
Token(TokenType.Eol, "\n\r", 0, 0),
Token(TokenType.Quoted, "\'John, \'\'Da Man\'\'\'", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, "Repici", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, "120 Jefferson St.", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, "Riverside", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, " NJ", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, "08075", 0, 0), Token(TokenType.Eol, "\r\n", 0, 0),
Token(TokenType.Word, "Stephen", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Quoted, "\"Tyler\"", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Quoted, "\'7452 \t\nTerrace \'\'At the Plaza\'\' road\'", 0, 0),
Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, "SomeTown", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, "SD", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, " 91234", 0, 0), Token(TokenType.Eol, "\r", 0, 0),
Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, "Blankman", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, "SomeTown ", 0, 0), Token(TokenType.Quoted, "\'xxx\t\'", 0, 0),
Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, " SD", 0, 0), Token(TokenType.Symbol, "\t", 0, 0),
Token(TokenType.Word, " 00298", 0, 0), Token(TokenType.Eol, "\n", 0, 0)
]
# tokenizer = CsvTokenizer()
# tokenizer.field_separators = [ord('\t')]
# tokenizer.quote_symbols = [ord('\''), ord('\"')]
# tokenizer.end_of_line = "\n"
# tokenizer.skip_eof = True
# token_list = tokenizer.tokenize_buffer(token_string)
#
# TokenizerFixture.assert_are_equals_token_lists(expected_tokens, token_list)
| 62.175
| 119
| 0.570366
| 625
| 4,974
| 4.472
| 0.136
| 0.355635
| 0.172809
| 0.394991
| 0.821825
| 0.793202
| 0.774598
| 0.760286
| 0.760286
| 0.710912
| 0
| 0.061595
| 0.236228
| 4,974
| 79
| 120
| 62.962025
| 0.674125
| 0.064737
| 0
| 0.370968
| 0
| 0.032258
| 0.145196
| 0.014218
| 0
| 0
| 0
| 0
| 0.016129
| 1
| 0.032258
| false
| 0
| 0.064516
| 0
| 0.112903
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7a612f3e82a2656752111e0337ed470a466d866c
| 8,896
|
py
|
Python
|
vtr_flow/benchmarks/verilog/design_for_paper_jun2021/lstm_gen.py
|
aman26kbm/vtr-verilog-to-routin
|
031c394d0b6454e0c66f3f86f7cb78c87538c375
|
[
"MIT"
] | 1
|
2022-02-08T17:41:38.000Z
|
2022-02-08T17:41:38.000Z
|
vtr_flow/benchmarks/verilog/design_for_paper_jun2021/lstm_gen.py
|
aman26kbm/vtr-verilog-to-routin
|
031c394d0b6454e0c66f3f86f7cb78c87538c375
|
[
"MIT"
] | 1
|
2020-06-20T17:35:41.000Z
|
2020-06-20T17:36:48.000Z
|
vtr_flow/benchmarks/verilog/design_for_paper_jun2021/lstm_gen.py
|
aman26kbm/vtr-verilog-to-routin
|
031c394d0b6454e0c66f3f86f7cb78c87538c375
|
[
"MIT"
] | null | null | null |
# First type of MVM
# Multiplies 64x64 with 64x1
# 1 slice in the x direction
# 16 slices in the y direction
## We can multiply an MxN matrix with a Nx1 vector
## M comes from final_matmul_size
## N comes from b_data[47:40]
for iter in range(0,16):
print('''
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] first_out_{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] second_out_{iter};
wire done_mat_mul{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data_in{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data_in{iter}_NC;
assign b_data_in_{iter}_NC = 0;
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data_out{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data_out{iter}_NC;
wire [2*`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] c_data{iter};
wire c_data_available{iter};
wire [3:0] flags{iter}_NC;
wire [35:0] extra_out{iter}_NC;
// 63:48 47:32 31:24 23:16 15:0
assign b_data{iter} = {{16'b0, vector1[{iterp}*`DATA_WIDTH-1:{iter}*`DATA_WIDTH], vector_size, validity_mask_second_matrix, vector2[{iterp}*`DATA_WIDTH-1:{iter}*`DATA_WIDTH]}};
assign a_data{iter} = matrix1[{iterp}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:{iter}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE];
assign a_data_in{iter} = matrix2[{iterp}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:{iter}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE];
assign first_out_{iter} = c_data{iter}[`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:0];
assign second_out_{iter} = a_data_out{iter};
tensor_slice_int16 tensor_slice{iter}(
.clk(clk),
.reset(rst),
.pe_reset(rst),
.start_mat_mul(start_mat_mul),
.done_mat_mul_port(done_mat_mul{iter}),
.a_data(a_data{iter}), //first input matrix goes in here
.b_data(b_data{iter}), //b_data[63:48] -> first vector, b_data[47:40] -> N, b_data[31:16] -> second vector
.a_data_in(a_data_in{iter}), //second input matrix goes in here
.b_data_in(b_data_in{iter}_NC),
.c_data_out(c_data{iter}), //first output will come out here
.a_data_out(a_data_out{iter}), //this is where the second output will be
.b_data_out(b_data_out{iter}_NC),
.flags_port(flags{iter}_NC),
.c_data_available_port(c_data_available{iter}),
.validity_mask_a_rows(8'h0f),
.validity_mask_a_cols_b_rows(8'h0f),
.validity_mask_b_cols(8'h0f),
.slice_mode(`SLICE_MODE_TENSOR),
.slice_dtype(`DTYPE_INT16),
.op(3'b100), //matvec
.preload(1'b0),
.final_mat_mul_size(8'd4),
.a_loc(5'd0),
.b_loc(5'd0),
.no_rounding(1'b0),
.extra_out(extra_out{iter}_NC)
);
'''.format(iter=iter, iterp=iter+1))
for i in range(0,64):
print('''
{iter}: out = in[{iterp}*`DATA_WIDTH-1 : {iter}*`DATA_WIDTH];'''.format(iter=i, iterp=i+1))
print("SEARCHME")
# Second type of MVM
# Multiplies 64x100 with 100x1
# 16 slices and then 8 more slices
## We can multiply an MxN matrix with a Nx1 vector
## M comes from final_matmul_size
## N comes from b_data[47:40]
for iter in range(0,16):
print('''
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] first_out_{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] second_out_{iter};
wire done_mat_mul{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data_in{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data_in{iter}_NC;
assign b_data_in_{iter}_NC = 0;
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data_out{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data_out{iter}_NC;
wire [2*`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] c_data{iter};
wire c_data_available{iter};
wire [3:0] flags{iter}_NC;
wire [35:0] extra_out{iter}_NC;
// 63:48 47:32 31:24 23:16 15:0
assign b_data{iter} = {{16'b0, vector1[`DATA_WIDTH-1:0], vector_size, validity_mask_second_matrix, vector2[`DATA_WIDTH-1:0]}};
assign a_data{iter} = matrix1[{iterp}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:{iter}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE];
assign a_data_in{iter} = matrix2[{iterp}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:{iter}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE];
assign first_out_{iter} = c_data{iter}[`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:0];
assign second_out_{iter} = a_data_out{iter};
tensor_slice_int16 tensor_slice{iter}(
.clk(clk),
.reset(rst),
.pe_reset(rst),
.start_mat_mul(start_mat_mul),
.done_mat_mul_port(done_mat_mul{iter}),
.a_data(a_data{iter}), //first input matrix goes in here
.b_data(b_data{iter}), //b_data[63:48] -> first vector, b_data[47:40] -> N, b_data[31:16] -> second vector
.a_data_in(a_data_in{iter}), //second input matrix goes in here
.b_data_in(b_data_in{iter}_NC),
.c_data_out(c_data{iter}), //first output will come out here
.a_data_out(a_data_out{iter}), //this is where the second output will be
.b_data_out(b_data_out{iter}_NC),
.flags_port(flags{iter}_NC),
.c_data_available_port(c_data_available{iter}),
.validity_mask_a_rows(8'h0f),
.validity_mask_a_cols_b_rows(8'h0f),
.validity_mask_b_cols(8'h0f),
.slice_mode(`SLICE_MODE_TENSOR),
.slice_dtype(`DTYPE_INT16),
.op(3'b100), //matvec
.preload(1'b0),
.final_mat_mul_size(8'd4),
.a_loc(5'd0),
.b_loc(5'd0),
.no_rounding(1'b0),
.extra_out(extra_out{iter}_NC)
);
'''.format(iter=iter, iterp=iter+1))
for iter in range(0,8):
print('''
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] part_first_out_{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] part_second_out_{iter};
wire done_mat_mul_part{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data_part{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data_part{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data_in_part{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data_in_part{iter}_NC;
assign b_data_in_part{iter}_NC = 0;
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] a_data_out_part{iter};
wire [`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] b_data_out_part{iter}_NC;
wire [2*`INT16_MAT_MUL_SIZE*`INT16_DWIDTH-1:0] c_data_part{iter};
wire c_data_available_part{iter};
wire [3:0] flags_part{iter}_NC;
wire [35:0] extra_out_part{iter}_NC;
// 63:48 47:32 31:24 23:16 15:0
assign b_data_part{iter} = {{16'b0, vector3[`DATA_WIDTH-1:0], vector_size, validity_mask_second_matrix_part, vector4[`DATA_WIDTH-1:0]}};
assign a_data_part{iter} = matrix3[{iterp}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:{iter}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE];
assign a_data_in_part{iter} = matrix4[{iterp}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:{iter}*`DATA_WIDTH*`INT16_MAT_MUL_SIZE];
assign part_first_out_{iter} = c_data_part{iter}[`DATA_WIDTH*`INT16_MAT_MUL_SIZE-1:0];
assign part_second_out_{iter} = a_data_out_part{iter};
tensor_slice_int16 tensor_slice_part{iter}(
.clk(clk),
.reset(rst),
.pe_reset(rst),
.start_mat_mul(start_mat_mul_part),
.done_mat_mul_port(done_mat_mul_part{iter}),
.a_data(a_data_part{iter}), //first input matrix goes in here
.b_data(b_data_part{iter}), //b_data[63:48] -> first vector, b_data[47:40] -> N, b_data[31:16] -> second vector
.a_data_in(a_data_in_part{iter}), //second input matrix goes in here
.b_data_in(b_data_in_part{iter}_NC),
.c_data_out(c_data_part{iter}), //first output will come out here
.a_data_out(a_data_out_part{iter}), //this is where the second output will be
.b_data_out(b_data_out_part{iter}_NC),
.flags_port(flags_part{iter}_NC),
.c_data_available_port(c_data_available_part{iter}),
.validity_mask_a_rows(8'h0f),
.validity_mask_a_cols_b_rows(8'h0f),
.validity_mask_b_cols(8'h0f),
.slice_mode(`SLICE_MODE_TENSOR),
.slice_dtype(`DTYPE_INT16),
.op(3'b100), //matvec
.preload(1'b0),
.final_mat_mul_size(8'd4),
.a_loc(5'd0),
.b_loc(5'd0),
.no_rounding(1'b0),
.extra_out(extra_out_part{iter}_NC)
);
'''.format(iter=iter, iterp=iter+1))
for m in range(0,8):
for i in range(0,4):
t = 4*m+i
print('assign tensor_result[{a}:{b}] = first_out_{m}[{msb}:{lsb}] + part_first_out_{m}[{msb}:{lsb}];'\
.format(a=16*(t+1)-1, b=16*t,\
m=m,msb=16*(i+1)-1, lsb=16*i))
for m in range(0,8):
for i in range(0,4):
t = 4*m+i
print('assign tensor_result[{a}:{b}] = first_out_{n}[{msb}:{lsb}] + part_second_out_{m}[{msb}:{lsb}];'\
.format(a=16*(t+1)-1+512, b=16*t+512,\
m=m,n=m+8,msb=16*(i+1)-1, lsb=16*i))
for i in range(0,64):
print('''
always @(posedge clk) begin
if (sel=={i}) begin
out{i} <= in;
end
end
'''.format(i=i))
for i in range(0,64):
print('wire [`DATA_WIDTH-1:0] out{i};'.format(i=i))
for i in range(0,64):
print('out{i},'.format(i=63-i))
| 40.072072
| 177
| 0.693458
| 1,638
| 8,896
| 3.412698
| 0.081807
| 0.064401
| 0.080501
| 0.112701
| 0.928623
| 0.901789
| 0.884436
| 0.835778
| 0.832916
| 0.815206
| 0
| 0.068545
| 0.142311
| 8,896
| 221
| 178
| 40.253394
| 0.664089
| 0.044177
| 0
| 0.729282
| 0
| 0.077348
| 0.908512
| 0.506956
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.055249
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7a794ac5b8411d86fa5646f2ae341983e09ae54c
| 237
|
py
|
Python
|
src/main/factories/__init__.py
|
panda-coder/py-clean-flask
|
e7b8af5056178cd1dc6161f52a909f8043dc4b66
|
[
"MIT"
] | null | null | null |
src/main/factories/__init__.py
|
panda-coder/py-clean-flask
|
e7b8af5056178cd1dc6161f52a909f8043dc4b66
|
[
"MIT"
] | null | null | null |
src/main/factories/__init__.py
|
panda-coder/py-clean-flask
|
e7b8af5056178cd1dc6161f52a909f8043dc4b66
|
[
"MIT"
] | null | null | null |
__all__ = ['make_sum_controller', 'make_subtract_controller', 'make_multiply_controller', 'make_divide_controller']
from .controllers import make_sum_controller, make_subtract_controller, make_multiply_controller, make_divide_controller
| 79
| 120
| 0.869198
| 28
| 237
| 6.642857
| 0.357143
| 0.451613
| 0.182796
| 0.225806
| 0.870968
| 0.870968
| 0.870968
| 0.870968
| 0.870968
| 0.870968
| 0
| 0
| 0.054852
| 237
| 3
| 120
| 79
| 0.830357
| 0
| 0
| 0
| 0
| 0
| 0.37395
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
7a93b280c3e6f32d93b643dbdef8c8ea5344b072
| 2,379
|
py
|
Python
|
controllers/api.py
|
chinapandaman/appZero
|
8e21eed1576dc9085c66206993c7a24caaa6db52
|
[
"MIT"
] | 2
|
2020-01-13T05:31:24.000Z
|
2020-03-24T02:12:02.000Z
|
controllers/api.py
|
chinapandaman/appZero
|
8e21eed1576dc9085c66206993c7a24caaa6db52
|
[
"MIT"
] | null | null | null |
controllers/api.py
|
chinapandaman/appZero
|
8e21eed1576dc9085c66206993c7a24caaa6db52
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from app_factory.factory import AppZeroFactory
@auth.allows_jwt()
@auth.requires_login()
@request.restful()
def template():
    """Generic RESTful CRUD controller (web2py restful dispatch).

    Each handler requires an 'api_name' request parameter naming the
    API-layer component to build via AppZeroFactory (404 when missing).
    PUT and DELETE additionally require a record id as the first
    positional URL argument (400 when missing).
    """
    def GET(*args, **params):
        if not params.get("api_name"):
            raise HTTP(404)
        component = params.pop("api_name")
        api = AppZeroFactory(layer="api", component=component, db=db).build()
        # Optional record id from the URL; remaining params become the query.
        record_id = args[0] if len(args) else None
        query = params if params else None
        return response.json(api.get(table_id=record_id, additional_query=query))

    def POST(*args, **params):
        if not params.get("api_name"):
            raise HTTP(404)
        component = params.pop("api_name")
        api = AppZeroFactory(layer="api", component=component, db=db).build()
        # Everything left in params is the payload for the new record.
        return response.json(api.post(data=params))

    def PUT(*args, **params):
        if not params.get("api_name"):
            raise HTTP(404)
        if not len(args):
            raise HTTP(400)
        component = params.pop("api_name")
        api = AppZeroFactory(layer="api", component=component, db=db).build()
        return response.json(api.put(table_id=args[0], data=params))

    def DELETE(*args, **params):
        if not params.get("api_name"):
            raise HTTP(404)
        if not len(args):
            raise HTTP(400)
        component = params.pop("api_name")
        api = AppZeroFactory(layer="api", component=component, db=db).build()
        return response.json(api.delete(table_id=args[0]))

    # web2py's restful() picks the handler matching the HTTP method
    # out of this function's locals.
    return locals()
@auth.allows_jwt()
@auth.requires_login()
@request.restful()
def sections():
    """Read-only RESTful endpoint serving view-layer 'section' components."""
    def GET(*args, **params):
        # The section name is required as the first positional URL argument.
        if not len(args):
            raise HTTP(400)
        component = "section/{component}".format(component=args[0])
        view = AppZeroFactory(layer="view", component=component, db=db).build()
        return response.json(view.data)

    return locals()
@auth.allows_jwt()
@auth.requires_login()
@request.restful()
def pages():
    """Read-only RESTful endpoint serving view-layer 'page' components."""
    def GET(*args, **params):
        # The page name is required as the first positional URL argument.
        if not len(args):
            raise HTTP(400)
        component = "page/{component}".format(component=args[0])
        view = AppZeroFactory(layer="view", component=component, db=db).build()
        return response.json(view.data)

    return locals()
| 23.097087
| 80
| 0.509878
| 255
| 2,379
| 4.682353
| 0.215686
| 0.033501
| 0.060302
| 0.075377
| 0.806533
| 0.806533
| 0.801508
| 0.801508
| 0.801508
| 0.762144
| 0
| 0.019569
| 0.355612
| 2,379
| 102
| 81
| 23.323529
| 0.759296
| 0.008827
| 0
| 0.696203
| 0
| 0
| 0.050509
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113924
| true
| 0
| 0.012658
| 0
| 0.240506
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f89507fff8966d91f1f92a6bfd7812b6155b278
| 79
|
py
|
Python
|
7_pytest_hooks/test_hooks_example.py
|
Krevit/python_qa
|
225d3964c46247f651b0f9a7550e2c2b3244734e
|
[
"MIT"
] | null | null | null |
7_pytest_hooks/test_hooks_example.py
|
Krevit/python_qa
|
225d3964c46247f651b0f9a7550e2c2b3244734e
|
[
"MIT"
] | null | null | null |
7_pytest_hooks/test_hooks_example.py
|
Krevit/python_qa
|
225d3964c46247f651b0f9a7550e2c2b3244734e
|
[
"MIT"
] | null | null | null |
def test_one():
    """Placeholder test used only to demonstrate pytest hook invocation."""
def test_two():
    """Placeholder test used only to demonstrate pytest hook invocation."""
def test_three():
    """Placeholder test used only to demonstrate pytest hook invocation."""
| 8.777778
| 17
| 0.594937
| 12
| 79
| 3.666667
| 0.5
| 0.477273
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.291139
| 79
| 8
| 18
| 9.875
| 0.785714
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8ffa9354c1ac49f26cfb3436216d5781df0f36e3
| 5,940
|
gyp
|
Python
|
chrome/browser/resources/md_history/compiled_resources2.gyp
|
metux/chromium-deb
|
3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chrome/browser/resources/md_history/compiled_resources2.gyp
|
metux/chromium-deb
|
3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chrome/browser/resources/md_history/compiled_resources2.gyp
|
metux/chromium-deb
|
3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Closure Compiler targets for the MD History page JS modules.
# Each target names one module; 'dependencies' lists the targets whose
# compiled output/externs it needs, and the shared .gypi include supplies
# the rules: compile_js2.gypi compiles the module, include_js.gypi marks
# it as externs/includes only (used by the 'externs' target below).
{
  'targets': [
    {
      'target_name': 'constants',
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'browser_service',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
        'externs',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      # Externs-only target: included, not compiled (include_js.gypi).
      'target_name': 'externs',
      'dependencies': [
        'constants',
      ],
      'includes': ['../../../../third_party/closure_compiler/include_js.gypi'],
    },
    {
      'target_name': 'history_item',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:icon',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:focus_row',
        'constants',
        'browser_service',
        'externs',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'history_list',
      'dependencies': [
        '<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-a11y-announcer/compiled_resources2.gyp:iron-a11y-announcer-extracted',
        '<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-list/compiled_resources2.gyp:iron-list-extracted',
        '<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-scroll-threshold/compiled_resources2.gyp:iron-scroll-threshold-extracted',
        '<(DEPTH)/ui/webui/resources/cr_elements/cr_action_menu/compiled_resources2.gyp:cr_action_menu',
        '<(DEPTH)/ui/webui/resources/cr_elements/cr_lazy_render/compiled_resources2.gyp:cr_lazy_render',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
        'browser_service',
        'constants',
        'externs',
        'history_item',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'history_toolbar',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/cr_elements/cr_toolbar/compiled_resources2.gyp:cr_toolbar',
        '<(DEPTH)/ui/webui/resources/cr_elements/cr_toolbar/compiled_resources2.gyp:cr_toolbar_selection_overlay',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
        'constants',
        'externs',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      # Top-level entry point for the history page.
      'target_name': 'history',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:icon',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
        'app',
        'constants',
        'externs',
        '<(EXTERNS_GYP):chrome_send',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'app',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/cr_elements/cr_drawer/compiled_resources2.gyp:cr_drawer',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:command',
        'constants',
        'externs',
        'history_toolbar',
        'history_list',
        'side_bar',
        'synced_device_card',
        'synced_device_manager',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'query_manager',
      'dependencies': [
        'browser_service',
        'externs',
        'router',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'router',
      'dependencies': [
        'constants',
        'externs',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'side_bar',
      'dependencies': [
        '<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-a11y-keys-behavior/compiled_resources2.gyp:iron-a11y-keys-behavior-extracted',
        '<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-selector/compiled_resources2.gyp:iron-selector-extracted',
        '<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-ripple/compiled_resources2.gyp:paper-ripple-extracted',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
        'browser_service',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'synced_device_card',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:icon',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:focus_row',
        'browser_service',
        'constants',
        'externs',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'synced_device_manager',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/cr_elements/cr_lazy_render/compiled_resources2.gyp:cr_lazy_render',
        '<(DEPTH)/ui/webui/resources/cr_elements/cr_action_menu/compiled_resources2.gyp:cr_action_menu',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:focus_grid',
        'browser_service',
        'synced_device_card',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
  ],
}
| 40.135135
| 146
| 0.630471
| 636
| 5,940
| 5.622642
| 0.15566
| 0.15604
| 0.182047
| 0.146812
| 0.767338
| 0.746085
| 0.731823
| 0.731823
| 0.699385
| 0.669183
| 0
| 0.013938
| 0.190741
| 5,940
| 147
| 147
| 40.408163
| 0.729977
| 0.026094
| 0
| 0.5625
| 0
| 0.041667
| 0.723404
| 0.578101
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
89048e3a4de50d20e43fea0bba2c855ef2b76e47
| 22,458
|
py
|
Python
|
video_feature_cal.py
|
founture123/LSAFGCMR
|
5e032e1e2e14710f6b058cd0a796ba335382c2b7
|
[
"MIT"
] | null | null | null |
video_feature_cal.py
|
founture123/LSAFGCMR
|
5e032e1e2e14710f6b058cd0a796ba335382c2b7
|
[
"MIT"
] | null | null | null |
video_feature_cal.py
|
founture123/LSAFGCMR
|
5e032e1e2e14710f6b058cd0a796ba335382c2b7
|
[
"MIT"
] | null | null | null |
import torch.nn.functional as F
from retrieval import *
post = [25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 7, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 23, 25, 23, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 19, 25, 24, 22, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 24, 25, 22, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 21, 25, 24, 24, 25, 25, 24, 23, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 24, 18, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 24, 25, 19, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 18, 25, 25, 24, 25, 25, 22, 21, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 24, 25, 25, 24, 25, 25, 25, 24, 25, 25, 23, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 20, 25, 25, 25, 25, 25, 24, 22, 25, 25, 21, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 24, 12, 25, 25, 25, 25, 24, 16, 24, 24, 25, 8, 25, 24, 25, 25, 24, 25, 22, 25, 25, 17, 12, 25, 25, 25, 25, 25, 25, 25, 19, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 24, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 19, 25, 25, 25, 24, 25, 24, 25, 25, 24, 25, 23, 24, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 22, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 24, 25, 24, 25, 25, 21, 25, 25, 25, 23, 25, 25, 25, 18, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 18, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 16, 25, 25, 25, 25, 23, 24, 25, 24, 24, 25, 25, 25, 25, 25, 24, 22, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 24, 25, 22, 25, 25, 24, 25, 20, 25, 25, 20, 24, 25, 25, 25, 
24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 18, 25, 19, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 21, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 23, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 22, 25, 25, 25, 6, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 22, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 24, 25, 25, 20, 25, 25, 25, 25, 24, 25, 25, 25, 24, 20, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 24, 24, 25, 23, 25, 25, 25, 22, 25, 25, 25, 24, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 23, 24, 16, 24, 14, 25, 25, 25, 25, 17, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 22, 25, 24, 25, 21, 25, 25, 25, 25, 25, 23, 25, 10, 25, 25, 24, 15, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 17, 25, 25, 25, 25, 25, 24, 25, 25, 1, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 23, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 17, 22, 25, 25, 25, 25, 24, 25, 24, 24, 23, 25, 25, 7, 25, 25, 25, 14, 25, 25, 25, 25, 25, 23, 24, 24, 25, 24, 25, 25, 21, 25, 25, 25, 25, 15, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 21, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 20, 22, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 3, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 24, 24, 11, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 24, 23, 23, 
25, 25, 25, 25, 24, 25, 24, 25, 25, 24, 15, 24, 25, 25, 25, 24, 25, 25, 25, 24, 24, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 14, 25, 25, 25, 25, 25, 25, 16, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 23, 23, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 24, 24, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 21, 24, 25, 25, 25, 24, 25, 24, 25, 25, 22, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 24, 24, 25, 25, 25, 25, 23, 25, 19, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 23, 25, 25, 25, 25, 25, 25, 25, 22, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 3, 23, 24, 25, 24, 25, 25, 23, 25, 25, 24, 21, 25, 25, 25, 22, 25, 25, 25, 25, 24, 25, 25, 23, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 24, 24, 20, 25, 25, 25, 25, 25, 24, 25, 25, 25, 21, 24, 25, 24, 25, 25, 23, 25, 25, 18, 24, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 23, 22, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 16, 25, 25, 24, 25, 25, 24, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 24, 25, 20, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 22, 25, 25, 25, 24, 23, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 24, 23, 24, 25, 24, 25, 23, 24, 24, 25, 25, 25, 24, 25, 24, 17, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 
25, 25, 25, 25, 23, 24, 25, 25, 25, 25, 25, 23, 25, 25, 25, 12, 22, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 19, 25, 24, 23, 24, 18, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 20, 17, 25, 25, 25, 25, 23, 25, 23, 25, 25, 24, 25, 25, 25, 24, 23, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 24, 25, 25, 25, 21, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 24, 25, 25, 25, 24, 22, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 24, 25, 22, 24, 24, 25, 23, 25, 25, 25, 24, 25, 25, 22, 25, 25, 19, 25, 25, 25, 25, 23, 24, 24, 25, 25, 24, 24, 25, 25, 25, 19, 23, 25, 18, 24, 25, 25, 25, 25, 25, 20, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 24, 23, 25, 25, 16, 24, 25, 25, 25, 25, 25, 11, 5, 25, 23, 25, 5, 25, 25, 25, 25, 24, 21, 24, 25, 12, 25, 25, 25, 23, 8, 25, 25, 25, 24, 24, 12, 25, 25, 25, 25, 4, 19, 25, 16, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 20, 25, 24, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 21, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 17, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 17, 25, 21, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 22, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 17, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 25, 25, 20, 25, 25, 25, 25, 24, 24, 25, 24, 25, 25, 23, 25, 25, 25, 25, 21, 21, 25, 24, 25, 25, 24, 25, 25, 25, 21, 25, 25, 25, 25, 24, 25, 24, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 
25, 25, 24, 25, 25, 25, 24, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 23, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 19, 25, 25, 25, 24, 25, 24, 25, 25, 25, 23, 19, 25, 25, 25, 25, 25, 25, 24, 21, 25, 25, 24, 25, 25, 25, 24, 22, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 20, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 22, 25, 25, 25, 25, 25, 25, 25, 25, 21, 23, 18, 25, 25, 25, 17, 24, 24, 25, 25, 1, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 23, 25, 25, 25, 24, 23, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 21, 25, 24, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 18, 24, 25, 25, 25, 25, 25, 25, 25, 25, 20, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 16, 25, 25, 24, 25, 25, 25, 25, 21, 25, 24, 25, 17, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 21, 25, 25, 25, 25, 25, 2, 25, 24, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 2, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 22, 24, 24, 23, 22, 25, 25, 25, 25, 25, 21, 25, 23, 24, 25, 24, 25, 25, 23, 25, 23, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 23, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 11, 
25, 24, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 24, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 24, 25, 18, 25, 25, 25, 25, 23, 25, 25, 25, 23, 25, 24, 24, 24, 25, 24, 25, 23, 25, 25, 25, 25, 21, 25, 25, 25, 18, 25, 25, 25, 25, 24, 25, 24, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 15, 25, 25, 25, 25, 25, 25, 25, 25, 25, 21, 24, 25, 24, 24, 24, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 21, 25, 24, 25, 3, 25, 25, 25, 24, 25, 25, 25, 23, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 23, 24, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 7, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 19, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 23, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 8, 25, 25, 25, 25, 24, 14, 24, 25, 25, 25, 25, 24, 25, 24, 24, 19, 25, 25, 25, 25, 25, 18, 25, 24, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 24, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 22, 24, 24, 25, 24, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 24, 
24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 20, 25, 24, 25, 25, 24, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 2, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 14, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 6, 25, 25, 25, 25, 25, 25, 19, 25, 25, 24, 23, 25, 25, 25, 25, 25, 25, 11, 22, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 20, 25, 25, 25, 21, 25, 24, 25, 25, 24, 25, 25, 21, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 24, 25, 25, 21, 25, 25, 24, 25, 25, 25, 25, 6, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 24, 21, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 2, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 21, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 19, 25, 25, 24, 25, 25, 25, 24, 25, 24, 25, 25, 24, 25, 25, 25, 24, 24, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 11, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 24, 24, 19, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 20, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 18, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 25, 25, 25, 25, 24, 22, 24, 25, 23, 25, 24, 25, 25, 25, 25, 24, 8, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 1, 24, 25, 25, 24, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 14, 25, 25, 24, 
25, 25, 25, 25, 24, 23, 25, 24, 25, 25, 25, 24, 21, 24, 23, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 24, 25, 17, 25, 24, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 23, 24, 25, 25, 25, 25, 25, 25, 4, 25, 25, 25, 23, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 24, 22, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 21, 25, 25, 24, 24, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 23, 20, 25, 25, 25, 16, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 20, 25, 24, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 19, 25, 25, 24, 25, 25, 21, 23, 25, 23, 20, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 24, 24, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 18, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 24, 24, 24, 25, 25, 25, 25, 24, 25, 24, 23, 25, 24, 25, 4, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 22, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 22, 21, 25, 16, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 19, 11, 23, 25, 11, 24, 25, 25, 25, 25, 24, 25, 25, 24, 7, 25, 25, 25, 24, 25, 25, 25, 11, 24, 25, 25, 25, 14, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 21, 
24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 23, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 24, 24, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 22, 24, 13, 25, 25, 22, 24, 24, 25, 25, 25, 25, 25, 25, 24, 25, 21, 25, 25, 25, 25, 25, 24, 25, 25, 25, 22, 23, 25, 23, 25, 25, 25, 25, 25, 25, 23, 24, 25, 24, 25, 24, 24, 25, 25, 22, 25, 25, 23, 23, 24, 24, 25, 22, 24, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 23, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 24, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 18, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 10, 25, 20, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 11, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 21, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 24, 1, 25, 25, 25, 25, 25, 25, 25, 25, 25, 21, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 9, 25, 25, 25, 24, 25, 25, 22, 23, 25, 25, 24, 25, 24, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 24, 25, 25, 24, 25, 25, 18, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 8, 25, 25, 25, 24, 25, 19, 25, 17, 25, 25, 25, 25, 25, 24, 25, 25, 25, 3, 25, 23, 25, 25, 25, 25, 25, 25, 21, 25, 25, 25, 25, 25, 25, 25, 16, 22, 24, 25, 24, 25, 24, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 9, 25, 25, 22, 25, 25, 19, 25, 24, 25, 24, 22, 25, 25, 25, 25, 24, 25, 25, 24, 24, 25, 25, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 24, 25, 25, 23, 24, 25, 25, 25, 25, 25, 25, 25, 25, 
25, 24, 25, 22, 25, 25, 24, 24, 23, 25, 25, 25, 25, 25, 25, 23, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 5, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 16, 25, 24, 24, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 5, 25, 25, 25, 25, 25, 25, 25, 25, 2, 24, 19, 24, 25, 25, 25, 25, 25, 25, 23, 23, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 20, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 18, 21, 10, 25, 24, 22, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 24, 25, 25, 25, 25, 1, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 24, 24, 25, 25, 25, 25, 12, 25, 25, 24, 24, 24, 25, 24, 25, 25, 21, 25, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 23, 25, 24, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 24, 24, 25, 25, 24, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 24, 25, 25, 21, 24, 25, 25, 24, 1, 24, 24, 25, 24, 25, 24, 24, 25, 24, 24, 24, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 23, 25, 25, 25, 24, 25, 25, 25, 25, 25, 22, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 24, 25, 25, 25, 14, 25, 25, 25, 25, 25, 25, 24, 25, 25, 24, 21, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 23, 24, 25, 24, 25, 25, 22, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 22, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 22, 25, 
24, 25, 25, 25, 24, 25, 23, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 23, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 17, 25, 23, 25, 25, 25, 25, 25, 25, 25, 25, 22, 17, 25, 25, 25, 25, 25, 25, 25, 25, 23, 25, 25, 25, 25, 25, 24, 25, 25, 16, 25, 25, 25, 24, 25, 24, 25, 25, 25, 23, 25, 25, 21, 25, 24, 24, 24, 25, 25, 25, 25, 25, 19, 25, 24, 23, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 24, 24, 25, 24, 23, 25, 24, 24, 9, 25, 25, 25, 24, 25, 25, 25, 24, 25, 25, 24, 25, 24, 25, 25, 25, 23, 22, 25, 23, 24, 25, 25, 25, 25, 25, 25, 22, 25, 25, 25, 25, 25, 24, 24, 25, 25, 24, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 25, 25, 25, 25, 23, 25, 25, 25, 24, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 24, 25, 24, 25, 20, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 25, 24, 20, 24, 25, 25, 25, 25, 25, 25, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 19, 25, 25, 25, 25, 25, 25, 25, 25, 25, 24, 25, 25, 25, 25, 25, 25, 25, 24, 25, 24, 24, 25, 25, 24, 24, 25, 25, 25, 24, 25, 21, 25, 25, 25, 25, 24, 24]
# Restrict all CUDA work in this script to GPU index 3.
# NOTE(review): 'os' is not imported in the visible lines of this file;
# presumably it comes in via 'from retrieval import *' — confirm.
os.environ["CUDA_VISIBLE_DEVICES"] = "3"
def mean():
    """Mean-pool per-frame video features into one softmax vector per video.

    Reads a (total_frames, 200) feature matrix from
    ``vector_video_feature/san_out_video/features_te.txt``, groups consecutive
    rows using the module-level ``post`` list (presumably one frame count per
    video — TODO confirm against how ``post`` is built), mean-pools each group
    on the GPU, applies a softmax, and writes the pooled rows to
    ``vector_video_feature/san_calout_video/features_te.txt``.
    """
    pop = []  # NOTE(review): never used — candidate for removal
    # Per-frame features; assumes each row has 200 columns — TODO confirm.
    outs = np.loadtxt("vector_video_feature/san_out_video/features_te.txt", dtype=np.float64)
    print(outs.shape)
    m = 0  # index of the current video within `post`
    i = 0  # frames consumed so far for the current video
    flage = True  # (sic: "flag") True while the per-video accumulator is empty
    sum = 0  # NOTE(review): shadows the builtin `sum`
    num = 0  # total rows written into `f` so far
    # Total frame count = number of rows in the output buffer upper bound.
    for k in post:
        sum += k
    f = np.zeros((sum, 200))
    # NOTE: the name `outs` is rebound inside the loop as the per-video GPU
    # accumulator; iteration still proceeds over the original loadtxt array
    # because the iterator was captured at loop entry.
    for out in outs:
        out = torch.Tensor(out).cuda().reshape(1,200)
        i += 1
        if i == post[m]:
            # All frames of video m collected: pool, softmax, store.
            i = 0
            m += 1
            outs = torch.cat((out, outs), 0)
            # print("outs",outs.size())
            output = torch.mean(outs, 0).reshape(1, 200)
            output = F.softmax(output, dim=1).detach().cpu().numpy()
            # print(output)
            num += output.shape[0]
            # print(m)
            # NOTE(review): both branches write a single (1, 200) row; the
            # first branch's broader slice relies on numpy broadcasting when
            # num > m — verify the intended row placement for the last video.
            if ((m - 1) == len(post) - 1):
                f[(m - 1) :num, :] = output
            else:
                f[(m - 1) :(m) , :] = output
            flage = True
        else:
            # Still inside the current video: start or extend the accumulator.
            if flage:
                outs = out
                flage = False
            else:
                outs = torch.cat((out,outs),0)
    # Persist only the rows actually written.
    np.savetxt('vector_video_feature/san_calout_video/features_te.txt', f[:num, :])
# Script entry point: pool the test-set video features once and exit.
if __name__ == '__main__':
    mean()
| 431.884615
| 21,131
| 0.496438
| 5,455
| 22,458
| 2.040147
| 0.015949
| 1.148351
| 1.344056
| 1.402462
| 0.941594
| 0.936023
| 0.917872
| 0.897385
| 0.870159
| 0.834576
| 0
| 0.635817
| 0.259551
| 22,458
| 52
| 21,132
| 431.884615
| 0.033436
| 0.002137
| 0
| 0.225
| 0
| 0
| 0.005904
| 0.004607
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| false
| 0
| 0.05
| 0
| 0.075
| 0.025
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
8f462aa29efdda8790e9b23477f496d3cce69684
| 6,297
|
py
|
Python
|
BeMAp_package/identification/make_each_fasta.py
|
yusuketsuda/BeMAp
|
b64608730e5a819f83170e34c72a7b3d609ff12c
|
[
"MIT"
] | null | null | null |
BeMAp_package/identification/make_each_fasta.py
|
yusuketsuda/BeMAp
|
b64608730e5a819f83170e34c72a7b3d609ff12c
|
[
"MIT"
] | null | null | null |
BeMAp_package/identification/make_each_fasta.py
|
yusuketsuda/BeMAp
|
b64608730e5a819f83170e34c72a7b3d609ff12c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import pandas as pd
import logging
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
# Child of the application-wide 'LogBeMAp' logger; messages propagate up to it.
logger = logging.getLogger('LogBeMAp').getChild('sub')
fmt = "%(asctime)s %(levelname)s %(name)s :%(message)s"
# NOTE(review): configures the ROOT logger at import time, which is a module
# side effect — any program importing this module inherits this configuration.
logging.basicConfig(level=logging.INFO, format=fmt)
def make_each_fasta(accession, genbank, fasta, temp, s=True):
    '''
    Write one FASTA file per annotated gene/CDS feature of a GenBank record.

    accession : accession id used in output paths and record ids
    genbank   : path to the GenBank file for this accession
    fasta     : path to the matching FASTA file (full nucleotide sequence)
    temp      : working directory; files go to temp + '/each_fasta/' + accession + '/'
    s         : If True, save fasta files (default: True)

    Returns [accession, k] where k is the index of the last feature written,
    or None when the record cannot be read or is excluded.
    '''
    # Qualifier keys searched, in priority order, for a feature's display name.
    gene_keys = ('product', 'gene', 'note', 'locus_tag')
    cds_keys = ('product', 'gene', 'note')  # CDS features never fall back to locus_tag
    try:
        record = SeqIO.read(genbank, 'genbank')
        record_fasta = SeqIO.read(fasta, 'fasta')
        product_gene_note = []
        location = []
        strand = []
        for j, feature in enumerate(record.features):
            location_each = []
            if feature.type == 'gene':
                # Skip the final feature, and genes whose location matches the
                # next feature (those are represented by the following CDS).
                if j != len(record.features) - 1:
                    if feature.location != record.features[j + 1].location:
                        for key in gene_keys:
                            if key in feature.qualifiers:
                                product_gene_note.append(feature.qualifiers[key][0])
                                strand.append(feature.location.strand)
                                break
                        else:
                            print('gene', feature.qualifiers)
                        # The qualifier set does not change per part, so the
                        # name check is hoisted out of the parts loop.
                        has_name = any(key in feature.qualifiers for key in gene_keys)
                        for part in feature.location.parts:
                            if has_name:
                                location_each.append([part.start, part.end])
                            else:
                                print('gene_location', feature.qualifiers)
                        location.append(location_each)
            elif feature.type == 'CDS':
                for key in cds_keys:
                    if key in feature.qualifiers:
                        product_gene_note.append(feature.qualifiers[key][0])
                        strand.append(feature.location.strand)
                        break
                else:
                    print(feature.qualifiers)
                has_name = any(key in feature.qualifiers for key in cds_keys)
                for part in feature.location.parts:
                    if has_name:
                        location_each.append([part.start, part.end])
                    else:
                        print('location', feature.qualifiers)
                location.append(location_each)
            else:
                continue
        try:
            # Mismatched list lengths (feature with no usable name) raise here
            # and the whole accession is excluded below.
            product_location = pd.DataFrame(
                {'product': product_gene_note, 'location': location, 'strand': strand})
            for k in product_location.index:
                loc = product_location.loc[k, 'location']
                if len(loc) == 1:
                    if s:
                        seq = SeqRecord(
                            record_fasta[loc[0][0]:loc[0][1]].seq,
                            id=accession + '_CDS_' + str(k) + '_strand_'
                               + str(product_location.loc[k, 'strand']) + '||',
                            description=product_location.loc[k, 'product'])
                        SeqIO.write(seq, temp + '/each_fasta/' + accession + '/' + str(k) + '.fasta', 'fasta')
                else:
                    # Multi-part (e.g. joined) feature: concatenate the first
                    # two parts' subsequences.
                    if s:
                        seq = SeqRecord(
                            record_fasta[loc[0][0]:loc[0][1]].seq
                            + record_fasta[loc[1][0]:loc[1][1]].seq,
                            id=accession + '_CDS_' + str(k) + '_strand_'
                               + str(product_location.loc[k, 'strand']) + '||',
                            description=product_location.loc[k, 'product'])
                        SeqIO.write(seq, temp + '/each_fasta/' + accession + '/' + str(k) + '.fasta', 'fasta')
            return [accession, k]
        except Exception:
            # Narrowed from a bare `except:` (which also swallowed
            # SystemExit/KeyboardInterrupt) and routed through the module's
            # configured child logger instead of the root logger.
            logger.warning(accession + ' is excluded')
    except Exception:
        logger.warning(accession + ' has no record')
| 65.59375
| 378
| 0.549309
| 682
| 6,297
| 4.982405
| 0.124633
| 0.226604
| 0.233961
| 0.183932
| 0.779871
| 0.772808
| 0.755739
| 0.729253
| 0.724544
| 0.68864
| 0
| 0.005106
| 0.315706
| 6,297
| 95
| 379
| 66.284211
| 0.783476
| 0.009846
| 0
| 0.62069
| 0
| 0
| 0.069143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011494
| false
| 0
| 0.057471
| 0
| 0.08046
| 0.045977
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
56c1ab571861a8cc840d363d0093708444fd8073
| 272,616
|
py
|
Python
|
scripts/external_libs/scapy-2.4.5/scapy/contrib/automotive/volkswagen/definitions.py
|
dariusgrassi/trex-core
|
3b19ddcf67e33934f268b09d3364cd87275d48db
|
[
"Apache-2.0"
] | 250
|
2016-12-29T02:43:04.000Z
|
2022-03-31T05:51:23.000Z
|
scripts/external_libs/scapy-2.4.5/scapy/contrib/automotive/volkswagen/definitions.py
|
dariusgrassi/trex-core
|
3b19ddcf67e33934f268b09d3364cd87275d48db
|
[
"Apache-2.0"
] | 2
|
2017-08-08T06:22:10.000Z
|
2021-05-22T01:59:43.000Z
|
scripts/external_libs/scapy-2.4.5/scapy/contrib/automotive/volkswagen/definitions.py
|
dariusgrassi/trex-core
|
3b19ddcf67e33934f268b09d3364cd87275d48db
|
[
"Apache-2.0"
] | 86
|
2016-12-29T06:39:34.000Z
|
2021-12-12T20:07:39.000Z
|
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Nils Weiss <nils@we155.de>
# Copyright (C) Jonas Schmidt <jonas.schmidt@st.othr.de>
# This program is published under a GPLv2 license
# scapy.contrib.description = Volkswagen specific definitions for UDS
# scapy.contrib.status = skip
from scapy.contrib.automotive.uds import UDS_RDBI, UDS_RC, UDS_RD
UDS_RDBI.dataIdentifiers[0x00bd] = "Theft Protection - Download GFA-Key"
UDS_RDBI.dataIdentifiers[0x00be] = "Theft Protection - Download IKA-Key"
UDS_RDBI.dataIdentifiers[0x00fd] = "IUMPR-ID3"
UDS_RDBI.dataIdentifiers[0x00fe] = "IUMPR-ID2"
UDS_RDBI.dataIdentifiers[0x00ff] = "IUMPR-ID1"
UDS_RDBI.dataIdentifiers[0x02cc] = "Vehicle_identification_number_provisional"
UDS_RDBI.dataIdentifiers[0x02e0] = "Immobilizer - Challenge"
UDS_RDBI.dataIdentifiers[0x02e1] = "Immobilizer - Login"
UDS_RDBI.dataIdentifiers[0x02e2] = "Immobilizer - Download Powertrain"
UDS_RDBI.dataIdentifiers[0x02e3] = "Immobilizer - Download IMS"
UDS_RDBI.dataIdentifiers[0x02e4] = "Transponder ID current Key"
UDS_RDBI.dataIdentifiers[0x02e5] = "Transponder ID Key 1"
UDS_RDBI.dataIdentifiers[0x02e6] = "Transponder ID Key 2"
UDS_RDBI.dataIdentifiers[0x02e7] = "Transponder ID Key 3"
UDS_RDBI.dataIdentifiers[0x02e8] = "Transponder ID Key 4"
UDS_RDBI.dataIdentifiers[0x02e9] = "Transponder ID Key 5"
UDS_RDBI.dataIdentifiers[0x02ea] = "Transponder ID Key 6"
UDS_RDBI.dataIdentifiers[0x02eb] = "Transponder ID Key 7"
UDS_RDBI.dataIdentifiers[0x02ec] = "Transponder ID Key 8"
UDS_RDBI.dataIdentifiers[0x02ed] = "State of Immobilizer"
UDS_RDBI.dataIdentifiers[0x02ee] = "State of Immobilizer Slaves"
UDS_RDBI.dataIdentifiers[0x02ef] = "State Blocking Time"
UDS_RDBI.dataIdentifiers[0x02f1] = "Immobilizer - Slave Login"
UDS_RDBI.dataIdentifiers[0x02f6] = "Download WFS SHE"
UDS_RDBI.dataIdentifiers[0x02f9] = "CRC32 Checksum of FAZIT Identification String"
UDS_RDBI.dataIdentifiers[0x02fa] = "Adapted_transponders_checksum"
UDS_RDBI.dataIdentifiers[0x02fb] = "Immobilizer - Download WFS 4"
UDS_RDBI.dataIdentifiers[0x02ff] = "Immobilizer_snapshot"
UDS_RDBI.dataIdentifiers[0x0407] = "VW Logical Software Block Counter Of Programming Attempts"
UDS_RDBI.dataIdentifiers[0x040f] = "VW Logical Software Block Lock Value"
UDS_RDBI.dataIdentifiers[0x0410] = "Bootloader TP Blocksize"
UDS_RDBI.dataIdentifiers[0x04a3] = "Gateway Component List"
UDS_RDBI.dataIdentifiers[0x0600] = "VW Coding Value"
UDS_RDBI.dataIdentifiers[0x0610] = "Control_unit_for_wiper_motor_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0611] = "Slave_list_VW_spare_part_number"
UDS_RDBI.dataIdentifiers[0x0612] = "Slave_list_VW_software_version_number"
UDS_RDBI.dataIdentifiers[0x0613] = "Slave_list_VW_ecu_hardware_version_number"
UDS_RDBI.dataIdentifiers[0x0614] = "Slave_list_VW_hardware_number"
UDS_RDBI.dataIdentifiers[0x0615] = "Slave_list_ecu_serial_number"
UDS_RDBI.dataIdentifiers[0x0616] = "Slave_list_VW_FAZIT_identification_string"
UDS_RDBI.dataIdentifiers[0x0617] = "Slave_list_VW_system_name_or_engine_type"
UDS_RDBI.dataIdentifiers[0x0618] = "Left_rear_seat_ventilation_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0619] = "Right_rear_seat_ventilation_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x061a] = "Slave_component_list"
UDS_RDBI.dataIdentifiers[0x061b] = "Slave_component_list_databus_identification"
UDS_RDBI.dataIdentifiers[0x061c] = "Slave_component_list_ecu_identification"
UDS_RDBI.dataIdentifiers[0x061d] = "Slave_component_list_present"
UDS_RDBI.dataIdentifiers[0x061e] = "Right_headlamp_power_output_stage_Coding_Values"
UDS_RDBI.dataIdentifiers[0x061f] = "Sensor_for_anti_theft_alarm_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0620] = "Rear_lid_control_module_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0621] = "Alarm_horn_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0622] = "Automatic_day_night_interior_mirror_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0623] = "Sun_roof_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0624] = "Steering_column_lock_actuator_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0625] = "Anti_theft_tilt_system_control_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0626] = "Tire_pressure_monitor_antenna_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0627] = "Heated_windshield_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0628] = "Rear_light_left_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0629] = "Ceiling_light_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x062a] = "Left_front_massage_seat_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x062b] = "Right_front_massage_seat_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x062c] = "Control_module_for_auxiliary_air_heater_Coding_Values"
UDS_RDBI.dataIdentifiers[0x062d] = "Ioniser_Coding_Values"
UDS_RDBI.dataIdentifiers[0x062e] = "Multi_function_steering_wheel_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x062f] = "Left_rear_door_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0630] = "Right_rear_door_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0631] = "Left_rear_massage_seat_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0632] = "Right_rear_massage_seat_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0633] = "Display_unit_1_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0634] = "Battery_monitoring_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0635] = "Roof_blind_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0636] = "Sun_roof_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0637] = "Display_unit_2_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0638] = "Telephone_handset_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0639] = "Traffic_data_aerial_Coding_Values"
UDS_RDBI.dataIdentifiers[0x063a] = "Chip_card_reader_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x063b] = "Hands_free_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x063c] = "Telephone_handset_Coding_Values"
UDS_RDBI.dataIdentifiers[0x063d] = "Display_unit_front_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x063e] = "Multimedia_operating_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x063f] = "Digital_sound_system_control_module_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x0640] = "Control_unit_for_wiper_motor_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0641] = "Rain_light_recognition_sensor_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0642] = "Light_switch_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0643] = "Garage_door_opener_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0644] = "Garage_door_opener_operating_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0645] = "Ignition_key_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0646] = "Left_front_seat_ventilation_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0647] = "Right_front_seat_ventilation_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0648] = "Left_rear_seat_ventilation_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0649] = "Right_rear_seat_ventilation_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x064a] = "Data_medium_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x064b] = "Drivers_door_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x064c] = "Front_passengers_door_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x064d] = "Left_headlamp_power_output_stage_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x064e] = "Right_headlamp_power_output_stage_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x064f] = "Sensor_for_anti_theft_alarm_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0650] = "Rear_lid_control_module_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0651] = "Alarm_horn_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0652] = "Automatic_day_night_interior_mirror_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0653] = "Sun_roof_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0654] = "Steering_column_lock_actuator_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0655] = "Anti_theft_tilt_system_control_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0656] = "Tire_pressure_monitor_antenna_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0657] = "Heated_windshield_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0658] = "Rear_light_left_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0659] = "Ceiling_light_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x065a] = "Left_front_massage_seat_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x065b] = "Right_front_massage_seat_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x065c] = "Control_module_for_auxiliary_air_heater_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x065d] = "Ioniser_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x065e] = "Multi_function_steering_wheel_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x065f] = "Left_rear_door_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0660] = "Right_rear_door_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0661] = "Left_rear_massage_seat_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0662] = "Right_rear_massage_seat_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0663] = "Display_unit_1_for_multimedia_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0664] = "Battery_monitoring_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0665] = "Roof_blind_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0666] = "Sun_roof_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0667] = "Display_unit_2_for_multimedia_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0668] = "Telephone_handset_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0669] = "Traffic_data_aerial_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x066a] = "Chip_card_reader_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x066b] = "Hands_free_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x066c] = "Telephone_handset_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x066d] = "Display_unit_front_for_multimedia_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x066e] = "Multimedia_operating_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x066f] = "Digital_sound_system_control_module_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x0670] = "Control_unit_for_wiper_motor_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0671] = "Rain_light_recognition_sensor_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0672] = "Light_switch_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0673] = "Garage_door_opener_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0674] = "Garage_door_opener_operating_unit_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0675] = "Ignition_key_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0676] = "Left_front_seat_ventilation_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0677] = "Right_front_seat_ventilation_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0678] = "Left_rear_seat_ventilation_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0679] = "Right_rear_seat_ventilation_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x067a] = "Data_medium_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x067b] = "Drivers_door_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x067c] = "Front_passengers_door_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x067d] = "Left_headlamp_power_output_stage_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x067e] = "Right_headlamp_power_output_stage_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x067f] = "Sensor_for_anti_theft_alarm_system_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0680] = "Rear_lid_control_module_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0681] = "Alarm_horn_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0682] = "Automatic_day_night_interior_mirror_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0683] = "Sun_roof_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0684] = "Steering_column_lock_actuator_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0685] = "Anti_theft_tilt_system_control_unit_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0686] = "Tire_pressure_monitor_antenna_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0687] = "Heated_windshield_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0688] = "Rear_light_left_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0689] = "Ceiling_light_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x068a] = "Left_front_massage_seat_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x068b] = "Right_front_massage_seat_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x068c] = "Control_module_for_auxiliary_air_heater_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x068d] = "Ioniser_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x068e] = "Multi_function_steering_wheel_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x068f] = "Left_rear_door_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0690] = "Right_rear_door_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0691] = "Left_rear_massage_seat_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0692] = "Right_rear_massage_seat_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0693] = "Display_unit_1_for_multimedia_system_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0694] = "Battery_monitoring_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0695] = "Roof_blind_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0696] = "Sun_roof_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0697] = "Display_unit_2_for_multimedia_system_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0698] = "Telephone_handset_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x0699] = "Traffic_data_aerial_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x069a] = "Chip_card_reader_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x069b] = "Hands_free_system_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x069c] = "Telephone_handset_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x069d] = "Display_unit_front_for_multimedia_system_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x069e] = "Multimedia_operating_unit_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x069f] = "Digital_sound_system_control_module_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x06a0] = "Control_unit_for_wiper_motor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a1] = "Rain_light_recognition_sensor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a2] = "Light_switch_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a3] = "Garage_door_opener_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a4] = "Garage_door_opener_operating_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a5] = "Ignition_key_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a6] = "Left_front_seat_ventilation_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a7] = "Right_front_seat_ventilation_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a8] = "Left_rear_seat_ventilation_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06a9] = "Right_rear_seat_ventilation_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06aa] = "Data_medium_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06ab] = "Drivers_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06ac] = "Front_passengers_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06ad] = "Left_headlamp_power_output_stage_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06ae] = "Right_headlamp_power_output_stage_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06af] = "Sensor_for_anti_theft_alarm_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b0] = "Rear_lid_control_module_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b1] = "Alarm_horn_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b2] = "Automatic_day_night_interior_mirror_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b3] = "Sun_roof_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b4] = "Steering_column_lock_actuator_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b5] = "Anti_theft_tilt_system_control_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b6] = "Tire_pressure_monitor_antenna_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b7] = "Heated_windshield_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b8] = "Rear_light_left_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06b9] = "Ceiling_light_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06ba] = "Left_front_massage_seat_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06bb] = "Right_front_massage_seat_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06bc] = "Control_module_for_auxiliary_air_heater_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06bd] = "Ioniser_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06be] = "Multi_function_steering_wheel_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06bf] = "Left_rear_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c0] = "Right_rear_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c1] = "Left_rear_massage_seat_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c2] = "Right_rear_massage_seat_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c3] = "Display_unit_1_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c4] = "Battery_monitoring_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c5] = "Roof_blind_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c6] = "Sun_roof_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c7] = "Display_unit_2_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c8] = "Telephone_handset_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06c9] = "Traffic_data_aerial_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06ca] = "Chip_card_reader_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06cb] = "Hands_free_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06cc] = "Telephone_handset_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06cd] = "Display_unit_front_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06ce] = "Multimedia_operating_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06cf] = "Digital_sound_system_control_module_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x06d0] = "Control_unit_for_wiper_motor_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d1] = "Rain_light_recognition_sensor_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d2] = "Light_switch_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d3] = "Garage_door_opener_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d4] = "Garage_door_opener_operating_unit_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d5] = "Ignition_key_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d6] = "Left_front_seat_ventilation_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d7] = "Right_front_seat_ventilation_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d8] = "Left_rear_seat_ventilation_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06d9] = "Right_rear_seat_ventilation_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06da] = "Data_medium_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06db] = "Drivers_door_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06dc] = "Front_passengers_door_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06dd] = "Left_headlamp_power_output_stage_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06de] = "Right_headlamp_power_output_stage_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06df] = "Sensor_for_anti_theft_alarm_system_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e0] = "Rear_lid_control_module_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e1] = "Alarm_horn_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e2] = "Automatic_day_night_interior_mirror_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e3] = "Sun_roof_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e4] = "Steering_column_lock_actuator_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e5] = "Anti_theft_tilt_system_control_unit_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e6] = "Tire_pressure_monitor_antenna_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e7] = "Heated_windshield_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e8] = "Rear_light_left_1_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06e9] = "Ceiling_light_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06ea] = "Left_front_massage_seat_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06eb] = "Right_front_massage_seat_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06ec] = "Control_module_for_auxiliary_air_heater_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06ed] = "Ioniser_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06ee] = "Multi_function_steering_wheel_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06ef] = "Left_rear_door_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f0] = "Right_rear_door_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f1] = "Left_rear_massage_seat_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f2] = "Right_rear_massage_seat_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f3] = "Display_unit_1_for_multimedia_system_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f4] = "Battery_monitoring_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f5] = "Roof_blind_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f6] = "Sun_roof_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f7] = "Display_unit_2_for_multimedia_system_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f8] = "Telephone_handset_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06f9] = "Traffic_data_aerial_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06fa] = "Chip_card_reader_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06fb] = "Hands_free_system_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06fc] = "Telephone_handset_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06fd] = "Display_unit_front_for_multimedia_system_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06fe] = "Multimedia_operating_unit_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x06ff] = "Digital_sound_system_control_module_2_Hardware_Version_Number"
# DIDs 0x0700-0x072F: Serial_Number labels, one per body/comfort ECU
# (contiguous identifier range, same ECU order as the sibling groups).
UDS_RDBI.dataIdentifiers.update({
    0x0700 + offset: ecu_name + "_Serial_Number"
    for offset, ecu_name in enumerate((
        "Control_unit_for_wiper_motor",
        "Rain_light_recognition_sensor",
        "Light_switch",
        "Garage_door_opener_control_module",
        "Garage_door_opener_operating_unit",
        "Ignition_key",
        "Left_front_seat_ventilation_control_module",
        "Right_front_seat_ventilation_control_module",
        "Left_rear_seat_ventilation_control_module",
        "Right_rear_seat_ventilation_control_module",
        "Data_medium",
        "Drivers_door_control_module",
        "Front_passengers_door_control_module",
        "Left_headlamp_power_output_stage",
        "Right_headlamp_power_output_stage",
        "Sensor_for_anti_theft_alarm_system",
        "Rear_lid_control_module_2",
        "Alarm_horn",
        "Automatic_day_night_interior_mirror",
        "Sun_roof",
        "Steering_column_lock_actuator",
        "Anti_theft_tilt_system_control_unit",
        "Tire_pressure_monitor_antenna",
        "Heated_windshield_control_module",
        "Rear_light_left_1",
        "Ceiling_light_module",
        "Left_front_massage_seat_control_module",
        "Right_front_massage_seat_control_module",
        "Control_module_for_auxiliary_air_heater",
        "Ioniser",
        "Multi_function_steering_wheel_control_module",
        "Left_rear_door_control_module",
        "Right_rear_door_control_module",
        "Left_rear_massage_seat_control_module",
        "Right_rear_massage_seat_control_module",
        "Display_unit_1_for_multimedia_system",
        "Battery_monitoring_control_module",
        "Roof_blind",
        "Sun_roof_2",
        "Display_unit_2_for_multimedia_system",
        "Telephone_handset_2",
        "Traffic_data_aerial",
        "Chip_card_reader_control_module",
        "Hands_free_system",
        "Telephone_handset",
        "Display_unit_front_for_multimedia_system",
        "Multimedia_operating_unit",
        "Digital_sound_system_control_module_2",
    ))
})
# DIDs 0x0730-0x075F: System_Name labels for the same 48 body/comfort ECUs
# as the Serial_Number group (contiguous identifier range).
UDS_RDBI.dataIdentifiers.update({
    0x0730 + offset: ecu_name + "_System_Name"
    for offset, ecu_name in enumerate((
        "Control_unit_for_wiper_motor",
        "Rain_light_recognition_sensor",
        "Light_switch",
        "Garage_door_opener_control_module",
        "Garage_door_opener_operating_unit",
        "Ignition_key",
        "Left_front_seat_ventilation_control_module",
        "Right_front_seat_ventilation_control_module",
        "Left_rear_seat_ventilation_control_module",
        "Right_rear_seat_ventilation_control_module",
        "Data_medium",
        "Drivers_door_control_module",
        "Front_passengers_door_control_module",
        "Left_headlamp_power_output_stage",
        "Right_headlamp_power_output_stage",
        "Sensor_for_anti_theft_alarm_system",
        "Rear_lid_control_module_2",
        "Alarm_horn",
        "Automatic_day_night_interior_mirror",
        "Sun_roof",
        "Steering_column_lock_actuator",
        "Anti_theft_tilt_system_control_unit",
        "Tire_pressure_monitor_antenna",
        "Heated_windshield_control_module",
        "Rear_light_left_1",
        "Ceiling_light_module",
        "Left_front_massage_seat_control_module",
        "Right_front_massage_seat_control_module",
        "Control_module_for_auxiliary_air_heater",
        "Ioniser",
        "Multi_function_steering_wheel_control_module",
        "Left_rear_door_control_module",
        "Right_rear_door_control_module",
        "Left_rear_massage_seat_control_module",
        "Right_rear_massage_seat_control_module",
        "Display_unit_1_for_multimedia_system",
        "Battery_monitoring_control_module",
        "Roof_blind",
        "Sun_roof_2",
        "Display_unit_2_for_multimedia_system",
        "Telephone_handset_2",
        "Traffic_data_aerial",
        "Chip_card_reader_control_module",
        "Hands_free_system",
        "Telephone_handset",
        "Display_unit_front_for_multimedia_system",
        "Multimedia_operating_unit",
        "Digital_sound_system_control_module_2",
    ))
})
# DIDs 0x07A0-0x07CF: VW_Slave_FAZIT_string labels for the same 48
# body/comfort ECUs as the Serial_Number group (contiguous identifier range).
UDS_RDBI.dataIdentifiers.update({
    0x07a0 + offset: ecu_name + "_VW_Slave_FAZIT_string"
    for offset, ecu_name in enumerate((
        "Control_unit_for_wiper_motor",
        "Rain_light_recognition_sensor",
        "Light_switch",
        "Garage_door_opener_control_module",
        "Garage_door_opener_operating_unit",
        "Ignition_key",
        "Left_front_seat_ventilation_control_module",
        "Right_front_seat_ventilation_control_module",
        "Left_rear_seat_ventilation_control_module",
        "Right_rear_seat_ventilation_control_module",
        "Data_medium",
        "Drivers_door_control_module",
        "Front_passengers_door_control_module",
        "Left_headlamp_power_output_stage",
        "Right_headlamp_power_output_stage",
        "Sensor_for_anti_theft_alarm_system",
        "Rear_lid_control_module_2",
        "Alarm_horn",
        "Automatic_day_night_interior_mirror",
        "Sun_roof",
        "Steering_column_lock_actuator",
        "Anti_theft_tilt_system_control_unit",
        "Tire_pressure_monitor_antenna",
        "Heated_windshield_control_module",
        "Rear_light_left_1",
        "Ceiling_light_module",
        "Left_front_massage_seat_control_module",
        "Right_front_massage_seat_control_module",
        "Control_module_for_auxiliary_air_heater",
        "Ioniser",
        "Multi_function_steering_wheel_control_module",
        "Left_rear_door_control_module",
        "Right_rear_door_control_module",
        "Left_rear_massage_seat_control_module",
        "Right_rear_massage_seat_control_module",
        "Display_unit_1_for_multimedia_system",
        "Battery_monitoring_control_module",
        "Roof_blind",
        "Sun_roof_2",
        "Display_unit_2_for_multimedia_system",
        "Telephone_handset_2",
        "Traffic_data_aerial",
        "Chip_card_reader_control_module",
        "Hands_free_system",
        "Telephone_handset",
        "Display_unit_front_for_multimedia_system",
        "Multimedia_operating_unit",
        "Digital_sound_system_control_module_2",
    ))
})
# Gateway / development data identifiers (not part of the per-ECU DID grid
# used by the surrounding groups).
UDS_RDBI.dataIdentifiers[0x0902] = "Activation of Development CAN-Messages"
UDS_RDBI.dataIdentifiers[0x2a26] = "Gateway Component List present"
UDS_RDBI.dataIdentifiers[0x2a27] = "Gateway_Component_List_Sleepindication"
UDS_RDBI.dataIdentifiers[0x2a28] = "Gateway Component List dtc"
UDS_RDBI.dataIdentifiers[0x2a29] = "Gateway Component List DiagProt"
UDS_RDBI.dataIdentifiers[0x2a2d] = "Gateway_component_list_databus_identification"
UDS_RDBI.dataIdentifiers[0x2ee0] = "Gateway_component_list_diag_path"
UDS_RDBI.dataIdentifiers[0x2ee1] = "Gateway_component_list_ecu_authentication"
# NOTE(review): label style is mixed in this section (spaces vs underscores,
# inconsistent capitalization) — presumably copied verbatim from the OEM
# definition data; confirm against the upstream source before normalizing.
# DIDs from base 0x3610: Coding_Values labels for a second ECU group.
# The offsets are non-contiguous (gaps at 0x0E-0x18, 0x1B, 0x1D); the same
# offset/ECU layout repeats for every 0x36xx/0x37xx group below.
UDS_RDBI.dataIdentifiers.update({
    0x3610 + offset: ecu_name + "_Coding_Values"
    for offset, ecu_name in zip(
        (0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
         0x0b, 0x0c, 0x0d, 0x19, 0x1a, 0x1c, 0x1e, 0x1f, 0x20, 0x21, 0x22,
         0x23, 0x24, 0x25, 0x26, 0x27),
        (
            "Electrically_adjustable_steering_column",
            "Relative_humidity_sensor_in_fresh_air_intake_duct",
            "Rear_spoiler_adjustment",
            "Roof_blind_2",
            "Motor_for_wind_deflector",
            "Voltage_stabilizer",
            "Switch_module_for_driver_seat",
            "Switch_module_for_front_passenger_seat",
            "Switch_module_for_rear_seat_driver_side",
            "Switch_module_for_rear_seat_front_passenger_side",
            "Switch_module_2_for_driver_seat",
            "Switch_module_2_for_front_passenger_seat",
            "Switch_module_2_for_rear_seat_front_passenger_side",
            "Compact_disc_database",
            "LED_headlamp_powermodule_2_left",
            "LED_headlamp_powermodule_2_right",
            "Multimedia_operating_unit_2",
            "Data_medium_2",
            "Analog_clock",
            "Relative_Air_Humidity_Interior_Sender",
            "Sensor_controlled_power_rear_lid",
            "Battery_monitoring_control_module_2",
            "Air_conditioning_compressor",
            "Control_module_for_auxiliary_blower_motors",
            "High_beam_powermodule_left",
            "High_beam_powermodule_right",
            "Coolant_heater",
        ),
    )
})
# DIDs from base 0x3640: Spare_Part_Number labels; same non-contiguous
# offset/ECU layout as the Coding_Values group at base 0x3610.
UDS_RDBI.dataIdentifiers.update({
    0x3640 + offset: ecu_name + "_Spare_Part_Number"
    for offset, ecu_name in zip(
        (0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
         0x0b, 0x0c, 0x0d, 0x19, 0x1a, 0x1c, 0x1e, 0x1f, 0x20, 0x21, 0x22,
         0x23, 0x24, 0x25, 0x26, 0x27),
        (
            "Electrically_adjustable_steering_column",
            "Relative_humidity_sensor_in_fresh_air_intake_duct",
            "Rear_spoiler_adjustment",
            "Roof_blind_2",
            "Motor_for_wind_deflector",
            "Voltage_stabilizer",
            "Switch_module_for_driver_seat",
            "Switch_module_for_front_passenger_seat",
            "Switch_module_for_rear_seat_driver_side",
            "Switch_module_for_rear_seat_front_passenger_side",
            "Switch_module_2_for_driver_seat",
            "Switch_module_2_for_front_passenger_seat",
            "Switch_module_2_for_rear_seat_front_passenger_side",
            "Compact_disc_database",
            "LED_headlamp_powermodule_2_left",
            "LED_headlamp_powermodule_2_right",
            "Multimedia_operating_unit_2",
            "Data_medium_2",
            "Analog_clock",
            "Relative_Air_Humidity_Interior_Sender",
            "Sensor_controlled_power_rear_lid",
            "Battery_monitoring_control_module_2",
            "Air_conditioning_compressor",
            "Control_module_for_auxiliary_blower_motors",
            "High_beam_powermodule_left",
            "High_beam_powermodule_right",
            "Coolant_heater",
        ),
    )
})
# DIDs from base 0x3670: Application_Software_Version_Number labels; same
# non-contiguous offset/ECU layout as the Coding_Values group at base 0x3610.
UDS_RDBI.dataIdentifiers.update({
    0x3670 + offset: ecu_name + "_Application_Software_Version_Number"
    for offset, ecu_name in zip(
        (0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
         0x0b, 0x0c, 0x0d, 0x19, 0x1a, 0x1c, 0x1e, 0x1f, 0x20, 0x21, 0x22,
         0x23, 0x24, 0x25, 0x26, 0x27),
        (
            "Electrically_adjustable_steering_column",
            "Relative_humidity_sensor_in_fresh_air_intake_duct",
            "Rear_spoiler_adjustment",
            "Roof_blind_2",
            "Motor_for_wind_deflector",
            "Voltage_stabilizer",
            "Switch_module_for_driver_seat",
            "Switch_module_for_front_passenger_seat",
            "Switch_module_for_rear_seat_driver_side",
            "Switch_module_for_rear_seat_front_passenger_side",
            "Switch_module_2_for_driver_seat",
            "Switch_module_2_for_front_passenger_seat",
            "Switch_module_2_for_rear_seat_front_passenger_side",
            "Compact_disc_database",
            "LED_headlamp_powermodule_2_left",
            "LED_headlamp_powermodule_2_right",
            "Multimedia_operating_unit_2",
            "Data_medium_2",
            "Analog_clock",
            "Relative_Air_Humidity_Interior_Sender",
            "Sensor_controlled_power_rear_lid",
            "Battery_monitoring_control_module_2",
            "Air_conditioning_compressor",
            "Control_module_for_auxiliary_blower_motors",
            "High_beam_powermodule_left",
            "High_beam_powermodule_right",
            "Coolant_heater",
        ),
    )
})
# DIDs from base 0x36A0: Hardware_Number labels; same non-contiguous
# offset/ECU layout as the Coding_Values group at base 0x3610.
UDS_RDBI.dataIdentifiers.update({
    0x36a0 + offset: ecu_name + "_Hardware_Number"
    for offset, ecu_name in zip(
        (0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
         0x0b, 0x0c, 0x0d, 0x19, 0x1a, 0x1c, 0x1e, 0x1f, 0x20, 0x21, 0x22,
         0x23, 0x24, 0x25, 0x26, 0x27),
        (
            "Electrically_adjustable_steering_column",
            "Relative_humidity_sensor_in_fresh_air_intake_duct",
            "Rear_spoiler_adjustment",
            "Roof_blind_2",
            "Motor_for_wind_deflector",
            "Voltage_stabilizer",
            "Switch_module_for_driver_seat",
            "Switch_module_for_front_passenger_seat",
            "Switch_module_for_rear_seat_driver_side",
            "Switch_module_for_rear_seat_front_passenger_side",
            "Switch_module_2_for_driver_seat",
            "Switch_module_2_for_front_passenger_seat",
            "Switch_module_2_for_rear_seat_front_passenger_side",
            "Compact_disc_database",
            "LED_headlamp_powermodule_2_left",
            "LED_headlamp_powermodule_2_right",
            "Multimedia_operating_unit_2",
            "Data_medium_2",
            "Analog_clock",
            "Relative_Air_Humidity_Interior_Sender",
            "Sensor_controlled_power_rear_lid",
            "Battery_monitoring_control_module_2",
            "Air_conditioning_compressor",
            "Control_module_for_auxiliary_blower_motors",
            "High_beam_powermodule_left",
            "High_beam_powermodule_right",
            "Coolant_heater",
        ),
    )
})
# DIDs from base 0x36D0: Hardware_Version_Number labels; same non-contiguous
# offset/ECU layout as the Coding_Values group at base 0x3610.
UDS_RDBI.dataIdentifiers.update({
    0x36d0 + offset: ecu_name + "_Hardware_Version_Number"
    for offset, ecu_name in zip(
        (0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
         0x0b, 0x0c, 0x0d, 0x19, 0x1a, 0x1c, 0x1e, 0x1f, 0x20, 0x21, 0x22,
         0x23, 0x24, 0x25, 0x26, 0x27),
        (
            "Electrically_adjustable_steering_column",
            "Relative_humidity_sensor_in_fresh_air_intake_duct",
            "Rear_spoiler_adjustment",
            "Roof_blind_2",
            "Motor_for_wind_deflector",
            "Voltage_stabilizer",
            "Switch_module_for_driver_seat",
            "Switch_module_for_front_passenger_seat",
            "Switch_module_for_rear_seat_driver_side",
            "Switch_module_for_rear_seat_front_passenger_side",
            "Switch_module_2_for_driver_seat",
            "Switch_module_2_for_front_passenger_seat",
            "Switch_module_2_for_rear_seat_front_passenger_side",
            "Compact_disc_database",
            "LED_headlamp_powermodule_2_left",
            "LED_headlamp_powermodule_2_right",
            "Multimedia_operating_unit_2",
            "Data_medium_2",
            "Analog_clock",
            "Relative_Air_Humidity_Interior_Sender",
            "Sensor_controlled_power_rear_lid",
            "Battery_monitoring_control_module_2",
            "Air_conditioning_compressor",
            "Control_module_for_auxiliary_blower_motors",
            "High_beam_powermodule_left",
            "High_beam_powermodule_right",
            "Coolant_heater",
        ),
    )
})
# DIDs from base 0x3700: Serial_Number labels; same non-contiguous
# offset/ECU layout as the Coding_Values group at base 0x3610.
UDS_RDBI.dataIdentifiers.update({
    0x3700 + offset: ecu_name + "_Serial_Number"
    for offset, ecu_name in zip(
        (0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
         0x0b, 0x0c, 0x0d, 0x19, 0x1a, 0x1c, 0x1e, 0x1f, 0x20, 0x21, 0x22,
         0x23, 0x24, 0x25, 0x26, 0x27),
        (
            "Electrically_adjustable_steering_column",
            "Relative_humidity_sensor_in_fresh_air_intake_duct",
            "Rear_spoiler_adjustment",
            "Roof_blind_2",
            "Motor_for_wind_deflector",
            "Voltage_stabilizer",
            "Switch_module_for_driver_seat",
            "Switch_module_for_front_passenger_seat",
            "Switch_module_for_rear_seat_driver_side",
            "Switch_module_for_rear_seat_front_passenger_side",
            "Switch_module_2_for_driver_seat",
            "Switch_module_2_for_front_passenger_seat",
            "Switch_module_2_for_rear_seat_front_passenger_side",
            "Compact_disc_database",
            "LED_headlamp_powermodule_2_left",
            "LED_headlamp_powermodule_2_right",
            "Multimedia_operating_unit_2",
            "Data_medium_2",
            "Analog_clock",
            "Relative_Air_Humidity_Interior_Sender",
            "Sensor_controlled_power_rear_lid",
            "Battery_monitoring_control_module_2",
            "Air_conditioning_compressor",
            "Control_module_for_auxiliary_blower_motors",
            "High_beam_powermodule_left",
            "High_beam_powermodule_right",
            "Coolant_heater",
        ),
    )
})
# DIDs from base 0x3730: System_Name labels; same non-contiguous
# offset/ECU layout as the Coding_Values group at base 0x3610.
UDS_RDBI.dataIdentifiers.update({
    0x3730 + offset: ecu_name + "_System_Name"
    for offset, ecu_name in zip(
        (0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
         0x0b, 0x0c, 0x0d, 0x19, 0x1a, 0x1c, 0x1e, 0x1f, 0x20, 0x21, 0x22,
         0x23, 0x24, 0x25, 0x26, 0x27),
        (
            "Electrically_adjustable_steering_column",
            "Relative_humidity_sensor_in_fresh_air_intake_duct",
            "Rear_spoiler_adjustment",
            "Roof_blind_2",
            "Motor_for_wind_deflector",
            "Voltage_stabilizer",
            "Switch_module_for_driver_seat",
            "Switch_module_for_front_passenger_seat",
            "Switch_module_for_rear_seat_driver_side",
            "Switch_module_for_rear_seat_front_passenger_side",
            "Switch_module_2_for_driver_seat",
            "Switch_module_2_for_front_passenger_seat",
            "Switch_module_2_for_rear_seat_front_passenger_side",
            "Compact_disc_database",
            "LED_headlamp_powermodule_2_left",
            "LED_headlamp_powermodule_2_right",
            "Multimedia_operating_unit_2",
            "Data_medium_2",
            "Analog_clock",
            "Relative_Air_Humidity_Interior_Sender",
            "Sensor_controlled_power_rear_lid",
            "Battery_monitoring_control_module_2",
            "Air_conditioning_compressor",
            "Control_module_for_auxiliary_blower_motors",
            "High_beam_powermodule_left",
            "High_beam_powermodule_right",
            "Coolant_heater",
        ),
    )
})
# DIDs from base 0x37A0: VW_Slave_FAZIT_string labels; same non-contiguous
# offset/ECU layout as the Coding_Values group at base 0x3610.
UDS_RDBI.dataIdentifiers.update({
    0x37a0 + offset: ecu_name + "_VW_Slave_FAZIT_string"
    for offset, ecu_name in zip(
        (0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
         0x0b, 0x0c, 0x0d, 0x19, 0x1a, 0x1c, 0x1e, 0x1f, 0x20, 0x21, 0x22,
         0x23, 0x24, 0x25, 0x26, 0x27),
        (
            "Electrically_adjustable_steering_column",
            "Relative_humidity_sensor_in_fresh_air_intake_duct",
            "Rear_spoiler_adjustment",
            "Roof_blind_2",
            "Motor_for_wind_deflector",
            "Voltage_stabilizer",
            "Switch_module_for_driver_seat",
            "Switch_module_for_front_passenger_seat",
            "Switch_module_for_rear_seat_driver_side",
            "Switch_module_for_rear_seat_front_passenger_side",
            "Switch_module_2_for_driver_seat",
            "Switch_module_2_for_front_passenger_seat",
            "Switch_module_2_for_rear_seat_front_passenger_side",
            "Compact_disc_database",
            "LED_headlamp_powermodule_2_left",
            "LED_headlamp_powermodule_2_right",
            "Multimedia_operating_unit_2",
            "Data_medium_2",
            "Analog_clock",
            "Relative_Air_Humidity_Interior_Sender",
            "Sensor_controlled_power_rear_lid",
            "Battery_monitoring_control_module_2",
            "Air_conditioning_compressor",
            "Control_module_for_auxiliary_blower_motors",
            "High_beam_powermodule_left",
            "High_beam_powermodule_right",
            "Coolant_heater",
        ),
    )
})
# DIDs 0x5867-0x5869: in-use monitor performance ratio records 1-3.
UDS_RDBI.dataIdentifiers.update({
    0x5867 + idx: "In_use_monitor_performance_ratio_%d" % (idx + 1)
    for idx in range(3)
})
# DIDs 0x6001-0x6029: Coding_Values labels (contiguous identifier range).
# Note this group's ECU order differs slightly from the 0x0700 groups
# (LED headlamp powermodules and operating/display units at 0x600A-0x600F).
UDS_RDBI.dataIdentifiers.update({
    0x6001 + offset: ecu_name + "_Coding_Values"
    for offset, ecu_name in enumerate((
        "Control_unit_for_wiper_motor",
        "Rain_light_recognition_sensor",
        "Light_switch",
        "Garage_door_opener_control_module",
        "Garage_door_opener_operating_unit",
        "Ignition_key",
        "Left_front_seat_ventilation_control_module",
        "Right_front_seat_ventilation_control_module",
        "Left_rear_seat_ventilation_control_module",
        "LED_headlamp_powermodule_left",
        "LED_headlamp_powermodule_right",
        "LED_headlamp_powermodule_2_left",
        "LED_headlamp_powermodule_2_right",
        "Operating_and_display_unit_1",
        "Operating_and_display_unit_2",
        "Right_rear_seat_ventilation_control_module",
        "Data_medium",
        "Drivers_door_control_module",
        "Front_passengers_door_control_module",
        "Left_headlamp_power_output_stage",
        "Right_headlamp_power_output_stage",
        "Sensor_for_anti_theft_alarm_system",
        "Rear_lid_control_module_2",
        "Alarm_horn",
        "Automatic_day_night_interior_mirror",
        "Remote_control_auxiliary_heater",
        "Fresh_air_blower_front",
        "Fresh_air_blower_back",
        "Alternator",
        "Interior_light_module",
        "Refrigerant_pressure_and_temperature_sender",
        "Sun_roof",
        "Steering_column_lock_actuator",
        "Anti_theft_tilt_system_control_unit",
        "Tire_pressure_monitor_antenna",
        "Heated_windshield_control_module",
        "Rear_light_left_1",
        "Ceiling_light_module",
        "Left_front_massage_seat_control_module",
        "Right_front_massage_seat_control_module",
        "Control_module_for_auxiliary_air_heater",
    ))
})
UDS_RDBI.dataIdentifiers[0x602a] = "Belt Pretensioner left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x602b] = "Belt Pretensioner right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x602c] = "Occupant Detection_Coding_Values"
UDS_RDBI.dataIdentifiers[0x602d] = "Selector_lever_Coding_Values"
UDS_RDBI.dataIdentifiers[0x602e] = "NOx_sensor_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x602f] = "NOx_sensor_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6030] = "Ioniser_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6031] = "Multi_function_steering_wheel_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6032] = "Left_rear_door_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6033] = "Right_rear_door_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6034] = "Left_rear_massage_seat_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6035] = "Right_rear_massage_seat_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6036] = "Display_unit_1_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6037] = "Battery_monitoring_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6038] = "Roof_blind_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6039] = "Sun_roof_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x603a] = "Steering_angle_sender_Coding_Values"
UDS_RDBI.dataIdentifiers[0x603b] = "Lane_change_assistant 2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x603c] = "Pitch_rate_sender_Coding_Values"
UDS_RDBI.dataIdentifiers[0x603d] = "ESP_sensor_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x603e] = "Electronic_ignition_lock_Coding_Values"
UDS_RDBI.dataIdentifiers[0x603f] = "Air_quality_sensor_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6040] = "Display_unit_2_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6041] = "Telephone_handset_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6042] = "Chip_card_reader_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6043] = "Traffic_data_aerial_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6044] = "Hands_free_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6045] = "Telephone_handset_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6046] = "Display_unit_front_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6047] = "Multimedia_operating_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6048] = "Digital_sound_system_control_module_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6049] = "Electrically_adjustable_steering_column_Coding_Values"
UDS_RDBI.dataIdentifiers[0x604a] = "Interface_for_external_multimedia_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x604b] = "Relative_Air_Humidity_Interior_Sender_Coding_Values"
UDS_RDBI.dataIdentifiers[0x604c] = "Drivers_door_rear_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x604d] = "Passengers_rear_door_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x604e] = "Sensor_controlled_power_rear_lid_Coding_Values"
UDS_RDBI.dataIdentifiers[0x604f] = "Camera_for_night_vision_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6050] = "Relative_humidity_sensor_in_fresh_air_intake_duct_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6051] = "Rear_spoiler_adjustment_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6052] = "Roof_blind_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6053] = "Motor_for_wind_deflector_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6054] = "Voltage_stabilizer_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6055] = "Switch_module_for_driver_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6056] = "Switch_module_for_front_passenger_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6057] = "Switch_module_for_rear_seat_driver_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6058] = "Switch_module_for_rear_seat_front_passenger_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6059] = "Switch_module_2_for_driver_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x605a] = "Battery_charger_unit_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x605b] = "Battery_charger_unit_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x605c] = "Battery_charger_unit_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x605d] = "Air_conditioning_compressor_Coding_Values"
UDS_RDBI.dataIdentifiers[0x605e] = "Neck_heating_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x605f] = "Neck_heating_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6060] = "Switch_module_2_for_front_passenger_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6061] = "Switch_module_2_for_rear_seat_front_passenger_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6062] = "Compact_disc_database_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6063] = "Rear_climatronic_operating_and_display_unit_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6064] = "Rear_climatronic_operating_and_display_unit_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6065] = "Door_handle_front_left_Kessy_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6066] = "Door_handle_front_right_Kessy_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6067] = "Door_handle_rear_left_Kessy_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6068] = "Door_handle_rear_right_Kessy_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6069] = "Power_converter_DC_AC_Coding_Values"
UDS_RDBI.dataIdentifiers[0x606a] = "Battery_monitoring_control_module_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x606b] = "Matrix_headlamp_powermodule_1_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x606c] = "Matrix_headlamp_powermodule_1_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x606d] = "High_beam_powermodule_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x606e] = "High_beam_powermodule_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x606f] = "Air_suspension_compressor_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6070] = "Rear_brake_actuator_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6071] = "Rear_brake_actuator_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6072] = "Analog_clock_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6073] = "Rear_door_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6079] = "Data_medium_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x607a] = "Operating_unit_center_console_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x607b] = "Operating_unit_center_console_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x607c] = "Operating_unit_center_console_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x607d] = "Operating_unit_center_console_4_Coding_Values"
UDS_RDBI.dataIdentifiers[0x607e] = "Interface_for_radiodisplay_Coding_Values"
UDS_RDBI.dataIdentifiers[0x607f] = "Parkassist_entry_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6086] = "Belt_pretensioner_3rd_row_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6087] = "Belt_pretensioner_3rd_row_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6088] = "Injection_valve_heater_control_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6089] = "Steering_column_switch_Coding_Values"
UDS_RDBI.dataIdentifiers[0x608a] = "Brake_assistance_Coding_Values"
UDS_RDBI.dataIdentifiers[0x608b] = "Trailer_articulation_angle_sensor_Coding_Values"
UDS_RDBI.dataIdentifiers[0x608c] = "Cup_holder_with_heater_and_cooling_element_Coding_Values"
UDS_RDBI.dataIdentifiers[0x608d] = "Range_of_vision_sensing_Coding_Values"
UDS_RDBI.dataIdentifiers[0x608e] = "Convenience_and_driver_assist_operating_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x608f] = "Cradle_rear_climatronic_operating_and_display_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6090] = "Trailer_weight_nose_weight_detection_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6091] = "Sensor_carbon_dioxide_concentration_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6092] = "Sensor_fine_dust_concentration_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6093] = "Volume_control_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6094] = "Belt_buckle_presenter_2nd_row_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6095] = "Belt_buckle_presenter_2nd_row_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6096] = "Operating_and_display_unit_6_for_air_conditioning_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6097] = "Active_accelerator_pedal_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6098] = "Multimedia_operating_unit_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6099] = "Display_unit_3_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x609a] = "Display_unit_4_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x609b] = "Display_unit_5_for_multimedia_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x609c] = "Control_module_for_auxiliary_blower_motors_Coding_Values"
UDS_RDBI.dataIdentifiers[0x609d] = "Operating_and_display_unit_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x609e] = "Operating_and_display_unit_4_Coding_Values"
UDS_RDBI.dataIdentifiers[0x609f] = "Operating_and_display_unit_5_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a0] = "Side Sensor Driver Front_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a1] = "Side Sensor Passenger Front_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a2] = "Side Sensor Driver Rear_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a3] = "Side Sensor Passenger Rear_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a4] = "Front Sensor Driver_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a5] = "Front Sensor Passenger_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a6] = "Pedestrian Protection Driver_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a7] = "Pedestrian Protection Passenger_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a8] = "Rear Sensor Center_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60a9] = "Pedestrian Protection Center_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60aa] = "Pedestrian Protection Contact_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ab] = "Pedestrian_protection_driver_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ac] = "Pedestrian_protection_passenger_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ad] = "Central_sensor_XY_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ae] = "Refrigerant_pressure_and_temperature_sender_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60af] = "Refrigerant_pressure_and_temperature_sender_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b0] = "Switch_for_rear_multicontour_seat_driver_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b1] = "Valve_block_1_in_driver_side_rear_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b2] = "Valve_block_2_in_driver_side_rear_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b3] = "Valve_block_3_in_driver_side_rear_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b4] = "Switch_for_rear_multicontour_seat_passenger_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b5] = "Valve_block_1_in_passenger_side_rear_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b6] = "Valve_block_2_in_passenger_side_rear_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b7] = "Valve_block_3_in_passenger_side_rear_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b8] = "Switch_for_front_multicontour_seat_driver_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60b9] = "Valve_block_1_in_driver_side_front_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ba] = "Valve_block_2_in_driver_side_front_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60bb] = "Valve_block_3_in_driver_side_front_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60bc] = "Switch_for_front_multicontour_seat_passenger_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60bd] = "Valve_block_1_in_passenger_side_front_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60be] = "Valve_block_2_in_passenger_side_front_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60bf] = "Valve_block_3_in_passenger_side_front_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c0] = "Coolant_heater_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c1] = "Seat_backrest_fan_1_front_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c2] = "Seat_backrest_fan_2_front_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c3] = "Seat_cushion_fan_1_front_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c4] = "Seat_cushion_fan_2_front_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c5] = "Seat_backrest_fan_1_front_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c6] = "Seat_backrest_fan_2_front_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c7] = "Seat_cushion_fan_1_front_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c8] = "Seat_cushion_fan_2_front_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60c9] = "Operating_and_display_unit_1_for_air_conditioning_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ca] = "Operating_and_display_unit_2_for_air_conditioning_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60cb] = "Operating_and_display_unit_3_for_air_conditioning_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60cc] = "Operating_and_display_unit_4_for_air_conditioning_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60cd] = "Operating_and_display_unit_5_for_air_conditioning_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ce] = "Pedestrian_protection_left_hand_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60cf] = "Pedestrian_protection_right_hand_side_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d0] = "Battery_junction_box_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d1] = "Cell_module_controller_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d2] = "Cell_module_controller_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d3] = "Cell_module_controller_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d4] = "Cell_module_controller_4_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d5] = "Cell_module_controller_5_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d6] = "Cell_module_controller_6_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d7] = "Cell_module_controller_7_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d8] = "Cell_module_controller_8_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60d9] = "Cell_module_controller_9_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60da] = "Cell_module_controller_10_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60db] = "Cell_module_controller_11_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60dc] = "Cell_module_controller_12_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60dd] = "Seat_backrest_fan_1_rear_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60de] = "Seat_backrest_fan_2_rear_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60df] = "Seat_cushion_fan_1_rear_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e0] = "Seat_cushion_fan_2_rear_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e1] = "Seat_backrest_fan_1_rear_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e2] = "Seat_backrest_fan_2_rear_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e3] = "Seat_cushion_fan_1_rear_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e4] = "Seat_cushion_fan_2_rear_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e5] = "Auxiliary_blower_motor_control_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e6] = "Auxiliary_blower_motor_control_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e7] = "Infrared_sender_for_front_observation_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e8] = "Starter_generator_control_module_sub_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60e9] = "Media_player_1_sub_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ea] = "Media_player_2_sub_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60eb] = "Dedicated_short_range_communication_aerial_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ec] = "Refrigerant_pressure_and_temperature_sender_4_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ed] = "Refrigerant_pressure_and_temperature_sender_5_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ee] = "Refrigerant_pressure_and_temperature_sender_6_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ef] = "Air_coolant_actuator_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f0] = "Air_coolant_actuator_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f1] = "Cell_module_controller_13_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f2] = "Cell_module_controller_14_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f3] = "Cell_module_controller_15_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f5] = "Seat_heating_rear_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f6] = "LED_warning_indicator_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f7] = "Automatic_transmission_fluid_pump_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f8] = "Manual_transmission_fluid_pump_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60f9] = "Convenience_and_driver_assist_operating_unit_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60fb] = "Air_coolant_actuator_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60fc] = "Valve_block_4_in_driver_side_rear_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60fd] = "Valve_block_4_in_passenger_side_rear_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60fe] = "Valve_block_4_in_driver_side_front_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x60ff] = "Valve_block_4_in_passenger_side_front_seat_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6101] = "Rear_climatronic_operating_and_display_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6102] = "Refrigerant_expansion_valve_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6103] = "Refrigerant_expansion_valve_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6104] = "Refrigerant_expansion_valve_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6105] = "Refrigerant_shut_off_valve_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6106] = "Refrigerant_shut_off_valve_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6107] = "Refrigerant_shut_off_valve_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6108] = "Refrigerant_shut_off_valve_4_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6109] = "Refrigerant_shut_off_valve_5_Coding_Values"
UDS_RDBI.dataIdentifiers[0x610a] = "Sunlight_sensor_Coding_Values"
UDS_RDBI.dataIdentifiers[0x610b] = "Near_field_communication_control_module_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x610c] = "Clutch_control_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x610d] = "Electrical_charger_Coding_Values"
UDS_RDBI.dataIdentifiers[0x610e] = "Rear_light_left_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x610f] = "Rear_light_right_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6110] = "Rear_light_right_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6111] = "Sunlight_sensor_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6112] = "Radiator_shutter_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6113] = "Radiator_shutter_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6114] = "Radiator_shutter_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6115] = "Radiator_shutter_4_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6118] = "Special_key_operating_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6119] = "Radio_interface_Coding_Values"
UDS_RDBI.dataIdentifiers[0x611a] = "Video_self_protection_recorder_Coding_Values"
UDS_RDBI.dataIdentifiers[0x611b] = "Special_vehicle_assist_interface_Coding_Values"
UDS_RDBI.dataIdentifiers[0x611c] = "Electric_system_disconnection_diode_Coding_Values"
UDS_RDBI.dataIdentifiers[0x611d] = "Cradle_rear_climatronic_operating_and_display_unit_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x611e] = "Belt_pretensioner_2nd_row_left_Coding_Values"
UDS_RDBI.dataIdentifiers[0x611f] = "Belt_pretensioner_2nd_row_right_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6120] = "Electrical_variable_camshaft_phasing_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6121] = "Electrical_variable_camshaft_phasing_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6122] = "Wireless_operating_unit_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6123] = "Wireless_operating_unit_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6124] = "Front_windshield_washer_pump_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6125] = "Air_quality_sensor_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6126] = "Fragrancing_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6127] = "Coolant_valve_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6128] = "Near_field_communication_control_module_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6129] = "Interior_monitoring_rear_Coding_Values"
UDS_RDBI.dataIdentifiers[0x612a] = "Cooler_fan_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x612b] = "Control_unit_heating_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x612c] = "Control_unit_heating_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x612d] = "Control_unit_heating_3_Coding_Values"
UDS_RDBI.dataIdentifiers[0x612e] = "Control_unit_heating_4_Coding_Values"
UDS_RDBI.dataIdentifiers[0x612f] = "Operating_unit_drive_mode_selection_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6130] = "Side_sensor_a-pillar_driver_front_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6131] = "Side_sensor_a-pillar_passenger_front_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6132] = "Sensor_high_voltage_system_1_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6133] = "Side_sensor_b-pillar_driver_front_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6134] = "Side_sensor_b-pillar_passenger_front_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6135] = "Multi_function_steering_wheel_control_module_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6136] = "Gear_selection_display_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6137] = "Cooler_fan_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6138] = "Gear_selector_control_module_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6139] = "Interior_light_module_2_Coding_Values"
UDS_RDBI.dataIdentifiers[0x613a] = "Radio_control_center_Coding_Values"
UDS_RDBI.dataIdentifiers[0x613b] = "Multimedia_extension_Coding_Values"
UDS_RDBI.dataIdentifiers[0x613c] = "Control_unit_differential_lock_Coding_Values"
UDS_RDBI.dataIdentifiers[0x613d] = "Control_unit_ride_control_system_Coding_Values"
UDS_RDBI.dataIdentifiers[0x613e] = "Control_unit_hands_on_detection_steering_wheel_Coding_Values"
UDS_RDBI.dataIdentifiers[0x613f] = "Front_climatronic_operating_and_display_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6140] = "Auxiliary_display_unit_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6141] = "Card_reader_tv_tuner_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6142] = "Park_lock_actuator_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6143] = "Media_connector_Coding_Values"
UDS_RDBI.dataIdentifiers[0x6144] = "Catalyst_heating_Coding_Values"
# Per-ECU spare-part-number DIDs (0x6201-0x6288); the DID layout mirrors the
# coding-values range (0x62xx = 0x60xx + 0x200) and shares the same gaps
# (e.g. 0x6274-0x6278, 0x6280-0x6285).  Registered as a single dict merge.
UDS_RDBI.dataIdentifiers.update({
    0x6201: "Control_unit_for_wiper_motor_Spare_Part_Number",
    0x6202: "Rain_light_recognition_sensor_Spare_Part_Number",
    0x6203: "Light_switch_Spare_Part_Number",
    0x6204: "Garage_door_opener_control_module_Spare_Part_Number",
    0x6205: "Garage_door_opener_operating_unit_Spare_Part_Number",
    0x6206: "Ignition_key_Spare_Part_Number",
    0x6207: "Left_front_seat_ventilation_control_module_Spare_Part_Number",
    0x6208: "Right_front_seat_ventilation_control_module_Spare_Part_Number",
    0x6209: "Left_rear_seat_ventilation_control_module_Spare_Part_Number",
    0x620a: "LED_headlamp_powermodule_left_Spare_Part_Number",
    0x620b: "LED_headlamp_powermodule_right_Spare_Part_Number",
    0x620c: "LED_headlamp_powermodule_2_left_Spare_Part_Number",
    0x620d: "LED_headlamp_powermodule_2_right_Spare_Part_Number",
    0x620e: "Operating_and_display_unit_1_Spare_Part_Number",
    0x620f: "Operating_and_display_unit_2_Spare_Part_Number",
    0x6210: "Right_rear_seat_ventilation_control_module_Spare_Part_Number",
    0x6211: "Data_medium_Spare_Part_Number",
    0x6212: "Drivers_door_control_module_Spare_Part_Number",
    0x6213: "Front_passengers_door_control_module_Spare_Part_Number",
    0x6214: "Left_headlamp_power_output_stage_Spare_Part_Number",
    0x6215: "Right_headlamp_power_output_stage_Spare_Part_Number",
    0x6216: "Sensor_for_anti_theft_alarm_system_Spare_Part_Number",
    0x6217: "Rear_lid_control_module_2_Spare_Part_Number",
    0x6218: "Alarm_horn_Spare_Part_Number",
    0x6219: "Automatic_day_night_interior_mirror_Spare_Part_Number",
    0x621a: "Remote_control_auxiliary_heater_Spare_Part_Number",
    0x621b: "Fresh_air_blower_front_Spare_Part_Number",
    0x621c: "Fresh_air_blower_back_Spare_Part_Number",
    0x621d: "Alternator_Spare_Part_Number",
    0x621e: "Interior_light_module_Spare_Part_Number",
    0x621f: "Refrigerant_pressure_and_temperature_sender_Spare_Part_Number",
    0x6220: "Sun_roof_Spare_Part_Number",
    0x6221: "Steering_column_lock_actuator_Spare_Part_Number",
    0x6222: "Anti_theft_tilt_system_control_unit_Spare_Part_Number",
    0x6223: "Tire_pressure_monitor_antenna_Spare_Part_Number",
    0x6224: "Heated_windshield_control_module_Spare_Part_Number",
    0x6225: "Rear_light_left_1_Spare_Part_Number",
    0x6226: "Ceiling_light_module_Spare_Part_Number",
    0x6227: "Left_front_massage_seat_control_module_Spare_Part_Number",
    0x6228: "Right_front_massage_seat_control_module_Spare_Part_Number",
    0x6229: "Control_module_for_auxiliary_air_heater_Spare_Part_Number",
    # NOTE(review): space-containing labels kept verbatim (runtime values),
    # matching the corresponding coding-values DIDs.
    0x622a: "Belt Pretensioner left_Spare_Part_Number",
    0x622b: "Belt Pretensioner right_Spare_Part_Number",
    0x622c: "Occupant Detection_Spare_Part_Number",
    0x622d: "Selector_lever_Spare_Part_Number",
    0x622e: "NOx_sensor_1_Spare_Part_Number",
    0x622f: "NOx_sensor_2_Spare_Part_Number",
    0x6230: "Ioniser_Spare_Part_Number",
    0x6231: "Multi_function_steering_wheel_control_module_Spare_Part_Number",
    0x6232: "Left_rear_door_control_module_Spare_Part_Number",
    0x6233: "Right_rear_door_control_module_Spare_Part_Number",
    0x6234: "Left_rear_massage_seat_control_module_Spare_Part_Number",
    0x6235: "Right_rear_massage_seat_control_module_Spare_Part_Number",
    0x6236: "Display_unit_1_for_multimedia_system_Spare_Part_Number",
    0x6237: "Battery_monitoring_control_module_Spare_Part_Number",
    0x6238: "Roof_blind_Spare_Part_Number",
    0x6239: "Sun_roof_2_Spare_Part_Number",
    0x623a: "Steering_angle_sender_Spare_Part_Number",
    0x623b: "Lane_change_assistant 2_Spare_Part_Number",
    0x623c: "Pitch_rate_sender_Spare_Part_Number",
    0x623d: "ESP_sensor_unit_Spare_Part_Number",
    0x623e: "Electronic_ignition_lock_Spare_Part_Number",
    0x623f: "Air_quality_sensor_Spare_Part_Number",
    0x6240: "Display_unit_2_for_multimedia_system_Spare_Part_Number",
    0x6241: "Telephone_handset_2_Spare_Part_Number",
    0x6242: "Chip_card_reader_control_module_Spare_Part_Number",
    0x6243: "Traffic_data_aerial_Spare_Part_Number",
    0x6244: "Hands_free_system_Spare_Part_Number",
    0x6245: "Telephone_handset_Spare_Part_Number",
    0x6246: "Display_unit_front_for_multimedia_system_Spare_Part_Number",
    0x6247: "Multimedia_operating_unit_Spare_Part_Number",
    0x6248: "Digital_sound_system_control_module_2_Spare_Part_Number",
    0x6249: "Electrically_adjustable_steering_column_Spare_Part_Number",
    0x624a: "Interface_for_external_multimedia_unit_Spare_Part_Number",
    0x624b: "Relative_Air_Humidity_Interior_Sender_Spare_Part_Number",
    0x624c: "Drivers_door_rear_control_module_Spare_Part_Number",
    0x624d: "Passengers_rear_door_control_module_Spare_Part_Number",
    0x624e: "Sensor_controlled_power_rear_lid_Spare_Part_Number",
    0x624f: "Camera_for_night_vision_system_Spare_Part_Number",
    0x6250: "Relative_humidity_sensor_in_fresh_air_intake_duct_Spare_Part_Number",
    0x6251: "Rear_spoiler_adjustment_Spare_Part_Number",
    0x6252: "Roof_blind_2_Spare_Part_Number",
    0x6253: "Motor_for_wind_deflector_Spare_Part_Number",
    0x6254: "Voltage_stabilizer_Spare_Part_Number",
    0x6255: "Switch_module_for_driver_seat_Spare_Part_Number",
    0x6256: "Switch_module_for_front_passenger_seat_Spare_Part_Number",
    0x6257: "Switch_module_for_rear_seat_driver_side_Spare_Part_Number",
    0x6258: "Switch_module_for_rear_seat_front_passenger_side_Spare_Part_Number",
    0x6259: "Switch_module_2_for_driver_seat_Spare_Part_Number",
    0x625a: "Battery_charger_unit_1_Spare_Part_Number",
    0x625b: "Battery_charger_unit_2_Spare_Part_Number",
    0x625c: "Battery_charger_unit_3_Spare_Part_Number",
    0x625d: "Air_conditioning_compressor_Spare_Part_Number",
    0x625e: "Neck_heating_left_Spare_Part_Number",
    0x625f: "Neck_heating_right_Spare_Part_Number",
    0x6260: "Switch_module_2_for_front_passenger_seat_Spare_Part_Number",
    0x6261: "Switch_module_2_for_rear_seat_front_passenger_side_Spare_Part_Number",
    0x6262: "Compact_disc_database_Spare_Part_Number",
    0x6263: "Rear_climatronic_operating_and_display_unit_left_Spare_Part_Number",
    0x6264: "Rear_climatronic_operating_and_display_unit_right_Spare_Part_Number",
    0x6265: "Door_handle_front_left_Kessy_Spare_Part_Number",
    0x6266: "Door_handle_front_right_Kessy_Spare_Part_Number",
    0x6267: "Door_handle_rear_left_Kessy_Spare_Part_Number",
    0x6268: "Door_handle_rear_right_Kessy_Spare_Part_Number",
    0x6269: "Power_converter_DC_AC_Spare_Part_Number",
    0x626a: "Battery_monitoring_control_module_2_Spare_Part_Number",
    0x626b: "Matrix_headlamp_powermodule_1_left_Spare_Part_Number",
    0x626c: "Matrix_headlamp_powermodule_1_right_Spare_Part_Number",
    0x626d: "High_beam_powermodule_left_Spare_Part_Number",
    0x626e: "High_beam_powermodule_right_Spare_Part_Number",
    0x626f: "Air_suspension_compressor_Spare_Part_Number",
    0x6270: "Rear_brake_actuator_1_Spare_Part_Number",
    0x6271: "Rear_brake_actuator_2_Spare_Part_Number",
    0x6272: "Analog_clock_Spare_Part_Number",
    0x6273: "Rear_door_control_module_Spare_Part_Number",
    0x6279: "Data_medium_2_Spare_Part_Number",
    0x627a: "Operating_unit_center_console_1_Spare_Part_Number",
    0x627b: "Operating_unit_center_console_2_Spare_Part_Number",
    0x627c: "Operating_unit_center_console_3_Spare_Part_Number",
    0x627d: "Operating_unit_center_console_4_Spare_Part_Number",
    0x627e: "Interface_for_radiodisplay_Spare_Part_Number",
    0x627f: "Parkassist_entry_Spare_Part_Number",
    0x6286: "Belt_pretensioner_3rd_row_left_Spare_Part_Number",
    0x6287: "Belt_pretensioner_3rd_row_right_Spare_Part_Number",
    0x6288: "Injection_valve_heater_control_unit_Spare_Part_Number",
})
UDS_RDBI.dataIdentifiers[0x6289] = "Steering_column_switch_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x628a] = "Brake_assistance_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x628b] = "Trailer_articulation_angle_sensor_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x628c] = "Cup_holder_with_heater_and_cooling_element_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x628d] = "Range_of_vision_sensing_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x628e] = "Convenience_and_driver_assist_operating_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x628f] = "Cradle_rear_climatronic_operating_and_display_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6290] = "Trailer_weight_nose_weight_detection_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6291] = "Sensor_carbon_dioxide_concentration_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6292] = "Sensor_fine_dust_concentration_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6293] = "Volume_control_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6294] = "Belt_buckle_presenter_2nd_row_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6295] = "Belt_buckle_presenter_2nd_row_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6296] = "Operating_and_display_unit_6_for_air_conditioning_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6297] = "Active_accelerator_pedal_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6298] = "Multimedia_operating_unit_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6299] = "Display_unit_3_for_multimedia_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x629a] = "Display_unit_4_for_multimedia_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x629b] = "Display_unit_5_for_multimedia_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x629c] = "Control_module_for_auxiliary_blower_motors_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x629d] = "Operating_and_display_unit_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x629e] = "Operating_and_display_unit_4_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x629f] = "Operating_and_display_unit_5_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a0] = "Side Sensor Driver Front_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a1] = "Side Sensor Passenger Front_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a2] = "Side Sensor Driver Rear_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a3] = "Side Sensor Passenger Rear_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a4] = "Front Sensor Driver_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a5] = "Front Sensor Passenger_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a6] = "Pedestrian Protection Driver_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a7] = "Pedestrian Protection Passenger_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a8] = "Rear Sensor Center_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62a9] = "Pedestrian Protection Center_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62aa] = "Pedestrian Protection Contact_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ab] = "Pedestrian_protection_driver_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ac] = "Pedestrian_protection_passenger_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ad] = "Central_sensor_XY_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ae] = "Refrigerant_pressure_and_temperature_sender_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62af] = "Refrigerant_pressure_and_temperature_sender_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b0] = "Switch_for_rear_multicontour_seat_driver_side_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b1] = "Valve_block_1_in_driver_side_rear_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b2] = "Valve_block_2_in_driver_side_rear_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b3] = "Valve_block_3_in_driver_side_rear_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b4] = "Switch_for_rear_multicontour_seat_passenger_side_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b5] = "Valve_block_1_in_passenger_side_rear_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b6] = "Valve_block_2_in_passenger_side_rear_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b7] = "Valve_block_3_in_passenger_side_rear_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b8] = "Switch_for_front_multicontour_seat_driver_side_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62b9] = "Valve_block_1_in_driver_side_front_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ba] = "Valve_block_2_in_driver_side_front_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62bb] = "Valve_block_3_in_driver_side_front_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62bc] = "Switch_for_front_multicontour_seat_passenger_side_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62bd] = "Valve_block_1_in_passenger_side_front_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62be] = "Valve_block_2_in_passenger_side_front_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62bf] = "Valve_block_3_in_passenger_side_front_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c0] = "Coolant_heater_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c1] = "Seat_backrest_fan_1_front_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c2] = "Seat_backrest_fan_2_front_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c3] = "Seat_cushion_fan_1_front_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c4] = "Seat_cushion_fan_2_front_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c5] = "Seat_backrest_fan_1_front_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c6] = "Seat_backrest_fan_2_front_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c7] = "Seat_cushion_fan_1_front_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c8] = "Seat_cushion_fan_2_front_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62c9] = "Operating_and_display_unit_1_for_air_conditioning_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ca] = "Operating_and_display_unit_2_for_air_conditioning_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62cb] = "Operating_and_display_unit_3_for_air_conditioning_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62cc] = "Operating_and_display_unit_4_for_air_conditioning_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62cd] = "Operating_and_display_unit_5_for_air_conditioning_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ce] = "Pedestrian_protection_left_hand_side_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62cf] = "Pedestrian_protection_right_hand_side_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d0] = "Battery_junction_box_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d1] = "Cell_module_controller_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d2] = "Cell_module_controller_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d3] = "Cell_module_controller_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d4] = "Cell_module_controller_4_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d5] = "Cell_module_controller_5_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d6] = "Cell_module_controller_6_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d7] = "Cell_module_controller_7_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d8] = "Cell_module_controller_8_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62d9] = "Cell_module_controller_9_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62da] = "Cell_module_controller_10_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62db] = "Cell_module_controller_11_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62dc] = "Cell_module_controller_12_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62dd] = "Seat_backrest_fan_1_rear_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62de] = "Seat_backrest_fan_2_rear_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62df] = "Seat_cushion_fan_1_rear_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e0] = "Seat_cushion_fan_2_rear_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e1] = "Seat_backrest_fan_1_rear_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e2] = "Seat_backrest_fan_2_rear_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e3] = "Seat_cushion_fan_1_rear_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e4] = "Seat_cushion_fan_2_rear_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e5] = "Auxiliary_blower_motor_control_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e6] = "Auxiliary_blower_motor_control_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e7] = "Infrared_sender_for_front_observation_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e8] = "Starter_generator_control_module_sub_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62e9] = "Media_player_1_sub_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ea] = "Media_player_2_sub_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62eb] = "Dedicated_short_range_communication_aerial_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ec] = "Refrigerant_pressure_and_temperature_sender_4_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ed] = "Refrigerant_pressure_and_temperature_sender_5_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ee] = "Refrigerant_pressure_and_temperature_sender_6_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ef] = "Air_coolant_actuator_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f0] = "Air_coolant_actuator_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f1] = "Cell_module_controller_13_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f2] = "Cell_module_controller_14_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f3] = "Cell_module_controller_15_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f5] = "Seat_heating_rear_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f6] = "LED_warning_indicator_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f7] = "Automatic_transmission_fluid_pump_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f8] = "Manual_transmission_fluid_pump_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62f9] = "Convenience_and_driver_assist_operating_unit_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62fb] = "Air_coolant_actuator_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62fc] = "Valve_block_4_in_driver_side_rear_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62fd] = "Valve_block_4_in_passenger_side_rear_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62fe] = "Valve_block_4_in_driver_side_front_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x62ff] = "Valve_block_4_in_passenger_side_front_seat_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6301] = "Rear_climatronic_operating_and_display_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6302] = "Refrigerant_expansion_valve_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6303] = "Refrigerant_expansion_valve_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6304] = "Refrigerant_expansion_valve_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6305] = "Refrigerant_shut_off_valve_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6306] = "Refrigerant_shut_off_valve_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6307] = "Refrigerant_shut_off_valve_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6308] = "Refrigerant_shut_off_valve_4_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6309] = "Refrigerant_shut_off_valve_5_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x630a] = "Sunlight_sensor_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x630b] = "Near_field_communication_control_module_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x630c] = "Clutch_control_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x630d] = "Electrical_charger_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x630e] = "Rear_light_left_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x630f] = "Rear_light_right_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6310] = "Rear_light_right_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6311] = "Sunlight_sensor_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6312] = "Radiator_shutter_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6313] = "Radiator_shutter_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6314] = "Radiator_shutter_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6315] = "Radiator_shutter_4_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6318] = "Special_key_operating_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6319] = "Radio_interface_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x631a] = "Video_self_protection_recorder_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x631b] = "Special_vehicle_assist_interface_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x631c] = "Electric_system_disconnection_diode_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x631d] = "Cradle_rear_climatronic_operating_and_display_unit_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x631e] = "Belt_pretensioner_2nd_row_left_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x631f] = "Belt_pretensioner_2nd_row_right_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6320] = "Electrical_variable_camshaft_phasing_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6321] = "Electrical_variable_camshaft_phasing_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6322] = "Wireless_operating_unit_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6323] = "Wireless_operating_unit_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6324] = "Front_windshield_washer_pump_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6325] = "Air_quality_sensor_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6326] = "Fragrancing_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6327] = "Coolant_valve_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6328] = "Near_field_communication_control_module_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6329] = "Interior_monitoring_rear_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x632a] = "Cooler_fan_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x632b] = "Control_unit_heating_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x632c] = "Control_unit_heating_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x632d] = "Control_unit_heating_3_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x632e] = "Control_unit_heating_4_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x632f] = "Operating_unit_drive_mode_selection_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6330] = "Side_sensor_a-pillar_driver_front_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6331] = "Side_sensor_a-pillar_passenger_front_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6332] = "Sensor_high_voltage_system_1_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6333] = "Side_sensor_b-pillar_driver_front_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6334] = "Side_sensor_b-pillar_passenger_front_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6335] = "Multi_function_steering_wheel_control_module_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6336] = "Gear_selection_display_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6337] = "Cooler_fan_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6338] = "Gear_selector_control_module_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6339] = "Interior_light_module_2_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x633a] = "Radio_control_center_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x633b] = "Multimedia_extension_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x633c] = "Control_unit_differential_lock_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x633d] = "Control_unit_ride_control_system_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x633e] = "Control_unit_hands_on_detection_steering_wheel_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x633f] = "Front_climatronic_operating_and_display_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6340] = "Auxiliary_display_unit_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6341] = "Card_reader_tv_tuner_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6342] = "Park_lock_actuator_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6343] = "Media_connector_Spare_Part_Number"
UDS_RDBI.dataIdentifiers[0x6344] = "Catalyst_heating_Spare_Part_Number"
# UDS ReadDataByIdentifier (RDBI) name table, DID range 0x6401-0x64aa:
# manufacturer-specific "<ECU>_Application_Software_Version_Number"
# record identifiers.  Registered in one bulk update(); a dict literal
# preserves insertion order, so the resulting dataIdentifiers mapping is
# identical to the per-key assignment form.  The range is sparse
# (e.g. 0x6474-0x6478, 0x6480-0x6485 absent) and some entries carry
# space-separated ECU names — both preserved exactly as generated.
UDS_RDBI.dataIdentifiers.update({
    0x6401: "Control_unit_for_wiper_motor_Application_Software_Version_Number",
    0x6402: "Rain_light_recognition_sensor_Application_Software_Version_Number",
    0x6403: "Light_switch_Application_Software_Version_Number",
    0x6404: "Garage_door_opener_control_module_Application_Software_Version_Number",
    0x6405: "Garage_door_opener_operating_unit_Application_Software_Version_Number",
    0x6406: "Ignition_key_Application_Software_Version_Number",
    0x6407: "Left_front_seat_ventilation_control_module_Application_Software_Version_Number",
    0x6408: "Right_front_seat_ventilation_control_module_Application_Software_Version_Number",
    0x6409: "Left_rear_seat_ventilation_control_module_Application_Software_Version_Number",
    0x640a: "LED_headlamp_powermodule_left_Application_Software_Version_Number",
    0x640b: "LED_headlamp_powermodule_right_Application_Software_Version_Number",
    0x640c: "LED_headlamp_powermodule_2_left_Application_Software_Version_Number",
    0x640d: "LED_headlamp_powermodule_2_right_Application_Software_Version_Number",
    0x640e: "Operating_and_display_unit_1_Application_Software_Version_Number",
    0x640f: "Operating_and_display_unit_2_Application_Software_Version_Number",
    0x6410: "Right_rear_seat_ventilation_control_module_Application_Software_Version_Number",
    0x6411: "Data_medium_Application_Software_Version_Number",
    0x6412: "Drivers_door_control_module_Application_Software_Version_Number",
    0x6413: "Front_passengers_door_control_module_Application_Software_Version_Number",
    0x6414: "Left_headlamp_power_output_stage_Application_Software_Version_Number",
    0x6415: "Right_headlamp_power_output_stage_Application_Software_Version_Number",
    0x6416: "Sensor_for_anti_theft_alarm_system_Application_Software_Version_Number",
    0x6417: "Rear_lid_control_module_2_Application_Software_Version_Number",
    0x6418: "Alarm_horn_Application_Software_Version_Number",
    0x6419: "Automatic_day_night_interior_mirror_Application_Software_Version_Number",
    0x641a: "Remote_control_auxiliary_heater_Application_Software_Version_Number",
    0x641b: "Fresh_air_blower_front_Application_Software_Version_Number",
    0x641c: "Fresh_air_blower_back_Application_Software_Version_Number",
    0x641d: "Alternator_Application_Software_Version_Number",
    0x641e: "Interior_light_module_Application_Software_Version_Number",
    0x641f: "Refrigerant_pressure_and_temperature_sender_Application_Software_Version_Number",
    0x6420: "Sun_roof_Application_Software_Version_Number",
    0x6421: "Steering_column_lock_actuator_Application_Software_Version_Number",
    0x6422: "Anti_theft_tilt_system_control_unit_Application_Software_Version_Number",
    0x6423: "Tire_pressure_monitor_antenna_Application_Software_Version_Number",
    0x6424: "Heated_windshield_control_module_Application_Software_Version_Number",
    0x6425: "Rear_light_left_1_Application_Software_Version_Number",
    0x6426: "Ceiling_light_module_Application_Software_Version_Number",
    0x6427: "Left_front_massage_seat_control_module_Application_Software_Version_Number",
    0x6428: "Right_front_massage_seat_control_module_Application_Software_Version_Number",
    0x6429: "Control_module_for_auxiliary_air_heater_Application_Software_Version_Number",
    # Space-separated ECU names below come from the upstream data source;
    # kept verbatim.
    0x642a: "Belt Pretensioner left_Application_Software_Version_Number",
    0x642b: "Belt Pretensioner right_Application_Software_Version_Number",
    0x642c: "Occupant Detection_Application_Software_Version_Number",
    0x642d: "Selector_lever_Application_Software_Version_Number",
    0x642e: "NOx_sensor_1_Application_Software_Version_Number",
    0x642f: "NOx_sensor_2_Application_Software_Version_Number",
    0x6430: "Ioniser_Application_Software_Version_Number",
    0x6431: "Multi_function_steering_wheel_control_module_Application_Software_Version_Number",
    0x6432: "Left_rear_door_control_module_Application_Software_Version_Number",
    0x6433: "Right_rear_door_control_module_Application_Software_Version_Number",
    0x6434: "Left_rear_massage_seat_control_module_Application_Software_Version_Number",
    0x6435: "Right_rear_massage_seat_control_module_Application_Software_Version_Number",
    0x6436: "Display_unit_1_for_multimedia_system_Application_Software_Version_Number",
    0x6437: "Battery_monitoring_control_module_Application_Software_Version_Number",
    0x6438: "Roof_blind_Application_Software_Version_Number",
    0x6439: "Sun_roof_2_Application_Software_Version_Number",
    0x643a: "Steering_angle_sender_Application_Software_Version_Number",
    0x643b: "Lane_change_assistant 2_Application_Software_Version_Number",
    0x643c: "Pitch_rate_sender_Application_Software_Version_Number",
    0x643d: "ESP_sensor_unit_Application_Software_Version_Number",
    0x643e: "Electronic_ignition_lock_Application_Software_Version_Number",
    0x643f: "Air_quality_sensor_Application_Software_Version_Number",
    0x6440: "Display_unit_2_for_multimedia_system_Application_Software_Version_Number",
    0x6441: "Telephone_handset_2_Application_Software_Version_Number",
    0x6442: "Chip_card_reader_control_module_Application_Software_Version_Number",
    0x6443: "Traffic_data_aerial_Application_Software_Version_Number",
    0x6444: "Hands_free_system_Application_Software_Version_Number",
    0x6445: "Telephone_handset_Application_Software_Version_Number",
    0x6446: "Display_unit_front_for_multimedia_system_Application_Software_Version_Number",
    0x6447: "Multimedia_operating_unit_Application_Software_Version_Number",
    0x6448: "Digital_sound_system_control_module_2_Application_Software_Version_Number",
    0x6449: "Electrically_adjustable_steering_column_Application_Software_Version_Number",
    0x644a: "Interface_for_external_multimedia_unit_Application_Software_Version_Number",
    0x644b: "Relative_Air_Humidity_Interior_Sender_Application_Software_Version_Number",
    0x644c: "Drivers_door_rear_control_module_Application_Software_Version_Number",
    0x644d: "Passengers_rear_door_control_module_Application_Software_Version_Number",
    0x644e: "Sensor_controlled_power_rear_lid_Application_Software_Version_Number",
    0x644f: "Camera_for_night_vision_system_Application_Software_Version_Number",
    0x6450: "Relative_humidity_sensor_in_fresh_air_intake_duct_Application_Software_Version_Number",
    0x6451: "Rear_spoiler_adjustment_Application_Software_Version_Number",
    0x6452: "Roof_blind_2_Application_Software_Version_Number",
    0x6453: "Motor_for_wind_deflector_Application_Software_Version_Number",
    0x6454: "Voltage_stabilizer_Application_Software_Version_Number",
    0x6455: "Switch_module_for_driver_seat_Application_Software_Version_Number",
    0x6456: "Switch_module_for_front_passenger_seat_Application_Software_Version_Number",
    0x6457: "Switch_module_for_rear_seat_driver_side_Application_Software_Version_Number",
    0x6458: "Switch_module_for_rear_seat_front_passenger_side_Application_Software_Version_Number",
    0x6459: "Switch_module_2_for_driver_seat_Application_Software_Version_Number",
    0x645a: "Battery_charger_unit_1_Application_Software_Version_Number",
    0x645b: "Battery_charger_unit_2_Application_Software_Version_Number",
    0x645c: "Battery_charger_unit_3_Application_Software_Version_Number",
    0x645d: "Air_conditioning_compressor_Application_Software_Version_Number",
    0x645e: "Neck_heating_left_Application_Software_Version_Number",
    0x645f: "Neck_heating_right_Application_Software_Version_Number",
    0x6460: "Switch_module_2_for_front_passenger_seat_Application_Software_Version_Number",
    0x6461: "Switch_module_2_for_rear_seat_front_passenger_side_Application_Software_Version_Number",
    0x6462: "Compact_disc_database_Application_Software_Version_Number",
    0x6463: "Rear_climatronic_operating_and_display_unit_left_Application_Software_Version_Number",
    0x6464: "Rear_climatronic_operating_and_display_unit_right_Application_Software_Version_Number",
    0x6465: "Door_handle_front_left_Kessy_Application_Software_Version_Number",
    0x6466: "Door_handle_front_right_Kessy_Application_Software_Version_Number",
    0x6467: "Door_handle_rear_left_Kessy_Application_Software_Version_Number",
    0x6468: "Door_handle_rear_right_Kessy_Application_Software_Version_Number",
    0x6469: "Power_converter_DC_AC_Application_Software_Version_Number",
    0x646a: "Battery_monitoring_control_module_2_Application_Software_Version_Number",
    0x646b: "Matrix_headlamp_powermodule_1_left_Application_Software_Version_Number",
    0x646c: "Matrix_headlamp_powermodule_1_right_Application_Software_Version_Number",
    0x646d: "High_beam_powermodule_left_Application_Software_Version_Number",
    0x646e: "High_beam_powermodule_right_Application_Software_Version_Number",
    0x646f: "Air_suspension_compressor_Application_Software_Version_Number",
    0x6470: "Rear_brake_actuator_1_Application_Software_Version_Number",
    0x6471: "Rear_brake_actuator_2_Application_Software_Version_Number",
    0x6472: "Analog_clock_Application_Software_Version_Number",
    0x6473: "Rear_door_control_module_Application_Software_Version_Number",
    0x6479: "Data_medium_2_Application_Software_Version_Number",
    0x647a: "Operating_unit_center_console_1_Application_Software_Version_Number",
    0x647b: "Operating_unit_center_console_2_Application_Software_Version_Number",
    0x647c: "Operating_unit_center_console_3_Application_Software_Version_Number",
    0x647d: "Operating_unit_center_console_4_Application_Software_Version_Number",
    0x647e: "Interface_for_radiodisplay_Application_Software_Version_Number",
    0x647f: "Parkassist_entry_Application_Software_Version_Number",
    0x6486: "Belt_pretensioner_3rd_row_left_Application_Software_Version_Number",
    0x6487: "Belt_pretensioner_3rd_row_right_Application_Software_Version_Number",
    0x6488: "Injection_valve_heater_control_unit_Application_Software_Version_Number",
    0x6489: "Steering_column_switch_Application_Software_Version_Number",
    0x648a: "Brake_assistance_Application_Software_Version_Number",
    0x648b: "Trailer_articulation_angle_sensor_Application_Software_Version_Number",
    0x648c: "Cup_holder_with_heater_and_cooling_element_Application_Software_Version_Number",
    0x648d: "Range_of_vision_sensing_Application_Software_Version_Number",
    0x648e: "Convenience_and_driver_assist_operating_unit_Application_Software_Version_Number",
    0x648f: "Cradle_rear_climatronic_operating_and_display_unit_Application_Software_Version_Number",
    0x6490: "Trailer_weight_nose_weight_detection_Application_Software_Version_Number",
    0x6491: "Sensor_carbon_dioxide_concentration_Application_Software_Version_Number",
    0x6492: "Sensor_fine_dust_concentration_Application_Software_Version_Number",
    0x6493: "Volume_control_1_Application_Software_Version_Number",
    0x6494: "Belt_buckle_presenter_2nd_row_left_Application_Software_Version_Number",
    0x6495: "Belt_buckle_presenter_2nd_row_right_Application_Software_Version_Number",
    0x6496: "Operating_and_display_unit_6_for_air_conditioning_Application_Software_Version_Number",
    0x6497: "Active_accelerator_pedal_Application_Software_Version_Number",
    0x6498: "Multimedia_operating_unit_2_Application_Software_Version_Number",
    0x6499: "Display_unit_3_for_multimedia_system_Application_Software_Version_Number",
    0x649a: "Display_unit_4_for_multimedia_system_Application_Software_Version_Number",
    0x649b: "Display_unit_5_for_multimedia_system_Application_Software_Version_Number",
    0x649c: "Control_module_for_auxiliary_blower_motors_Application_Software_Version_Number",
    0x649d: "Operating_and_display_unit_3_Application_Software_Version_Number",
    0x649e: "Operating_and_display_unit_4_Application_Software_Version_Number",
    0x649f: "Operating_and_display_unit_5_Application_Software_Version_Number",
    0x64a0: "Side Sensor Driver Front_Application_Software_Version_Number",
    0x64a1: "Side Sensor Passenger Front_Application_Software_Version_Number",
    0x64a2: "Side Sensor Driver Rear_Application_Software_Version_Number",
    0x64a3: "Side Sensor Passenger Rear_Application_Software_Version_Number",
    0x64a4: "Front Sensor Driver_Application_Software_Version_Number",
    0x64a5: "Front Sensor Passenger_Application_Software_Version_Number",
    0x64a6: "Pedestrian Protection Driver_Application_Software_Version_Number",
    0x64a7: "Pedestrian Protection Passenger_Application_Software_Version_Number",
    0x64a8: "Rear Sensor Center_Application_Software_Version_Number",
    0x64a9: "Pedestrian Protection Center_Application_Software_Version_Number",
    0x64aa: "Pedestrian Protection Contact_Application_Software_Version_Number",
})
UDS_RDBI.dataIdentifiers[0x64ab] = "Pedestrian_protection_driver_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ac] = "Pedestrian_protection_passenger_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ad] = "Central_sensor_XY_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ae] = "Refrigerant_pressure_and_temperature_sender_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64af] = "Refrigerant_pressure_and_temperature_sender_3_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b0] = "Switch_for_rear_multicontour_seat_driver_side_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b1] = "Valve_block_1_in_driver_side_rear_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b2] = "Valve_block_2_in_driver_side_rear_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b3] = "Valve_block_3_in_driver_side_rear_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b4] = "Switch_for_rear_multicontour_seat_passenger_side_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b5] = "Valve_block_1_in_passenger_side_rear_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b6] = "Valve_block_2_in_passenger_side_rear_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b7] = "Valve_block_3_in_passenger_side_rear_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b8] = "Switch_for_front_multicontour_seat_driver_side_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64b9] = "Valve_block_1_in_driver_side_front_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ba] = "Valve_block_2_in_driver_side_front_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64bb] = "Valve_block_3_in_driver_side_front_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64bc] = "Switch_for_front_multicontour_seat_passenger_side_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64bd] = "Valve_block_1_in_passenger_side_front_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64be] = "Valve_block_2_in_passenger_side_front_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64bf] = "Valve_block_3_in_passenger_side_front_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c0] = "Coolant_heater_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c1] = "Seat_backrest_fan_1_front_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c2] = "Seat_backrest_fan_2_front_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c3] = "Seat_cushion_fan_1_front_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c4] = "Seat_cushion_fan_2_front_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c5] = "Seat_backrest_fan_1_front_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c6] = "Seat_backrest_fan_2_front_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c7] = "Seat_cushion_fan_1_front_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c8] = "Seat_cushion_fan_2_front_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64c9] = "Operating_and_display_unit_1_for_air_conditioning_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ca] = "Operating_and_display_unit_2_for_air_conditioning_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64cb] = "Operating_and_display_unit_3_for_air_conditioning_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64cc] = "Operating_and_display_unit_4_for_air_conditioning_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64cd] = "Operating_and_display_unit_5_for_air_conditioning_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ce] = "Pedestrian_protection_left_hand_side_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64cf] = "Pedestrian_protection_right_hand_side_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d0] = "Battery_junction_box_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d1] = "Cell_module_controller_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d2] = "Cell_module_controller_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d3] = "Cell_module_controller_3_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d4] = "Cell_module_controller_4_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d5] = "Cell_module_controller_5_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d6] = "Cell_module_controller_6_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d7] = "Cell_module_controller_7_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d8] = "Cell_module_controller_8_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64d9] = "Cell_module_controller_9_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64da] = "Cell_module_controller_10_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64db] = "Cell_module_controller_11_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64dc] = "Cell_module_controller_12_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64dd] = "Seat_backrest_fan_1_rear_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64de] = "Seat_backrest_fan_2_rear_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64df] = "Seat_cushion_fan_1_rear_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e0] = "Seat_cushion_fan_2_rear_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e1] = "Seat_backrest_fan_1_rear_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e2] = "Seat_backrest_fan_2_rear_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e3] = "Seat_cushion_fan_1_rear_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e4] = "Seat_cushion_fan_2_rear_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e5] = "Auxiliary_blower_motor_control_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e6] = "Auxiliary_blower_motor_control_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e7] = "Infrared_sender_for_front_observation_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e8] = "Starter_generator_control_module_sub_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64e9] = "Media_player_1_sub_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ea] = "Media_player_2_sub_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64eb] = "Dedicated_short_range_communication_aerial_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ec] = "Refrigerant_pressure_and_temperature_sender_4_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ed] = "Refrigerant_pressure_and_temperature_sender_5_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ee] = "Refrigerant_pressure_and_temperature_sender_6_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ef] = "Air_coolant_actuator_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f0] = "Air_coolant_actuator_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f1] = "Cell_module_controller_13_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f2] = "Cell_module_controller_14_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f3] = "Cell_module_controller_15_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f5] = "Seat_heating_rear_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f6] = "LED_warning_indicator_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f7] = "Automatic_transmission_fluid_pump_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f8] = "Manual_transmission_fluid_pump_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64f9] = "Convenience_and_driver_assist_operating_unit_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64fb] = "Air_coolant_actuator_3_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64fc] = "Valve_block_4_in_driver_side_rear_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64fd] = "Valve_block_4_in_passenger_side_rear_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64fe] = "Valve_block_4_in_driver_side_front_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x64ff] = "Valve_block_4_in_passenger_side_front_seat_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6501] = "Rear_climatronic_operating_and_display_unit_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6502] = "Refrigerant_expansion_valve_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6503] = "Refrigerant_expansion_valve_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6504] = "Refrigerant_expansion_valve_3_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6505] = "Refrigerant_shut_off_valve_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6506] = "Refrigerant_shut_off_valve_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6507] = "Refrigerant_shut_off_valve_3_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6508] = "Refrigerant_shut_off_valve_4_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6509] = "Refrigerant_shut_off_valve_5_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x650a] = "Sunlight_sensor_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x650b] = "Near_field_communication_control_module_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x650c] = "Clutch_control_unit_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x650d] = "Electrical_charger_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x650e] = "Rear_light_left_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x650f] = "Rear_light_right_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6510] = "Rear_light_right_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6511] = "Sunlight_sensor_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6512] = "Radiator_shutter_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6513] = "Radiator_shutter_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6514] = "Radiator_shutter_3_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6515] = "Radiator_shutter_4_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6518] = "Special_key_operating_unit_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6519] = "Radio_interface_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x651a] = "Video_self_protection_recorder_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x651b] = "Special_vehicle_assist_interface_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x651c] = "Electric_system_disconnection_diode_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x651d] = "Cradle_rear_climatronic_operating_and_display_unit_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x651e] = "Belt_pretensioner_2nd_row_left_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x651f] = "Belt_pretensioner_2nd_row_right_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6520] = "Electrical_variable_camshaft_phasing_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6521] = "Electrical_variable_camshaft_phasing_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6522] = "Wireless_operating_unit_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6523] = "Wireless_operating_unit_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6524] = "Front_windshield_washer_pump_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6525] = "Air_quality_sensor_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6526] = "Fragrancing_system_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6527] = "Coolant_valve_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6528] = "Near_field_communication_control_module_3_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6529] = "Interior_monitoring_rear_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x652a] = "Cooler_fan_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x652b] = "Control_unit_heating_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x652c] = "Control_unit_heating_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x652d] = "Control_unit_heating_3_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x652e] = "Control_unit_heating_4_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x652f] = "Operating_unit_drive_mode_selection_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6530] = "Side_sensor_a-pillar_driver_front_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6531] = "Side_sensor_a-pillar_passenger_front_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6532] = "Sensor_high_voltage_system_1_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6533] = "Side_sensor_b-pillar_driver_front_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6534] = "Side_sensor_b-pillar_passenger_front_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6535] = "Multi_function_steering_wheel_control_module_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6536] = "Gear_selection_display_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6537] = "Cooler_fan_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6538] = "Gear_selector_control_module_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6539] = "Interior_light_module_2_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x653a] = "Radio_control_center_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x653b] = "Multimedia_extension_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x653c] = "Control_unit_differential_lock_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x653d] = "Control_unit_ride_control_system_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x653e] = "Control_unit_hands_on_detection_steering_wheel_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x653f] = "Front_climatronic_operating_and_display_unit_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6540] = "Auxiliary_display_unit_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6541] = "Card_reader_tv_tuner_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6542] = "Park_lock_actuator_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6543] = "Media_connector_Application_Software_Version_Number"
UDS_RDBI.dataIdentifiers[0x6544] = "Catalyst_heating_Application_Software_Version_Number"
# UDS ReadDataByIdentifier (service 0x22) DID name table, 0x66xx range:
# each DID maps one ECU/subsystem to its "Hardware_Number" record.  The low
# byte reuses the same per-ECU index as the 0x64xx
# Application_Software_Version_Number range earlier in this file (e.g. 0x667e
# and 0x647e both refer to "Interface_for_radiodisplay").  Gaps in the
# sequence (e.g. 0x6674-0x6678, 0x6680-0x6685) are presumably unassigned in
# the vendor data — left out deliberately here.
UDS_RDBI.dataIdentifiers[0x6601] = "Control_unit_for_wiper_motor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6602] = "Rain_light_recognition_sensor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6603] = "Light_switch_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6604] = "Garage_door_opener_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6605] = "Garage_door_opener_operating_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6606] = "Ignition_key_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6607] = "Left_front_seat_ventilation_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6608] = "Right_front_seat_ventilation_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6609] = "Left_rear_seat_ventilation_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x660a] = "LED_headlamp_powermodule_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x660b] = "LED_headlamp_powermodule_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x660c] = "LED_headlamp_powermodule_2_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x660d] = "LED_headlamp_powermodule_2_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x660e] = "Operating_and_display_unit_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x660f] = "Operating_and_display_unit_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6610] = "Right_rear_seat_ventilation_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6611] = "Data_medium_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6612] = "Drivers_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6613] = "Front_passengers_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6614] = "Left_headlamp_power_output_stage_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6615] = "Right_headlamp_power_output_stage_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6616] = "Sensor_for_anti_theft_alarm_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6617] = "Rear_lid_control_module_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6618] = "Alarm_horn_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6619] = "Automatic_day_night_interior_mirror_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x661a] = "Remote_control_auxiliary_heater_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x661b] = "Fresh_air_blower_front_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x661c] = "Fresh_air_blower_back_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x661d] = "Alternator_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x661e] = "Interior_light_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x661f] = "Refrigerant_pressure_and_temperature_sender_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6620] = "Sun_roof_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6621] = "Steering_column_lock_actuator_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6622] = "Anti_theft_tilt_system_control_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6623] = "Tire_pressure_monitor_antenna_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6624] = "Heated_windshield_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6625] = "Rear_light_left_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6626] = "Ceiling_light_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6627] = "Left_front_massage_seat_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6628] = "Right_front_massage_seat_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6629] = "Control_module_for_auxiliary_air_heater_Hardware_Number"
# NOTE(review): the labels for 0x662a-0x662c use spaces instead of the
# snake_case used elsewhere — presumably copied verbatim from the vendor/ODX
# data.  These strings are runtime values; confirm upstream before normalizing.
UDS_RDBI.dataIdentifiers[0x662a] = "Belt Pretensioner left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x662b] = "Belt Pretensioner right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x662c] = "Occupant Detection_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x662d] = "Selector_lever_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x662e] = "NOx_sensor_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x662f] = "NOx_sensor_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6630] = "Ioniser_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6631] = "Multi_function_steering_wheel_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6632] = "Left_rear_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6633] = "Right_rear_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6634] = "Left_rear_massage_seat_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6635] = "Right_rear_massage_seat_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6636] = "Display_unit_1_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6637] = "Battery_monitoring_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6638] = "Roof_blind_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6639] = "Sun_roof_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x663a] = "Steering_angle_sender_Hardware_Number"
# NOTE(review): "Lane_change_assistant 2" mixes a space into an otherwise
# snake_case label — likely verbatim from the source data; verify.
UDS_RDBI.dataIdentifiers[0x663b] = "Lane_change_assistant 2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x663c] = "Pitch_rate_sender_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x663d] = "ESP_sensor_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x663e] = "Electronic_ignition_lock_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x663f] = "Air_quality_sensor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6640] = "Display_unit_2_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6641] = "Telephone_handset_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6642] = "Chip_card_reader_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6643] = "Traffic_data_aerial_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6644] = "Hands_free_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6645] = "Telephone_handset_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6646] = "Display_unit_front_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6647] = "Multimedia_operating_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6648] = "Digital_sound_system_control_module_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6649] = "Electrically_adjustable_steering_column_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x664a] = "Interface_for_external_multimedia_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x664b] = "Relative_Air_Humidity_Interior_Sender_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x664c] = "Drivers_door_rear_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x664d] = "Passengers_rear_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x664e] = "Sensor_controlled_power_rear_lid_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x664f] = "Camera_for_night_vision_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6650] = "Relative_humidity_sensor_in_fresh_air_intake_duct_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6651] = "Rear_spoiler_adjustment_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6652] = "Roof_blind_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6653] = "Motor_for_wind_deflector_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6654] = "Voltage_stabilizer_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6655] = "Switch_module_for_driver_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6656] = "Switch_module_for_front_passenger_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6657] = "Switch_module_for_rear_seat_driver_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6658] = "Switch_module_for_rear_seat_front_passenger_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6659] = "Switch_module_2_for_driver_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x665a] = "Battery_charger_unit_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x665b] = "Battery_charger_unit_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x665c] = "Battery_charger_unit_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x665d] = "Air_conditioning_compressor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x665e] = "Neck_heating_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x665f] = "Neck_heating_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6660] = "Switch_module_2_for_front_passenger_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6661] = "Switch_module_2_for_rear_seat_front_passenger_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6662] = "Compact_disc_database_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6663] = "Rear_climatronic_operating_and_display_unit_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6664] = "Rear_climatronic_operating_and_display_unit_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6665] = "Door_handle_front_left_Kessy_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6666] = "Door_handle_front_right_Kessy_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6667] = "Door_handle_rear_left_Kessy_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6668] = "Door_handle_rear_right_Kessy_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6669] = "Power_converter_DC_AC_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x666a] = "Battery_monitoring_control_module_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x666b] = "Matrix_headlamp_powermodule_1_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x666c] = "Matrix_headlamp_powermodule_1_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x666d] = "High_beam_powermodule_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x666e] = "High_beam_powermodule_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x666f] = "Air_suspension_compressor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6670] = "Rear_brake_actuator_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6671] = "Rear_brake_actuator_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6672] = "Analog_clock_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6673] = "Rear_door_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6679] = "Data_medium_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x667a] = "Operating_unit_center_console_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x667b] = "Operating_unit_center_console_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x667c] = "Operating_unit_center_console_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x667d] = "Operating_unit_center_console_4_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x667e] = "Interface_for_radiodisplay_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x667f] = "Parkassist_entry_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6686] = "Belt_pretensioner_3rd_row_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6687] = "Belt_pretensioner_3rd_row_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6688] = "Injection_valve_heater_control_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6689] = "Steering_column_switch_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x668a] = "Brake_assistance_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x668b] = "Trailer_articulation_angle_sensor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x668c] = "Cup_holder_with_heater_and_cooling_element_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x668d] = "Range_of_vision_sensing_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x668e] = "Convenience_and_driver_assist_operating_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x668f] = "Cradle_rear_climatronic_operating_and_display_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6690] = "Trailer_weight_nose_weight_detection_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6691] = "Sensor_carbon_dioxide_concentration_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6692] = "Sensor_fine_dust_concentration_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6693] = "Volume_control_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6694] = "Belt_buckle_presenter_2nd_row_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6695] = "Belt_buckle_presenter_2nd_row_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6696] = "Operating_and_display_unit_6_for_air_conditioning_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6697] = "Active_accelerator_pedal_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6698] = "Multimedia_operating_unit_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6699] = "Display_unit_3_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x669a] = "Display_unit_4_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x669b] = "Display_unit_5_for_multimedia_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x669c] = "Control_module_for_auxiliary_blower_motors_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x669d] = "Operating_and_display_unit_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x669e] = "Operating_and_display_unit_4_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x669f] = "Operating_and_display_unit_5_Hardware_Number"
# NOTE(review): the labels for 0x66a0-0x66aa use spaces instead of snake_case,
# matching the same ECUs' 0x64a0-0x64aa entries earlier in this file —
# presumably verbatim vendor data; confirm upstream before normalizing.
UDS_RDBI.dataIdentifiers[0x66a0] = "Side Sensor Driver Front_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a1] = "Side Sensor Passenger Front_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a2] = "Side Sensor Driver Rear_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a3] = "Side Sensor Passenger Rear_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a4] = "Front Sensor Driver_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a5] = "Front Sensor Passenger_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a6] = "Pedestrian Protection Driver_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a7] = "Pedestrian Protection Passenger_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a8] = "Rear Sensor Center_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66a9] = "Pedestrian Protection Center_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66aa] = "Pedestrian Protection Contact_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ab] = "Pedestrian_protection_driver_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ac] = "Pedestrian_protection_passenger_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ad] = "Central_sensor_XY_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ae] = "Refrigerant_pressure_and_temperature_sender_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66af] = "Refrigerant_pressure_and_temperature_sender_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b0] = "Switch_for_rear_multicontour_seat_driver_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b1] = "Valve_block_1_in_driver_side_rear_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b2] = "Valve_block_2_in_driver_side_rear_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b3] = "Valve_block_3_in_driver_side_rear_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b4] = "Switch_for_rear_multicontour_seat_passenger_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b5] = "Valve_block_1_in_passenger_side_rear_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b6] = "Valve_block_2_in_passenger_side_rear_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b7] = "Valve_block_3_in_passenger_side_rear_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b8] = "Switch_for_front_multicontour_seat_driver_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66b9] = "Valve_block_1_in_driver_side_front_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ba] = "Valve_block_2_in_driver_side_front_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66bb] = "Valve_block_3_in_driver_side_front_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66bc] = "Switch_for_front_multicontour_seat_passenger_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66bd] = "Valve_block_1_in_passenger_side_front_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66be] = "Valve_block_2_in_passenger_side_front_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66bf] = "Valve_block_3_in_passenger_side_front_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c0] = "Coolant_heater_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c1] = "Seat_backrest_fan_1_front_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c2] = "Seat_backrest_fan_2_front_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c3] = "Seat_cushion_fan_1_front_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c4] = "Seat_cushion_fan_2_front_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c5] = "Seat_backrest_fan_1_front_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c6] = "Seat_backrest_fan_2_front_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c7] = "Seat_cushion_fan_1_front_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c8] = "Seat_cushion_fan_2_front_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66c9] = "Operating_and_display_unit_1_for_air_conditioning_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ca] = "Operating_and_display_unit_2_for_air_conditioning_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66cb] = "Operating_and_display_unit_3_for_air_conditioning_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66cc] = "Operating_and_display_unit_4_for_air_conditioning_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66cd] = "Operating_and_display_unit_5_for_air_conditioning_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ce] = "Pedestrian_protection_left_hand_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66cf] = "Pedestrian_protection_right_hand_side_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d0] = "Battery_junction_box_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d1] = "Cell_module_controller_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d2] = "Cell_module_controller_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d3] = "Cell_module_controller_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d4] = "Cell_module_controller_4_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d5] = "Cell_module_controller_5_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d6] = "Cell_module_controller_6_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d7] = "Cell_module_controller_7_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d8] = "Cell_module_controller_8_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66d9] = "Cell_module_controller_9_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66da] = "Cell_module_controller_10_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66db] = "Cell_module_controller_11_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66dc] = "Cell_module_controller_12_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66dd] = "Seat_backrest_fan_1_rear_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66de] = "Seat_backrest_fan_2_rear_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66df] = "Seat_cushion_fan_1_rear_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e0] = "Seat_cushion_fan_2_rear_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e1] = "Seat_backrest_fan_1_rear_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e2] = "Seat_backrest_fan_2_rear_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e3] = "Seat_cushion_fan_1_rear_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e4] = "Seat_cushion_fan_2_rear_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e5] = "Auxiliary_blower_motor_control_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e6] = "Auxiliary_blower_motor_control_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e7] = "Infrared_sender_for_front_observation_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e8] = "Starter_generator_control_module_sub_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66e9] = "Media_player_1_sub_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ea] = "Media_player_2_sub_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66eb] = "Dedicated_short_range_communication_aerial_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ec] = "Refrigerant_pressure_and_temperature_sender_4_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ed] = "Refrigerant_pressure_and_temperature_sender_5_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ee] = "Refrigerant_pressure_and_temperature_sender_6_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ef] = "Air_coolant_actuator_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f0] = "Air_coolant_actuator_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f1] = "Cell_module_controller_13_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f2] = "Cell_module_controller_14_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f3] = "Cell_module_controller_15_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f5] = "Seat_heating_rear_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f6] = "LED_warning_indicator_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f7] = "Automatic_transmission_fluid_pump_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f8] = "Manual_transmission_fluid_pump_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66f9] = "Convenience_and_driver_assist_operating_unit_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66fb] = "Air_coolant_actuator_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66fc] = "Valve_block_4_in_driver_side_rear_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66fd] = "Valve_block_4_in_passenger_side_rear_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66fe] = "Valve_block_4_in_driver_side_front_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x66ff] = "Valve_block_4_in_passenger_side_front_seat_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6701] = "Rear_climatronic_operating_and_display_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6702] = "Refrigerant_expansion_valve_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6703] = "Refrigerant_expansion_valve_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6704] = "Refrigerant_expansion_valve_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6705] = "Refrigerant_shut_off_valve_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6706] = "Refrigerant_shut_off_valve_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6707] = "Refrigerant_shut_off_valve_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6708] = "Refrigerant_shut_off_valve_4_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6709] = "Refrigerant_shut_off_valve_5_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x670a] = "Sunlight_sensor_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x670b] = "Near_field_communication_control_module_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x670c] = "Clutch_control_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x670d] = "Electrical_charger_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x670e] = "Rear_light_left_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x670f] = "Rear_light_right_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6710] = "Rear_light_right_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6711] = "Sunlight_sensor_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6712] = "Radiator_shutter_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6713] = "Radiator_shutter_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6714] = "Radiator_shutter_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6715] = "Radiator_shutter_4_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6718] = "Special_key_operating_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6719] = "Radio_interface_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x671a] = "Video_self_protection_recorder_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x671b] = "Special_vehicle_assist_interface_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x671c] = "Electric_system_disconnection_diode_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x671d] = "Cradle_rear_climatronic_operating_and_display_unit_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x671e] = "Belt_pretensioner_2nd_row_left_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x671f] = "Belt_pretensioner_2nd_row_right_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6720] = "Electrical_variable_camshaft_phasing_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6721] = "Electrical_variable_camshaft_phasing_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6722] = "Wireless_operating_unit_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6723] = "Wireless_operating_unit_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6724] = "Front_windshield_washer_pump_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6725] = "Air_quality_sensor_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6726] = "Fragrancing_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6727] = "Coolant_valve_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6728] = "Near_field_communication_control_module_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6729] = "Interior_monitoring_rear_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x672a] = "Cooler_fan_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x672b] = "Control_unit_heating_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x672c] = "Control_unit_heating_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x672d] = "Control_unit_heating_3_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x672e] = "Control_unit_heating_4_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x672f] = "Operating_unit_drive_mode_selection_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6730] = "Side_sensor_a-pillar_driver_front_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6731] = "Side_sensor_a-pillar_passenger_front_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6732] = "Sensor_high_voltage_system_1_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6733] = "Side_sensor_b-pillar_driver_front_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6734] = "Side_sensor_b-pillar_passenger_front_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6735] = "Multi_function_steering_wheel_control_module_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6736] = "Gear_selection_display_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6737] = "Cooler_fan_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6738] = "Gear_selector_control_module_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6739] = "Interior_light_module_2_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x673a] = "Radio_control_center_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x673b] = "Multimedia_extension_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x673c] = "Control_unit_differential_lock_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x673d] = "Control_unit_ride_control_system_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x673e] = "Control_unit_hands_on_detection_steering_wheel_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x673f] = "Front_climatronic_operating_and_display_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6740] = "Auxiliary_display_unit_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6741] = "Card_reader_tv_tuner_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6742] = "Park_lock_actuator_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6743] = "Media_connector_Hardware_Number"
UDS_RDBI.dataIdentifiers[0x6744] = "Catalyst_heating_Hardware_Number"
# RDBI data identifiers 0x6801-0x6907: hardware version numbers. The names
# mirror the 0x66xx/0x67xx hardware part-number DIDs (same sub-node at
# DID + 0x0200, suffix "_Hardware_Version_Number" instead).
# Registered in one batch; dict.update() assigns each key exactly as the
# per-key item assignments did. Gaps (e.g. 0x6874-0x6878, 0x6880-0x6885,
# 0x68f4, 0x68fa, 0x6900) are intentional and left unregistered.
UDS_RDBI.dataIdentifiers.update({
    0x6801: "Control_unit_for_wiper_motor_Hardware_Version_Number",
    0x6802: "Rain_light_recognition_sensor_Hardware_Version_Number",
    0x6803: "Light_switch_Hardware_Version_Number",
    0x6804: "Garage_door_opener_control_module_Hardware_Version_Number",
    0x6805: "Garage_door_opener_operating_unit_Hardware_Version_Number",
    0x6806: "Ignition_key_Hardware_Version_Number",
    0x6807: "Left_front_seat_ventilation_control_module_Hardware_Version_Number",
    0x6808: "Right_front_seat_ventilation_control_module_Hardware_Version_Number",
    0x6809: "Left_rear_seat_ventilation_control_module_Hardware_Version_Number",
    0x680a: "LED_headlamp_powermodule_left_Hardware_Version_Number",
    0x680b: "LED_headlamp_powermodule_right_Hardware_Version_Number",
    0x680c: "LED_headlamp_powermodule_2_left_Hardware_Version_Number",
    0x680d: "LED_headlamp_powermodule_2_right_Hardware_Version_Number",
    0x680e: "Operating_and_display_unit_1_Hardware_Version_Number",
    0x680f: "Operating_and_display_unit_2_Hardware_Version_Number",
    0x6810: "Right_rear_seat_ventilation_control_module_Hardware_Version_Number",
    0x6811: "Data_medium_Hardware_Version_Number",
    0x6812: "Drivers_door_control_module_Hardware_Version_Number",
    0x6813: "Front_passengers_door_control_module_Hardware_Version_Number",
    0x6814: "Left_headlamp_power_output_stage_Hardware_Version_Number",
    0x6815: "Right_headlamp_power_output_stage_Hardware_Version_Number",
    0x6816: "Sensor_for_anti_theft_alarm_system_Hardware_Version_Number",
    0x6817: "Rear_lid_control_module_2_Hardware_Version_Number",
    0x6818: "Alarm_horn_Hardware_Version_Number",
    0x6819: "Automatic_day_night_interior_mirror_Hardware_Version_Number",
    0x681a: "Remote_control_auxiliary_heater_Hardware_Version_Number",
    0x681b: "Fresh_air_blower_front_Hardware_Version_Number",
    0x681c: "Fresh_air_blower_back_Hardware_Version_Number",
    0x681d: "Alternator_Hardware_Version_Number",
    0x681e: "Interior_light_module_Hardware_Version_Number",
    0x681f: "Refrigerant_pressure_and_temperature_sender_Hardware_Version_Number",
    0x6820: "Sun_roof_Hardware_Version_Number",
    0x6821: "Steering_column_lock_actuator_Hardware_Version_Number",
    0x6822: "Anti_theft_tilt_system_control_unit_Hardware_Version_Number",
    0x6823: "Tire_pressure_monitor_antenna_Hardware_Version_Number",
    0x6824: "Heated_windshield_control_module_Hardware_Version_Number",
    0x6825: "Rear_light_left_1_Hardware_Version_Number",
    0x6826: "Ceiling_light_module_Hardware_Version_Number",
    0x6827: "Left_front_massage_seat_control_module_Hardware_Version_Number",
    0x6828: "Right_front_massage_seat_control_module_Hardware_Version_Number",
    0x6829: "Control_module_for_auxiliary_air_heater_Hardware_Version_Number",
    0x682a: "Belt Pretensioner left_Hardware_Version_Number",
    0x682b: "Belt Pretensioner right_Hardware_Version_Number",
    0x682c: "Occupant Detection_Hardware_Version_Number",
    0x682d: "Selector_lever_Hardware_Version_Number",
    0x682e: "NOx_sensor_1_Hardware_Version_Number",
    0x682f: "NOx_sensor_2_Hardware_Version_Number",
    0x6830: "Ioniser_Hardware_Version_Number",
    0x6831: "Multi_function_steering_wheel_control_module_Hardware_Version_Number",
    0x6832: "Left_rear_door_control_module_Hardware_Version_Number",
    0x6833: "Right_rear_door_control_module_Hardware_Version_Number",
    0x6834: "Left_rear_massage_seat_control_module_Hardware_Version_Number",
    0x6835: "Right_rear_massage_seat_control_module_Hardware_Version_Number",
    0x6836: "Display_unit_1_for_multimedia_system_Hardware_Version_Number",
    0x6837: "Battery_monitoring_control_module_Hardware_Version_Number",
    0x6838: "Roof_blind_Hardware_Version_Number",
    0x6839: "Sun_roof_2_Hardware_Version_Number",
    0x683a: "Steering_angle_sender_Hardware_Version_Number",
    0x683b: "Lane_change_assistant 2_Hardware_Version_Number",
    0x683c: "Pitch_rate_sender_Hardware_Version_Number",
    0x683d: "ESP_sensor_unit_Hardware_Version_Number",
    0x683e: "Electronic_ignition_lock_Hardware_Version_Number",
    0x683f: "Air_quality_sensor_Hardware_Version_Number",
    0x6840: "Display_unit_2_for_multimedia_system_Hardware_Version_Number",
    0x6841: "Telephone_handset_2_Hardware_Version_Number",
    0x6842: "Chip_card_reader_control_module_Hardware_Version_Number",
    0x6843: "Traffic_data_aerial_Hardware_Version_Number",
    0x6844: "Hands_free_system_Hardware_Version_Number",
    0x6845: "Telephone_handset_Hardware_Version_Number",
    0x6846: "Display_unit_front_for_multimedia_system_Hardware_Version_Number",
    0x6847: "Multimedia_operating_unit_Hardware_Version_Number",
    0x6848: "Digital_sound_system_control_module_2_Hardware_Version_Number",
    0x6849: "Electrically_adjustable_steering_column_Hardware_Version_Number",
    0x684a: "Interface_for_external_multimedia_unit_Hardware_Version_Number",
    0x684b: "Relative_Air_Humidity_Interior_Sender_Hardware_Version_Number",
    0x684c: "Drivers_door_rear_control_module_Hardware_Version_Number",
    0x684d: "Passengers_rear_door_control_module_Hardware_Version_Number",
    0x684e: "Sensor_controlled_power_rear_lid_Hardware_Version_Number",
    0x684f: "Camera_for_night_vision_system_Hardware_Version_Number",
    0x6850: "Relative_humidity_sensor_in_fresh_air_intake_duct_Hardware_Version_Number",
    0x6851: "Rear_spoiler_adjustment_Hardware_Version_Number",
    0x6852: "Roof_blind_2_Hardware_Version_Number",
    0x6853: "Motor_for_wind_deflector_Hardware_Version_Number",
    0x6854: "Voltage_stabilizer_Hardware_Version_Number",
    0x6855: "Switch_module_for_driver_seat_Hardware_Version_Number",
    0x6856: "Switch_module_for_front_passenger_seat_Hardware_Version_Number",
    0x6857: "Switch_module_for_rear_seat_driver_side_Hardware_Version_Number",
    0x6858: "Switch_module_for_rear_seat_front_passenger_side_Hardware_Version_Number",
    0x6859: "Switch_module_2_for_driver_seat_Hardware_Version_Number",
    0x685a: "Battery_charger_unit_1_Hardware_Version_Number",
    0x685b: "Battery_charger_unit_2_Hardware_Version_Number",
    0x685c: "Battery_charger_unit_3_Hardware_Version_Number",
    0x685d: "Air_conditioning_compressor_Hardware_Version_Number",
    0x685e: "Neck_heating_left_Hardware_Version_Number",
    0x685f: "Neck_heating_right_Hardware_Version_Number",
    0x6860: "Switch_module_2_for_front_passenger_seat_Hardware_Version_Number",
    0x6861: "Switch_module_2_for_rear_seat_front_passenger_side_Hardware_Version_Number",
    0x6862: "Compact_disc_database_Hardware_Version_Number",
    0x6863: "Rear_climatronic_operating_and_display_unit_left_Hardware_Version_Number",
    0x6864: "Rear_climatronic_operating_and_display_unit_right_Hardware_Version_Number",
    0x6865: "Door_handle_front_left_Kessy_Hardware_Version_Number",
    0x6866: "Door_handle_front_right_Kessy_Hardware_Version_Number",
    0x6867: "Door_handle_rear_left_Kessy_Hardware_Version_Number",
    0x6868: "Door_handle_rear_right_Kessy_Hardware_Version_Number",
    0x6869: "Power_converter_DC_AC_Hardware_Version_Number",
    0x686a: "Battery_monitoring_control_module_2_Hardware_Version_Number",
    0x686b: "Matrix_headlamp_powermodule_1_left_Hardware_Version_Number",
    0x686c: "Matrix_headlamp_powermodule_1_right_Hardware_Version_Number",
    0x686d: "High_beam_powermodule_left_Hardware_Version_Number",
    0x686e: "High_beam_powermodule_right_Hardware_Version_Number",
    0x686f: "Air_suspension_compressor_Hardware_Version_Number",
    0x6870: "Rear_brake_actuator_1_Hardware_Version_Number",
    0x6871: "Rear_brake_actuator_2_Hardware_Version_Number",
    0x6872: "Analog_clock_Hardware_Version_Number",
    0x6873: "Rear_door_control_module_Hardware_Version_Number",
    0x6879: "Data_medium_2_Hardware_Version_Number",
    0x687a: "Operating_unit_center_console_1_Hardware_Version_Number",
    0x687b: "Operating_unit_center_console_2_Hardware_Version_Number",
    0x687c: "Operating_unit_center_console_3_Hardware_Version_Number",
    0x687d: "Operating_unit_center_console_4_Hardware_Version_Number",
    0x687e: "Interface_for_radiodisplay_Hardware_Version_Number",
    0x687f: "Parkassist_entry_Hardware_Version_Number",
    0x6886: "Belt_pretensioner_3rd_row_left_Hardware_Version_Number",
    0x6887: "Belt_pretensioner_3rd_row_right_Hardware_Version_Number",
    0x6888: "Injection_valve_heater_control_unit_Hardware_Version_Number",
    0x6889: "Steering_column_switch_Hardware_Version_Number",
    0x688a: "Brake_assistance_Hardware_Version_Number",
    0x688b: "Trailer_articulation_angle_sensor_Hardware_Version_Number",
    0x688c: "Cup_holder_with_heater_and_cooling_element_Hardware_Version_Number",
    0x688d: "Range_of_vision_sensing_Hardware_Version_Number",
    0x688e: "Convenience_and_driver_assist_operating_unit_Hardware_Version_Number",
    0x688f: "Cradle_rear_climatronic_operating_and_display_unit_Hardware_Version_Number",
    0x6890: "Trailer_weight_nose_weight_detection_Hardware_Version_Number",
    0x6891: "Sensor_carbon_dioxide_concentration_Hardware_Version_Number",
    0x6892: "Sensor_fine_dust_concentration_Hardware_Version_Number",
    0x6893: "Volume_control_1_Hardware_Version_Number",
    0x6894: "Belt_buckle_presenter_2nd_row_left_Hardware_Version_Number",
    0x6895: "Belt_buckle_presenter_2nd_row_right_Hardware_Version_Number",
    0x6896: "Operating_and_display_unit_6_for_air_conditioning_Hardware_Version_Number",
    0x6897: "Active_accelerator_pedal_Hardware_Version_Number",
    0x6898: "Multimedia_operating_unit_2_Hardware_Version_Number",
    0x6899: "Display_unit_3_for_multimedia_system_Hardware_Version_Number",
    0x689a: "Display_unit_4_for_multimedia_system_Hardware_Version_Number",
    0x689b: "Display_unit_5_for_multimedia_system_Hardware_Version_Number",
    0x689c: "Control_module_for_auxiliary_blower_motors_Hardware_Version_Number",
    0x689d: "Operating_and_display_unit_3_Hardware_Version_Number",
    0x689e: "Operating_and_display_unit_4_Hardware_Version_Number",
    0x689f: "Operating_and_display_unit_5_Hardware_Version_Number",
    0x68a0: "Side Sensor Driver Front_Hardware_Version_Number",
    0x68a1: "Side Sensor Passenger Front_Hardware_Version_Number",
    0x68a2: "Side Sensor Driver Rear_Hardware_Version_Number",
    0x68a3: "Side Sensor Passenger Rear_Hardware_Version_Number",
    0x68a4: "Front Sensor Driver_Hardware_Version_Number",
    0x68a5: "Front Sensor Passenger_Hardware_Version_Number",
    0x68a6: "Pedestrian Protection Driver_Hardware_Version_Number",
    0x68a7: "Pedestrian Protection Passenger_Hardware_Version_Number",
    0x68a8: "Rear Sensor Center_Hardware_Version_Number",
    0x68a9: "Pedestrian Protection Center_Hardware_Version_Number",
    0x68aa: "Pedestrian Protection Contact_Hardware_Version_Number",
    0x68ab: "Pedestrian_protection_driver_2_Hardware_Version_Number",
    0x68ac: "Pedestrian_protection_passenger_2_Hardware_Version_Number",
    0x68ad: "Central_sensor_XY_Hardware_Version_Number",
    0x68ae: "Refrigerant_pressure_and_temperature_sender_2_Hardware_Version_Number",
    0x68af: "Refrigerant_pressure_and_temperature_sender_3_Hardware_Version_Number",
    0x68b0: "Switch_for_rear_multicontour_seat_driver_side_Hardware_Version_Number",
    0x68b1: "Valve_block_1_in_driver_side_rear_seat_Hardware_Version_Number",
    0x68b2: "Valve_block_2_in_driver_side_rear_seat_Hardware_Version_Number",
    0x68b3: "Valve_block_3_in_driver_side_rear_seat_Hardware_Version_Number",
    0x68b4: "Switch_for_rear_multicontour_seat_passenger_side_Hardware_Version_Number",
    0x68b5: "Valve_block_1_in_passenger_side_rear_seat_Hardware_Version_Number",
    0x68b6: "Valve_block_2_in_passenger_side_rear_seat_Hardware_Version_Number",
    0x68b7: "Valve_block_3_in_passenger_side_rear_seat_Hardware_Version_Number",
    0x68b8: "Switch_for_front_multicontour_seat_driver_side_Hardware_Version_Number",
    0x68b9: "Valve_block_1_in_driver_side_front_seat_Hardware_Version_Number",
    0x68ba: "Valve_block_2_in_driver_side_front_seat_Hardware_Version_Number",
    0x68bb: "Valve_block_3_in_driver_side_front_seat_Hardware_Version_Number",
    0x68bc: "Switch_for_front_multicontour_seat_passenger_side_Hardware_Version_Number",
    0x68bd: "Valve_block_1_in_passenger_side_front_seat_Hardware_Version_Number",
    0x68be: "Valve_block_2_in_passenger_side_front_seat_Hardware_Version_Number",
    0x68bf: "Valve_block_3_in_passenger_side_front_seat_Hardware_Version_Number",
    0x68c0: "Coolant_heater_Hardware_Version_Number",
    0x68c1: "Seat_backrest_fan_1_front_left_Hardware_Version_Number",
    0x68c2: "Seat_backrest_fan_2_front_left_Hardware_Version_Number",
    0x68c3: "Seat_cushion_fan_1_front_left_Hardware_Version_Number",
    0x68c4: "Seat_cushion_fan_2_front_left_Hardware_Version_Number",
    0x68c5: "Seat_backrest_fan_1_front_right_Hardware_Version_Number",
    0x68c6: "Seat_backrest_fan_2_front_right_Hardware_Version_Number",
    0x68c7: "Seat_cushion_fan_1_front_right_Hardware_Version_Number",
    0x68c8: "Seat_cushion_fan_2_front_right_Hardware_Version_Number",
    0x68c9: "Operating_and_display_unit_1_for_air_conditioning_Hardware_Version_Number",
    0x68ca: "Operating_and_display_unit_2_for_air_conditioning_Hardware_Version_Number",
    0x68cb: "Operating_and_display_unit_3_for_air_conditioning_Hardware_Version_Number",
    0x68cc: "Operating_and_display_unit_4_for_air_conditioning_Hardware_Version_Number",
    0x68cd: "Operating_and_display_unit_5_for_air_conditioning_Hardware_Version_Number",
    0x68ce: "Pedestrian_protection_left_hand_side_Hardware_Version_Number",
    0x68cf: "Pedestrian_protection_right_hand_side_Hardware_Version_Number",
    0x68d0: "Battery_junction_box_Hardware_Version_Number",
    0x68d1: "Cell_module_controller_1_Hardware_Version_Number",
    0x68d2: "Cell_module_controller_2_Hardware_Version_Number",
    0x68d3: "Cell_module_controller_3_Hardware_Version_Number",
    0x68d4: "Cell_module_controller_4_Hardware_Version_Number",
    0x68d5: "Cell_module_controller_5_Hardware_Version_Number",
    0x68d6: "Cell_module_controller_6_Hardware_Version_Number",
    0x68d7: "Cell_module_controller_7_Hardware_Version_Number",
    0x68d8: "Cell_module_controller_8_Hardware_Version_Number",
    0x68d9: "Cell_module_controller_9_Hardware_Version_Number",
    0x68da: "Cell_module_controller_10_Hardware_Version_Number",
    0x68db: "Cell_module_controller_11_Hardware_Version_Number",
    0x68dc: "Cell_module_controller_12_Hardware_Version_Number",
    0x68dd: "Seat_backrest_fan_1_rear_left_Hardware_Version_Number",
    0x68de: "Seat_backrest_fan_2_rear_left_Hardware_Version_Number",
    0x68df: "Seat_cushion_fan_1_rear_left_Hardware_Version_Number",
    0x68e0: "Seat_cushion_fan_2_rear_left_Hardware_Version_Number",
    0x68e1: "Seat_backrest_fan_1_rear_right_Hardware_Version_Number",
    0x68e2: "Seat_backrest_fan_2_rear_right_Hardware_Version_Number",
    0x68e3: "Seat_cushion_fan_1_rear_right_Hardware_Version_Number",
    0x68e4: "Seat_cushion_fan_2_rear_right_Hardware_Version_Number",
    0x68e5: "Auxiliary_blower_motor_control_1_Hardware_Version_Number",
    0x68e6: "Auxiliary_blower_motor_control_2_Hardware_Version_Number",
    0x68e7: "Infrared_sender_for_front_observation_module_Hardware_Version_Number",
    0x68e8: "Starter_generator_control_module_sub_Hardware_Version_Number",
    0x68e9: "Media_player_1_sub_Hardware_Version_Number",
    0x68ea: "Media_player_2_sub_Hardware_Version_Number",
    0x68eb: "Dedicated_short_range_communication_aerial_Hardware_Version_Number",
    0x68ec: "Refrigerant_pressure_and_temperature_sender_4_Hardware_Version_Number",
    0x68ed: "Refrigerant_pressure_and_temperature_sender_5_Hardware_Version_Number",
    0x68ee: "Refrigerant_pressure_and_temperature_sender_6_Hardware_Version_Number",
    0x68ef: "Air_coolant_actuator_1_Hardware_Version_Number",
    0x68f0: "Air_coolant_actuator_2_Hardware_Version_Number",
    0x68f1: "Cell_module_controller_13_Hardware_Version_Number",
    0x68f2: "Cell_module_controller_14_Hardware_Version_Number",
    0x68f3: "Cell_module_controller_15_Hardware_Version_Number",
    0x68f5: "Seat_heating_rear_1_Hardware_Version_Number",
    0x68f6: "LED_warning_indicator_Hardware_Version_Number",
    0x68f7: "Automatic_transmission_fluid_pump_Hardware_Version_Number",
    0x68f8: "Manual_transmission_fluid_pump_Hardware_Version_Number",
    0x68f9: "Convenience_and_driver_assist_operating_unit_2_Hardware_Version_Number",
    0x68fb: "Air_coolant_actuator_3_Hardware_Version_Number",
    0x68fc: "Valve_block_4_in_driver_side_rear_seat_Hardware_Version_Number",
    0x68fd: "Valve_block_4_in_passenger_side_rear_seat_Hardware_Version_Number",
    0x68fe: "Valve_block_4_in_driver_side_front_seat_Hardware_Version_Number",
    0x68ff: "Valve_block_4_in_passenger_side_front_seat_Hardware_Version_Number",
    0x6901: "Rear_climatronic_operating_and_display_unit_Hardware_Version_Number",
    0x6902: "Refrigerant_expansion_valve_1_Hardware_Version_Number",
    0x6903: "Refrigerant_expansion_valve_2_Hardware_Version_Number",
    0x6904: "Refrigerant_expansion_valve_3_Hardware_Version_Number",
    0x6905: "Refrigerant_shut_off_valve_1_Hardware_Version_Number",
    0x6906: "Refrigerant_shut_off_valve_2_Hardware_Version_Number",
    0x6907: "Refrigerant_shut_off_valve_3_Hardware_Version_Number",
})
UDS_RDBI.dataIdentifiers[0x6908] = "Refrigerant_shut_off_valve_4_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6909] = "Refrigerant_shut_off_valve_5_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x690a] = "Sunlight_sensor_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x690b] = "Near_field_communication_control_module_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x690c] = "Clutch_control_unit_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x690d] = "Electrical_charger_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x690e] = "Rear_light_left_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x690f] = "Rear_light_right_1_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6910] = "Rear_light_right_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6911] = "Sunlight_sensor_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6912] = "Radiator_shutter_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6913] = "Radiator_shutter_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6914] = "Radiator_shutter_3_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6915] = "Radiator_shutter_4_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6918] = "Special_key_operating_unit_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6919] = "Radio_interface_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x691a] = "Video_self_protection_recorder_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x691b] = "Special_vehicle_assist_interface_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x691c] = "Electric_system_disconnection_diode_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x691d] = "Cradle_rear_climatronic_operating_and_display_unit_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x691e] = "Belt_pretensioner_2nd_row_left_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x691f] = "Belt_pretensioner_2nd_row_right_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6920] = "Electrical_variable_camshaft_phasing_1_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6921] = "Electrical_variable_camshaft_phasing_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6922] = "Wireless_operating_unit_1_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6923] = "Wireless_operating_unit_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6924] = "Front_windshield_washer_pump_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6925] = "Air_quality_sensor_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6926] = "Fragrancing_system_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6927] = "Coolant_valve_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6928] = "Near_field_communication_control_module_3_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6929] = "Interior_monitoring_rear_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x692a] = "Cooler_fan_1_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x692b] = "Control_unit_heating_1_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x692c] = "Control_unit_heating_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x692d] = "Control_unit_heating_3_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x692e] = "Control_unit_heating_4_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x692f] = "Operating_unit_drive_mode_selection_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6930] = "Side_sensor_a-pillar_driver_front_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6931] = "Side_sensor_a-pillar_passenger_front_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6932] = "Sensor_high_voltage_system_1_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6933] = "Side_sensor_b-pillar_driver_front_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6934] = "Side_sensor_b-pillar_passenger_front_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6935] = "Multi_function_steering_wheel_control_module_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6936] = "Gear_selection_display_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6937] = "Cooler_fan_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6938] = "Gear_selector_control_module_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6939] = "Interior_light_module_2_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x693a] = "Radio_control_center_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x693b] = "Multimedia_extension_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x693c] = "Control_unit_differential_lock_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x693d] = "Control_unit_ride_control_system_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x693e] = "Control_unit_hands_on_detection_steering_wheel_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x693f] = "Front_climatronic_operating_and_display_unit_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6940] = "Auxiliary_display_unit_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6941] = "Card_reader_tv_tuner_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6942] = "Park_lock_actuator_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6943] = "Media_connector_Hardware_Version_Number"
UDS_RDBI.dataIdentifiers[0x6944] = "Catalyst_heating_Hardware_Version_Number"
# RDBI data-identifier names, 0x6axx/0x6bxx range: ECU "Serial_Number"
# records. Registered as one batch update; key/value pairs are identical
# to the per-assignment form this replaces.
# NOTE(review): a handful of values use spaces instead of underscores
# (e.g. "Belt Pretensioner left", "Side Sensor Driver Front"); they are
# reproduced byte-for-byte since they are runtime lookup data.
UDS_RDBI.dataIdentifiers.update({
    0x6a01: "Control_unit_for_wiper_motor_Serial_Number",
    0x6a02: "Rain_light_recognition_sensor_Serial_Number",
    0x6a03: "Light_switch_Serial_Number",
    0x6a04: "Garage_door_opener_control_module_Serial_Number",
    0x6a05: "Garage_door_opener_operating_unit_Serial_Number",
    0x6a06: "Ignition_key_Serial_Number",
    0x6a07: "Left_front_seat_ventilation_control_module_Serial_Number",
    0x6a08: "Right_front_seat_ventilation_control_module_Serial_Number",
    0x6a09: "Left_rear_seat_ventilation_control_module_Serial_Number",
    0x6a0a: "LED_headlamp_powermodule_left_Serial_Number",
    0x6a0b: "LED_headlamp_powermodule_right_Serial_Number",
    0x6a0c: "LED_headlamp_powermodule_2_left_Serial_Number",
    0x6a0d: "LED_headlamp_powermodule_2_right_Serial_Number",
    0x6a0e: "Operating_and_display_unit_1_Serial_Number",
    0x6a0f: "Operating_and_display_unit_2_Serial_Number",
    0x6a10: "Right_rear_seat_ventilation_control_module_Serial_Number",
    0x6a11: "Data_medium_Serial_Number",
    0x6a12: "Drivers_door_control_module_Serial_Number",
    0x6a13: "Front_passengers_door_control_module_Serial_Number",
    0x6a14: "Left_headlamp_power_output_stage_Serial_Number",
    0x6a15: "Right_headlamp_power_output_stage_Serial_Number",
    0x6a16: "Sensor_for_anti_theft_alarm_system_Serial_Number",
    0x6a17: "Rear_lid_control_module_2_Serial_Number",
    0x6a18: "Alarm_horn_Serial_Number",
    0x6a19: "Automatic_day_night_interior_mirror_Serial_Number",
    0x6a1a: "Remote_control_auxiliary_heater_Serial_Number",
    0x6a1b: "Fresh_air_blower_front_Serial_Number",
    0x6a1c: "Fresh_air_blower_back_Serial_Number",
    0x6a1d: "Alternator_Serial_Number",
    0x6a1e: "Interior_light_module_Serial_Number",
    0x6a1f: "Refrigerant_pressure_and_temperature_sender_Serial_Number",
    0x6a20: "Sun_roof_Serial_Number",
    0x6a21: "Steering_column_lock_actuator_Serial_Number",
    0x6a22: "Anti_theft_tilt_system_control_unit_Serial_Number",
    0x6a23: "Tire_pressure_monitor_antenna_Serial_Number",
    0x6a24: "Heated_windshield_control_module_Serial_Number",
    0x6a25: "Rear_light_left_1_Serial_Number",
    0x6a26: "Ceiling_light_module_Serial_Number",
    0x6a27: "Left_front_massage_seat_control_module_Serial_Number",
    0x6a28: "Right_front_massage_seat_control_module_Serial_Number",
    0x6a29: "Control_module_for_auxiliary_air_heater_Serial_Number",
    0x6a2a: "Belt Pretensioner left_Serial_Number",
    0x6a2b: "Belt Pretensioner right_Serial_Number",
    0x6a2c: "Occupant Detection_Serial_Number",
    0x6a2d: "Selector_lever_Serial_Number",
    0x6a2e: "NOx_sensor_1_Serial_Number",
    0x6a2f: "NOx_sensor_2_Serial_Number",
    0x6a30: "Ioniser_Serial_Number",
    0x6a31: "Multi_function_steering_wheel_control_module_Serial_Number",
    0x6a32: "Left_rear_door_control_module_Serial_Number",
    0x6a33: "Right_rear_door_control_module_Serial_Number",
    0x6a34: "Left_rear_massage_seat_control_module_Serial_Number",
    0x6a35: "Right_rear_massage_seat_control_module_Serial_Number",
    0x6a36: "Display_unit_1_for_multimedia_system_Serial_Number",
    0x6a37: "Battery_monitoring_control_module_Serial_Number",
    0x6a38: "Roof_blind_Serial_Number",
    0x6a39: "Sun_roof_2_Serial_Number",
    0x6a3a: "Steering_angle_sender_Serial_Number",
    0x6a3b: "Lane_change_assistant 2_Serial_Number",
    0x6a3c: "Pitch_rate_sender_Serial_Number",
    0x6a3d: "ESP_sensor_unit_Serial_Number",
    0x6a3e: "Electronic_ignition_lock_Serial_Number",
    0x6a3f: "Air_quality_sensor_Serial_Number",
    0x6a40: "Display_unit_2_for_multimedia_system_Serial_Number",
    0x6a41: "Telephone_handset_2_Serial_Number",
    0x6a42: "Chip_card_reader_control_module_Serial_Number",
    0x6a43: "Traffic_data_aerial_Serial_Number",
    0x6a44: "Hands_free_system_Serial_Number",
    0x6a45: "Telephone_handset_Serial_Number",
    0x6a46: "Display_unit_front_for_multimedia_system_Serial_Number",
    0x6a47: "Multimedia_operating_unit_Serial_Number",
    0x6a48: "Digital_sound_system_control_module_2_Serial_Number",
    0x6a49: "Electrically_adjustable_steering_column_Serial_Number",
    0x6a4a: "Interface_for_external_multimedia_unit_Serial_Number",
    0x6a4b: "Relative_Air_Humidity_Interior_Sender_Serial_Number",
    0x6a4c: "Drivers_door_rear_control_module_Serial_Number",
    0x6a4d: "Passengers_rear_door_control_module_Serial_Number",
    0x6a4e: "Sensor_controlled_power_rear_lid_Serial_Number",
    0x6a4f: "Camera_for_night_vision_system_Serial_Number",
    0x6a50: "Relative_humidity_sensor_in_fresh_air_intake_duct_Serial_Number",
    0x6a51: "Rear_spoiler_adjustment_Serial_Number",
    0x6a52: "Roof_blind_2_Serial_Number",
    0x6a53: "Motor_for_wind_deflector_Serial_Number",
    0x6a54: "Voltage_stabilizer_Serial_Number",
    0x6a55: "Switch_module_for_driver_seat_Serial_Number",
    0x6a56: "Switch_module_for_front_passenger_seat_Serial_Number",
    0x6a57: "Switch_module_for_rear_seat_driver_side_Serial_Number",
    0x6a58: "Switch_module_for_rear_seat_front_passenger_side_Serial_Number",
    0x6a59: "Switch_module_2_for_driver_seat_Serial_Number",
    0x6a5a: "Battery_charger_unit_1_Serial_Number",
    0x6a5b: "Battery_charger_unit_2_Serial_Number",
    0x6a5c: "Battery_charger_unit_3_Serial_Number",
    0x6a5d: "Air_conditioning_compressor_Serial_Number",
    0x6a5e: "Neck_heating_left_Serial_Number",
    0x6a5f: "Neck_heating_right_Serial_Number",
    0x6a60: "Switch_module_2_for_front_passenger_seat_Serial_Number",
    0x6a61: "Switch_module_2_for_rear_seat_front_passenger_side_Serial_Number",
    0x6a62: "Compact_disc_database_Serial_Number",
    0x6a63: "Rear_climatronic_operating_and_display_unit_left_Serial_Number",
    0x6a64: "Rear_climatronic_operating_and_display_unit_right_Serial_Number",
    0x6a65: "Door_handle_front_left_Kessy_Serial_Number",
    0x6a66: "Door_handle_front_right_Kessy_Serial_Number",
    0x6a67: "Door_handle_rear_left_Kessy_Serial_Number",
    0x6a68: "Door_handle_rear_right_Kessy_Serial_Number",
    0x6a69: "Power_converter_DC_AC_Serial_Number",
    0x6a6a: "Battery_monitoring_control_module_2_Serial_Number",
    0x6a6b: "Matrix_headlamp_powermodule_1_left_Serial_Number",
    0x6a6c: "Matrix_headlamp_powermodule_1_right_Serial_Number",
    0x6a6d: "High_beam_powermodule_left_Serial_Number",
    0x6a6e: "High_beam_powermodule_right_Serial_Number",
    0x6a6f: "Air_suspension_compressor_Serial_Number",
    0x6a70: "Rear_brake_actuator_1_Serial_Number",
    0x6a71: "Rear_brake_actuator_2_Serial_Number",
    0x6a72: "Analog_clock_Serial_Number",
    0x6a73: "Rear_door_control_module_Serial_Number",
    # NOTE: 0x6a74-0x6a78 and 0x6a80-0x6a85 are absent in the original
    # table; the gaps are kept as-is.
    0x6a79: "Data_medium_2_Serial_Number",
    0x6a7a: "Operating_unit_center_console_1_Serial_Number",
    0x6a7b: "Operating_unit_center_console_2_Serial_Number",
    0x6a7c: "Operating_unit_center_console_3_Serial_Number",
    0x6a7d: "Operating_unit_center_console_4_Serial_Number",
    0x6a7e: "Interface_for_radiodisplay_Serial_Number",
    0x6a7f: "Parkassist_entry_Serial_Number",
    0x6a86: "Belt_pretensioner_3rd_row_left_Serial_Number",
    0x6a87: "Belt_pretensioner_3rd_row_right_Serial_Number",
    0x6a88: "Injection_valve_heater_control_unit_Serial_Number",
    0x6a89: "Steering_column_switch_Serial_Number",
    0x6a8a: "Brake_assistance_Serial_Number",
    0x6a8b: "Trailer_articulation_angle_sensor_Serial_Number",
    0x6a8c: "Cup_holder_with_heater_and_cooling_element_Serial_Number",
    0x6a8d: "Range_of_vision_sensing_Serial_Number",
    0x6a8e: "Convenience_and_driver_assist_operating_unit_Serial_Number",
    0x6a8f: "Cradle_rear_climatronic_operating_and_display_unit_Serial_Number",
    0x6a90: "Trailer_weight_nose_weight_detection_Serial_Number",
    0x6a91: "Sensor_carbon_dioxide_concentration_Serial_Number",
    0x6a92: "Sensor_fine_dust_concentration_Serial_Number",
    0x6a93: "Volume_control_1_Serial_Number",
    0x6a94: "Belt_buckle_presenter_2nd_row_left_Serial_Number",
    0x6a95: "Belt_buckle_presenter_2nd_row_right_Serial_Number",
    0x6a96: "Operating_and_display_unit_6_for_air_conditioning_Serial_Number",
    0x6a97: "Active_accelerator_pedal_Serial_Number",
    0x6a98: "Multimedia_operating_unit_2_Serial_Number",
    0x6a99: "Display_unit_3_for_multimedia_system_Serial_Number",
    0x6a9a: "Display_unit_4_for_multimedia_system_Serial_Number",
    0x6a9b: "Display_unit_5_for_multimedia_system_Serial_Number",
    0x6a9c: "Control_module_for_auxiliary_blower_motors_Serial_Number",
    0x6a9d: "Operating_and_display_unit_3_Serial_Number",
    0x6a9e: "Operating_and_display_unit_4_Serial_Number",
    0x6a9f: "Operating_and_display_unit_5_Serial_Number",
    0x6aa0: "Side Sensor Driver Front_Serial_Number",
    0x6aa1: "Side Sensor Passenger Front_Serial_Number",
    0x6aa2: "Side Sensor Driver Rear_Serial_Number",
    0x6aa3: "Side Sensor Passenger Rear_Serial_Number",
    0x6aa4: "Front Sensor Driver_Serial_Number",
    0x6aa5: "Front Sensor Passenger_Serial_Number",
    0x6aa6: "Pedestrian Protection Driver_Serial_Number",
    0x6aa7: "Pedestrian Protection Passenger_Serial_Number",
    0x6aa8: "Rear Sensor Center_Serial_Number",
    0x6aa9: "Pedestrian Protection Center_Serial_Number",
    0x6aaa: "Pedestrian Protection Contact_Serial_Number",
    0x6aab: "Pedestrian_protection_driver_2_Serial_Number",
    0x6aac: "Pedestrian_protection_passenger_2_Serial_Number",
    0x6aad: "Central_sensor_XY_Serial_Number",
    0x6aae: "Refrigerant_pressure_and_temperature_sender_2_Serial_Number",
    0x6aaf: "Refrigerant_pressure_and_temperature_sender_3_Serial_Number",
    0x6ab0: "Switch_for_rear_multicontour_seat_driver_side_Serial_Number",
    0x6ab1: "Valve_block_1_in_driver_side_rear_seat_Serial_Number",
    0x6ab2: "Valve_block_2_in_driver_side_rear_seat_Serial_Number",
    0x6ab3: "Valve_block_3_in_driver_side_rear_seat_Serial_Number",
    0x6ab4: "Switch_for_rear_multicontour_seat_passenger_side_Serial_Number",
    0x6ab5: "Valve_block_1_in_passenger_side_rear_seat_Serial_Number",
    0x6ab6: "Valve_block_2_in_passenger_side_rear_seat_Serial_Number",
    0x6ab7: "Valve_block_3_in_passenger_side_rear_seat_Serial_Number",
    0x6ab8: "Switch_for_front_multicontour_seat_driver_side_Serial_Number",
    0x6ab9: "Valve_block_1_in_driver_side_front_seat_Serial_Number",
    0x6aba: "Valve_block_2_in_driver_side_front_seat_Serial_Number",
    0x6abb: "Valve_block_3_in_driver_side_front_seat_Serial_Number",
    0x6abc: "Switch_for_front_multicontour_seat_passenger_side_Serial_Number",
    0x6abd: "Valve_block_1_in_passenger_side_front_seat_Serial_Number",
    0x6abe: "Valve_block_2_in_passenger_side_front_seat_Serial_Number",
    0x6abf: "Valve_block_3_in_passenger_side_front_seat_Serial_Number",
    0x6ac0: "Coolant_heater_Serial_Number",
    0x6ac1: "Seat_backrest_fan_1_front_left_Serial_Number",
    0x6ac2: "Seat_backrest_fan_2_front_left_Serial_Number",
    0x6ac3: "Seat_cushion_fan_1_front_left_Serial_Number",
    0x6ac4: "Seat_cushion_fan_2_front_left_Serial_Number",
    0x6ac5: "Seat_backrest_fan_1_front_right_Serial_Number",
    0x6ac6: "Seat_backrest_fan_2_front_right_Serial_Number",
    0x6ac7: "Seat_cushion_fan_1_front_right_Serial_Number",
    0x6ac8: "Seat_cushion_fan_2_front_right_Serial_Number",
    0x6ac9: "Operating_and_display_unit_1_for_air_conditioning_Serial_Number",
    0x6aca: "Operating_and_display_unit_2_for_air_conditioning_Serial_Number",
    0x6acb: "Operating_and_display_unit_3_for_air_conditioning_Serial_Number",
    0x6acc: "Operating_and_display_unit_4_for_air_conditioning_Serial_Number",
    0x6acd: "Operating_and_display_unit_5_for_air_conditioning_Serial_Number",
    0x6ace: "Pedestrian_protection_left_hand_side_Serial_Number",
    0x6acf: "Pedestrian_protection_right_hand_side_Serial_Number",
    0x6ad0: "Battery_junction_box_Serial_Number",
    0x6ad1: "Cell_module_controller_1_Serial_Number",
    0x6ad2: "Cell_module_controller_2_Serial_Number",
    0x6ad3: "Cell_module_controller_3_Serial_Number",
    0x6ad4: "Cell_module_controller_4_Serial_Number",
    0x6ad5: "Cell_module_controller_5_Serial_Number",
    0x6ad6: "Cell_module_controller_6_Serial_Number",
    0x6ad7: "Cell_module_controller_7_Serial_Number",
    0x6ad8: "Cell_module_controller_8_Serial_Number",
    0x6ad9: "Cell_module_controller_9_Serial_Number",
    0x6ada: "Cell_module_controller_10_Serial_Number",
    0x6adb: "Cell_module_controller_11_Serial_Number",
    0x6adc: "Cell_module_controller_12_Serial_Number",
    0x6add: "Seat_backrest_fan_1_rear_left_Serial_Number",
    0x6ade: "Seat_backrest_fan_2_rear_left_Serial_Number",
    0x6adf: "Seat_cushion_fan_1_rear_left_Serial_Number",
    0x6ae0: "Seat_cushion_fan_2_rear_left_Serial_Number",
    0x6ae1: "Seat_backrest_fan_1_rear_right_Serial_Number",
    0x6ae2: "Seat_backrest_fan_2_rear_right_Serial_Number",
    0x6ae3: "Seat_cushion_fan_1_rear_right_Serial_Number",
    0x6ae4: "Seat_cushion_fan_2_rear_right_Serial_Number",
    0x6ae5: "Auxiliary_blower_motor_control_1_Serial_Number",
    0x6ae6: "Auxiliary_blower_motor_control_2_Serial_Number",
    0x6ae7: "Infrared_sender_for_front_observation_module_Serial_Number",
    0x6ae8: "Starter_generator_control_module_sub_Serial_Number",
    0x6ae9: "Media_player_1_sub_Serial_Number",
    0x6aea: "Media_player_2_sub_Serial_Number",
    0x6aeb: "Dedicated_short_range_communication_aerial_Serial_Number",
    0x6aec: "Refrigerant_pressure_and_temperature_sender_4_Serial_Number",
    0x6aed: "Refrigerant_pressure_and_temperature_sender_5_Serial_Number",
    0x6aee: "Refrigerant_pressure_and_temperature_sender_6_Serial_Number",
    0x6aef: "Air_coolant_actuator_1_Serial_Number",
    0x6af0: "Air_coolant_actuator_2_Serial_Number",
    0x6af1: "Cell_module_controller_13_Serial_Number",
    0x6af2: "Cell_module_controller_14_Serial_Number",
    0x6af3: "Cell_module_controller_15_Serial_Number",
    # NOTE: 0x6af4, 0x6afa, 0x6b00, 0x6b16 and 0x6b17 are absent in the
    # original table; the gaps are kept as-is.
    0x6af5: "Seat_heating_rear_1_Serial_Number",
    0x6af6: "LED_warning_indicator_Serial_Number",
    0x6af7: "Automatic_transmission_fluid_pump_Serial_Number",
    0x6af8: "Manual_transmission_fluid_pump_Serial_Number",
    0x6af9: "Convenience_and_driver_assist_operating_unit_2_Serial_Number",
    0x6afb: "Air_coolant_actuator_3_Serial_Number",
    0x6afc: "Valve_block_4_in_driver_side_rear_seat_Serial_Number",
    0x6afd: "Valve_block_4_in_passenger_side_rear_seat_Serial_Number",
    0x6afe: "Valve_block_4_in_driver_side_front_seat_Serial_Number",
    0x6aff: "Valve_block_4_in_passenger_side_front_seat_Serial_Number",
    0x6b01: "Rear_climatronic_operating_and_display_unit_Serial_Number",
    0x6b02: "Refrigerant_expansion_valve_1_Serial_Number",
    0x6b03: "Refrigerant_expansion_valve_2_Serial_Number",
    0x6b04: "Refrigerant_expansion_valve_3_Serial_Number",
    0x6b05: "Refrigerant_shut_off_valve_1_Serial_Number",
    0x6b06: "Refrigerant_shut_off_valve_2_Serial_Number",
    0x6b07: "Refrigerant_shut_off_valve_3_Serial_Number",
    0x6b08: "Refrigerant_shut_off_valve_4_Serial_Number",
    0x6b09: "Refrigerant_shut_off_valve_5_Serial_Number",
    0x6b0a: "Sunlight_sensor_Serial_Number",
    0x6b0b: "Near_field_communication_control_module_2_Serial_Number",
    0x6b0c: "Clutch_control_unit_Serial_Number",
    0x6b0d: "Electrical_charger_Serial_Number",
    0x6b0e: "Rear_light_left_2_Serial_Number",
    0x6b0f: "Rear_light_right_1_Serial_Number",
    0x6b10: "Rear_light_right_2_Serial_Number",
    0x6b11: "Sunlight_sensor_2_Serial_Number",
    0x6b12: "Radiator_shutter_Serial_Number",
    0x6b13: "Radiator_shutter_2_Serial_Number",
    0x6b14: "Radiator_shutter_3_Serial_Number",
    0x6b15: "Radiator_shutter_4_Serial_Number",
    0x6b18: "Special_key_operating_unit_Serial_Number",
    0x6b19: "Radio_interface_Serial_Number",
    0x6b1a: "Video_self_protection_recorder_Serial_Number",
    0x6b1b: "Special_vehicle_assist_interface_Serial_Number",
    0x6b1c: "Electric_system_disconnection_diode_Serial_Number",
    0x6b1d: "Cradle_rear_climatronic_operating_and_display_unit_2_Serial_Number",
    0x6b1e: "Belt_pretensioner_2nd_row_left_Serial_Number",
    0x6b1f: "Belt_pretensioner_2nd_row_right_Serial_Number",
    0x6b20: "Electrical_variable_camshaft_phasing_1_Serial_Number",
    0x6b21: "Electrical_variable_camshaft_phasing_2_Serial_Number",
    0x6b22: "Wireless_operating_unit_1_Serial_Number",
    0x6b23: "Wireless_operating_unit_2_Serial_Number",
    0x6b24: "Front_windshield_washer_pump_Serial_Number",
    0x6b25: "Air_quality_sensor_2_Serial_Number",
    0x6b26: "Fragrancing_system_Serial_Number",
    0x6b27: "Coolant_valve_Serial_Number",
    0x6b28: "Near_field_communication_control_module_3_Serial_Number",
    0x6b29: "Interior_monitoring_rear_Serial_Number",
    0x6b2a: "Cooler_fan_1_Serial_Number",
    0x6b2b: "Control_unit_heating_1_Serial_Number",
    0x6b2c: "Control_unit_heating_2_Serial_Number",
    0x6b2d: "Control_unit_heating_3_Serial_Number",
    0x6b2e: "Control_unit_heating_4_Serial_Number",
    0x6b2f: "Operating_unit_drive_mode_selection_Serial_Number",
    0x6b30: "Side_sensor_a-pillar_driver_front_Serial_Number",
    0x6b31: "Side_sensor_a-pillar_passenger_front_Serial_Number",
    0x6b32: "Sensor_high_voltage_system_1_Serial_Number",
    0x6b33: "Side_sensor_b-pillar_driver_front_Serial_Number",
    0x6b34: "Side_sensor_b-pillar_passenger_front_Serial_Number",
    0x6b35: "Multi_function_steering_wheel_control_module_2_Serial_Number",
    0x6b36: "Gear_selection_display_Serial_Number",
    0x6b37: "Cooler_fan_2_Serial_Number",
    0x6b38: "Gear_selector_control_module_Serial_Number",
    0x6b39: "Interior_light_module_2_Serial_Number",
    0x6b3a: "Radio_control_center_Serial_Number",
    0x6b3b: "Multimedia_extension_Serial_Number",
    0x6b3c: "Control_unit_differential_lock_Serial_Number",
    0x6b3d: "Control_unit_ride_control_system_Serial_Number",
    0x6b3e: "Control_unit_hands_on_detection_steering_wheel_Serial_Number",
    0x6b3f: "Front_climatronic_operating_and_display_unit_Serial_Number",
    0x6b40: "Auxiliary_display_unit_Serial_Number",
    0x6b41: "Card_reader_tv_tuner_Serial_Number",
    0x6b42: "Park_lock_actuator_Serial_Number",
    0x6b43: "Media_connector_Serial_Number",
    0x6b44: "Catalyst_heating_Serial_Number",
})
# "System Name" record labels for DIDs 0x6c01-0x6d44, registered from a
# (DID, label) table.  Some labels intentionally contain spaces (e.g.
# "Belt Pretensioner left") — they are verbatim data values, not typos.
# NOTE: assignment stays one item at a time (not dict.update) so a custom
# __setitem__ on dataIdentifiers, if any, keeps seeing every entry.
for _did, _label in [
    (0x6c01, "Control_unit_for_wiper_motor_System_Name"),
    (0x6c02, "Rain_light_recognition_sensor_System_Name"),
    (0x6c03, "Light_switch_System_Name"),
    (0x6c04, "Garage_door_opener_control_module_System_Name"),
    (0x6c05, "Garage_door_opener_operating_unit_System_Name"),
    (0x6c06, "Ignition_key_System_Name"),
    (0x6c07, "Left_front_seat_ventilation_control_module_System_Name"),
    (0x6c08, "Right_front_seat_ventilation_control_module_System_Name"),
    (0x6c09, "Left_rear_seat_ventilation_control_module_System_Name"),
    (0x6c0a, "LED_headlamp_powermodule_left_System_Name"),
    (0x6c0b, "LED_headlamp_powermodule_right_System_Name"),
    (0x6c0c, "LED_headlamp_powermodule_2_left_System_Name"),
    (0x6c0d, "LED_headlamp_powermodule_2_right_System_Name"),
    (0x6c0e, "Operating_and_display_unit_1_System_Name"),
    (0x6c0f, "Operating_and_display_unit_2_System_Name"),
    (0x6c10, "Right_rear_seat_ventilation_control_module_System_Name"),
    (0x6c11, "Data_medium_System_Name"),
    (0x6c12, "Drivers_door_control_module_System_Name"),
    (0x6c13, "Front_passengers_door_control_module_System_Name"),
    (0x6c14, "Left_headlamp_power_output_stage_System_Name"),
    (0x6c15, "Right_headlamp_power_output_stage_System_Name"),
    (0x6c16, "Sensor_for_anti_theft_alarm_system_System_Name"),
    (0x6c17, "Rear_lid_control_module_2_System_Name"),
    (0x6c18, "Alarm_horn_System_Name"),
    (0x6c19, "Automatic_day_night_interior_mirror_System_Name"),
    (0x6c1a, "Remote_control_auxiliary_heater_System_Name"),
    (0x6c1b, "Fresh_air_blower_front_System_Name"),
    (0x6c1c, "Fresh_air_blower_back_System_Name"),
    (0x6c1d, "Alternator_System_Name"),
    (0x6c1e, "Interior_light_module_System_Name"),
    (0x6c1f, "Refrigerant_pressure_and_temperature_sender_System_Name"),
    (0x6c20, "Sun_roof_System_Name"),
    (0x6c21, "Steering_column_lock_actuator_System_Name"),
    (0x6c22, "Anti_theft_tilt_system_control_unit_System_Name"),
    (0x6c23, "Tire_pressure_monitor_antenna_System_Name"),
    (0x6c24, "Heated_windshield_control_module_System_Name"),
    (0x6c25, "Rear_light_left_1_System_Name"),
    (0x6c26, "Ceiling_light_module_System_Name"),
    (0x6c27, "Left_front_massage_seat_control_module_System_Name"),
    (0x6c28, "Right_front_massage_seat_control_module_System_Name"),
    (0x6c29, "Control_module_for_auxiliary_air_heater_System_Name"),
    (0x6c2a, "Belt Pretensioner left_System_Name"),
    (0x6c2b, "Belt Pretensioner right_System_Name"),
    (0x6c2c, "Occupant Detection_System_Name"),
    (0x6c2d, "Selector_lever_System_Name"),
    (0x6c2e, "NOx_sensor_1_System_Name"),
    (0x6c2f, "NOx_sensor_2_System_Name"),
    (0x6c30, "Ioniser_System_Name"),
    (0x6c31, "Multi_function_steering_wheel_control_module_System_Name"),
    (0x6c32, "Left_rear_door_control_module_System_Name"),
    (0x6c33, "Right_rear_door_control_module_System_Name"),
    (0x6c34, "Left_rear_massage_seat_control_module_System_Name"),
    (0x6c35, "Right_rear_massage_seat_control_module_System_Name"),
    (0x6c36, "Display_unit_1_for_multimedia_system_System_Name"),
    (0x6c37, "Battery_monitoring_control_module_System_Name"),
    (0x6c38, "Roof_blind_System_Name"),
    (0x6c39, "Sun_roof_2_System_Name"),
    (0x6c3a, "Steering_angle_sender_System_Name"),
    (0x6c3b, "Lane_change_assistant 2_System_Name"),
    (0x6c3c, "Pitch_rate_sender_System_Name"),
    (0x6c3d, "ESP_sensor_unit_System_Name"),
    (0x6c3e, "Electronic_ignition_lock_System_Name"),
    (0x6c3f, "Air_quality_sensor_System_Name"),
    (0x6c40, "Display_unit_2_for_multimedia_system_System_Name"),
    (0x6c41, "Telephone_handset_2_System_Name"),
    (0x6c42, "Chip_card_reader_control_module_System_Name"),
    (0x6c43, "Traffic_data_aerial_System_Name"),
    (0x6c44, "Hands_free_system_System_Name"),
    (0x6c45, "Telephone_handset_System_Name"),
    (0x6c46, "Display_unit_front_for_multimedia_system_System_Name"),
    (0x6c47, "Multimedia_operating_unit_System_Name"),
    (0x6c48, "Digital_sound_system_control_module_2_System_Name"),
    (0x6c49, "Electrically_adjustable_steering_column_System_Name"),
    (0x6c4a, "Interface_for_external_multimedia_unit_System_Name"),
    (0x6c4b, "Relative_Air_Humidity_Interior_Sender_System_Name"),
    (0x6c4c, "Drivers_door_rear_control_module_System_Name"),
    (0x6c4d, "Passengers_rear_door_control_module_System_Name"),
    (0x6c4e, "Sensor_controlled_power_rear_lid_System_Name"),
    (0x6c4f, "Camera_for_night_vision_system_System_Name"),
    (0x6c50, "Relative_humidity_sensor_in_fresh_air_intake_duct_System_Name"),
    (0x6c51, "Rear_spoiler_adjustment_System_Name"),
    (0x6c52, "Roof_blind_2_System_Name"),
    (0x6c53, "Motor_for_wind_deflector_System_Name"),
    (0x6c54, "Voltage_stabilizer_System_Name"),
    (0x6c55, "Switch_module_for_driver_seat_System_Name"),
    (0x6c56, "Switch_module_for_front_passenger_seat_System_Name"),
    (0x6c57, "Switch_module_for_rear_seat_driver_side_System_Name"),
    (0x6c58, "Switch_module_for_rear_seat_front_passenger_side_System_Name"),
    (0x6c59, "Switch_module_2_for_driver_seat_System_Name"),
    (0x6c5a, "Battery_charger_unit_1_System_Name"),
    (0x6c5b, "Battery_charger_unit_2_System_Name"),
    (0x6c5c, "Battery_charger_unit_3_System_Name"),
    (0x6c5d, "Air_conditioning_compressor_System_Name"),
    (0x6c5e, "Neck_heating_left_System_Name"),
    (0x6c5f, "Neck_heating_right_System_Name"),
    (0x6c60, "Switch_module_2_for_front_passenger_seat_System_Name"),
    (0x6c61, "Switch_module_2_for_rear_seat_front_passenger_side_System_Name"),
    (0x6c62, "Compact_disc_database_System_Name"),
    (0x6c63, "Rear_climatronic_operating_and_display_unit_left_System_Name"),
    (0x6c64, "Rear_climatronic_operating_and_display_unit_right_System_Name"),
    (0x6c65, "Door_handle_front_left_Kessy_System_Name"),
    (0x6c66, "Door_handle_front_right_Kessy_System_Name"),
    (0x6c67, "Door_handle_rear_left_Kessy_System_Name"),
    (0x6c68, "Door_handle_rear_right_Kessy_System_Name"),
    (0x6c69, "Power_converter_DC_AC_System_Name"),
    (0x6c6a, "Battery_monitoring_control_module_2_System_Name"),
    (0x6c6b, "Matrix_headlamp_powermodule_1_left_System_Name"),
    (0x6c6c, "Matrix_headlamp_powermodule_1_right_System_Name"),
    (0x6c6d, "High_beam_powermodule_left_System_Name"),
    (0x6c6e, "High_beam_powermodule_right_System_Name"),
    (0x6c6f, "Air_suspension_compressor_System_Name"),
    (0x6c70, "Rear_brake_actuator_1_System_Name"),
    (0x6c71, "Rear_brake_actuator_2_System_Name"),
    (0x6c72, "Analog_clock_System_Name"),
    (0x6c73, "Rear_door_control_module_System_Name"),
    (0x6c79, "Data_medium_2_System_Name"),
    (0x6c7a, "Operating_unit_center_console_1_System_Name"),
    (0x6c7b, "Operating_unit_center_console_2_System_Name"),
    (0x6c7c, "Operating_unit_center_console_3_System_Name"),
    (0x6c7d, "Operating_unit_center_console_4_System_Name"),
    (0x6c7e, "Interface_for_radiodisplay_System_Name"),
    (0x6c7f, "Parkassist_entry_System_Name"),
    (0x6c86, "Belt_pretensioner_3rd_row_left_System_Name"),
    (0x6c87, "Belt_pretensioner_3rd_row_right_System_Name"),
    (0x6c88, "Injection_valve_heater_control_unit_System_Name"),
    (0x6c89, "Steering_column_switch_System_Name"),
    (0x6c8a, "Brake_assistance_System_Name"),
    (0x6c8b, "Trailer_articulation_angle_sensor_System_Name"),
    (0x6c8c, "Cup_holder_with_heater_and_cooling_element_System_Name"),
    (0x6c8d, "Range_of_vision_sensing_System_Name"),
    (0x6c8e, "Convenience_and_driver_assist_operating_unit_System_Name"),
    (0x6c8f, "Cradle_rear_climatronic_operating_and_display_unit_System_Name"),
    (0x6c90, "Trailer_weight_nose_weight_detection_System_Name"),
    (0x6c91, "Sensor_carbon_dioxide_concentration_System_Name"),
    (0x6c92, "Sensor_fine_dust_concentration_System_Name"),
    (0x6c93, "Volume_control_1_System_Name"),
    (0x6c94, "Belt_buckle_presenter_2nd_row_left_System_Name"),
    (0x6c95, "Belt_buckle_presenter_2nd_row_right_System_Name"),
    (0x6c96, "Operating_and_display_unit_6_for_air_conditioning_System_Name"),
    (0x6c97, "Active_accelerator_pedal_System_Name"),
    (0x6c98, "Multimedia_operating_unit_2_System_Name"),
    (0x6c99, "Display_unit_3_for_multimedia_system_System_Name"),
    (0x6c9a, "Display_unit_4_for_multimedia_system_System_Name"),
    (0x6c9b, "Display_unit_5_for_multimedia_system_System_Name"),
    (0x6c9c, "Control_module_for_auxiliary_blower_motors_System_Name"),
    (0x6c9d, "Operating_and_display_unit_3_System_Name"),
    (0x6c9e, "Operating_and_display_unit_4_System_Name"),
    (0x6c9f, "Operating_and_display_unit_5_System_Name"),
    (0x6ca0, "Side Sensor Driver Front_System_Name"),
    (0x6ca1, "Side Sensor Passenger Front_System_Name"),
    (0x6ca2, "Side Sensor Driver Rear_System_Name"),
    (0x6ca3, "Side Sensor Passenger Rear_System_Name"),
    (0x6ca4, "Front Sensor Driver_System_Name"),
    (0x6ca5, "Front Sensor Passenger_System_Name"),
    (0x6ca6, "Pedestrian Protection Driver_System_Name"),
    (0x6ca7, "Pedestrian Protection Passenger_System_Name"),
    (0x6ca8, "Rear Sensor Center_System_Name"),
    (0x6ca9, "Pedestrian Protection Center_System_Name"),
    (0x6caa, "Pedestrian Protection Contact_System_Name"),
    (0x6cab, "Pedestrian_protection_driver_2_System_Name"),
    (0x6cac, "Pedestrian_protection_passenger_2_System_Name"),
    (0x6cad, "Central_sensor_XY_System_Name"),
    (0x6cae, "Refrigerant_pressure_and_temperature_sender_2_System_Name"),
    (0x6caf, "Refrigerant_pressure_and_temperature_sender_3_System_Name"),
    (0x6cb0, "Switch_for_rear_multicontour_seat_driver_side_System_Name"),
    (0x6cb1, "Valve_block_1_in_driver_side_rear_seat_System_Name"),
    (0x6cb2, "Valve_block_2_in_driver_side_rear_seat_System_Name"),
    (0x6cb3, "Valve_block_3_in_driver_side_rear_seat_System_Name"),
    (0x6cb4, "Switch_for_rear_multicontour_seat_passenger_side_System_Name"),
    (0x6cb5, "Valve_block_1_in_passenger_side_rear_seat_System_Name"),
    (0x6cb6, "Valve_block_2_in_passenger_side_rear_seat_System_Name"),
    (0x6cb7, "Valve_block_3_in_passenger_side_rear_seat_System_Name"),
    (0x6cb8, "Switch_for_front_multicontour_seat_driver_side_System_Name"),
    (0x6cb9, "Valve_block_1_in_driver_side_front_seat_System_Name"),
    (0x6cba, "Valve_block_2_in_driver_side_front_seat_System_Name"),
    (0x6cbb, "Valve_block_3_in_driver_side_front_seat_System_Name"),
    (0x6cbc, "Switch_for_front_multicontour_seat_passenger_side_System_Name"),
    (0x6cbd, "Valve_block_1_in_passenger_side_front_seat_System_Name"),
    (0x6cbe, "Valve_block_2_in_passenger_side_front_seat_System_Name"),
    (0x6cbf, "Valve_block_3_in_passenger_side_front_seat_System_Name"),
    (0x6cc0, "Coolant_heater_System_Name"),
    (0x6cc1, "Seat_backrest_fan_1_front_left_System_Name"),
    (0x6cc2, "Seat_backrest_fan_2_front_left_System_Name"),
    (0x6cc3, "Seat_cushion_fan_1_front_left_System_Name"),
    (0x6cc4, "Seat_cushion_fan_2_front_left_System_Name"),
    (0x6cc5, "Seat_backrest_fan_1_front_right_System_Name"),
    (0x6cc6, "Seat_backrest_fan_2_front_right_System_Name"),
    (0x6cc7, "Seat_cushion_fan_1_front_right_System_Name"),
    (0x6cc8, "Seat_cushion_fan_2_front_right_System_Name"),
    (0x6cc9, "Operating_and_display_unit_1_for_air_conditioning_System_Name"),
    (0x6cca, "Operating_and_display_unit_2_for_air_conditioning_System_Name"),
    (0x6ccb, "Operating_and_display_unit_3_for_air_conditioning_System_Name"),
    (0x6ccc, "Operating_and_display_unit_4_for_air_conditioning_System_Name"),
    (0x6ccd, "Operating_and_display_unit_5_for_air_conditioning_System_Name"),
    (0x6cce, "Pedestrian_protection_left_hand_side_System_Name"),
    (0x6ccf, "Pedestrian_protection_right_hand_side_System_Name"),
    (0x6cd0, "Battery_junction_box_System_Name"),
    (0x6cd1, "Cell_module_controller_1_System_Name"),
    (0x6cd2, "Cell_module_controller_2_System_Name"),
    (0x6cd3, "Cell_module_controller_3_System_Name"),
    (0x6cd4, "Cell_module_controller_4_System_Name"),
    (0x6cd5, "Cell_module_controller_5_System_Name"),
    (0x6cd6, "Cell_module_controller_6_System_Name"),
    (0x6cd7, "Cell_module_controller_7_System_Name"),
    (0x6cd8, "Cell_module_controller_8_System_Name"),
    (0x6cd9, "Cell_module_controller_9_System_Name"),
    (0x6cda, "Cell_module_controller_10_System_Name"),
    (0x6cdb, "Cell_module_controller_11_System_Name"),
    (0x6cdc, "Cell_module_controller_12_System_Name"),
    (0x6cdd, "Seat_backrest_fan_1_rear_left_System_Name"),
    (0x6cde, "Seat_backrest_fan_2_rear_left_System_Name"),
    (0x6cdf, "Seat_cushion_fan_1_rear_left_System_Name"),
    (0x6ce0, "Seat_cushion_fan_2_rear_left_System_Name"),
    (0x6ce1, "Seat_backrest_fan_1_rear_right_System_Name"),
    (0x6ce2, "Seat_backrest_fan_2_rear_right_System_Name"),
    (0x6ce3, "Seat_cushion_fan_1_rear_right_System_Name"),
    (0x6ce4, "Seat_cushion_fan_2_rear_right_System_Name"),
    (0x6ce5, "Auxiliary_blower_motor_control_1_System_Name"),
    (0x6ce6, "Auxiliary_blower_motor_control_2_System_Name"),
    (0x6ce7, "Infrared_sender_for_front_observation_module_System_Name"),
    (0x6ce8, "Starter_generator_control_module_sub_System_Name"),
    (0x6ce9, "Media_player_1_sub_System_Name"),
    (0x6cea, "Media_player_2_sub_System_Name"),
    (0x6ceb, "Dedicated_short_range_communication_aerial_System_Name"),
    (0x6cec, "Refrigerant_pressure_and_temperature_sender_4_System_Name"),
    (0x6ced, "Refrigerant_pressure_and_temperature_sender_5_System_Name"),
    (0x6cee, "Refrigerant_pressure_and_temperature_sender_6_System_Name"),
    (0x6cef, "Air_coolant_actuator_1_System_Name"),
    (0x6cf0, "Air_coolant_actuator_2_System_Name"),
    (0x6cf1, "Cell_module_controller_13_System_Name"),
    (0x6cf2, "Cell_module_controller_14_System_Name"),
    (0x6cf3, "Cell_module_controller_15_System_Name"),
    (0x6cf5, "Seat_heating_rear_1_System_Name"),
    (0x6cf6, "LED_warning_indicator_System_Name"),
    (0x6cf7, "Automatic_transmission_fluid_pump_System_Name"),
    (0x6cf8, "Manual_transmission_fluid_pump_System_Name"),
    (0x6cf9, "Convenience_and_driver_assist_operating_unit_2_System_Name"),
    (0x6cfb, "Air_coolant_actuator_3_System_Name"),
    (0x6cfc, "Valve_block_4_in_driver_side_rear_seat_System_Name"),
    (0x6cfd, "Valve_block_4_in_passenger_side_rear_seat_System_Name"),
    (0x6cfe, "Valve_block_4_in_driver_side_front_seat_System_Name"),
    (0x6cff, "Valve_block_4_in_passenger_side_front_seat_System_Name"),
    (0x6d01, "Rear_climatronic_operating_and_display_unit_System_Name"),
    (0x6d02, "Refrigerant_expansion_valve_1_System_Name"),
    (0x6d03, "Refrigerant_expansion_valve_2_System_Name"),
    (0x6d04, "Refrigerant_expansion_valve_3_System_Name"),
    (0x6d05, "Refrigerant_shut_off_valve_1_System_Name"),
    (0x6d06, "Refrigerant_shut_off_valve_2_System_Name"),
    (0x6d07, "Refrigerant_shut_off_valve_3_System_Name"),
    (0x6d08, "Refrigerant_shut_off_valve_4_System_Name"),
    (0x6d09, "Refrigerant_shut_off_valve_5_System_Name"),
    (0x6d0a, "Sunlight_sensor_System_Name"),
    (0x6d0b, "Near_field_communication_control_module_2_System_Name"),
    (0x6d0c, "Clutch_control_unit_System_Name"),
    (0x6d0d, "Electrical_charger_System_Name"),
    (0x6d0e, "Rear_light_left_2_System_Name"),
    (0x6d0f, "Rear_light_right_1_System_Name"),
    (0x6d10, "Rear_light_right_2_System_Name"),
    (0x6d11, "Sunlight_sensor_2_System_Name"),
    (0x6d12, "Radiator_shutter_System_Name"),
    (0x6d13, "Radiator_shutter_2_System_Name"),
    (0x6d14, "Radiator_shutter_3_System_Name"),
    (0x6d15, "Radiator_shutter_4_System_Name"),
    (0x6d18, "Special_key_operating_unit_System_Name"),
    (0x6d19, "Radio_interface_System_Name"),
    (0x6d1a, "Video_self_protection_recorder_System_Name"),
    (0x6d1b, "Special_vehicle_assist_interface_System_Name"),
    (0x6d1c, "Electric_system_disconnection_diode_System_Name"),
    (0x6d1d, "Cradle_rear_climatronic_operating_and_display_unit_2_System_Name"),
    (0x6d1e, "Belt_pretensioner_2nd_row_left_System_Name"),
    (0x6d1f, "Belt_pretensioner_2nd_row_right_System_Name"),
    (0x6d20, "Electrical_variable_camshaft_phasing_1_System_Name"),
    (0x6d21, "Electrical_variable_camshaft_phasing_2_System_Name"),
    (0x6d22, "Wireless_operating_unit_1_System_Name"),
    (0x6d23, "Wireless_operating_unit_2_System_Name"),
    (0x6d24, "Front_windshield_washer_pump_System_Name"),
    (0x6d25, "Air_quality_sensor_2_System_Name"),
    (0x6d26, "Fragrancing_system_System_Name"),
    (0x6d27, "Coolant_valve_System_Name"),
    (0x6d28, "Near_field_communication_control_module_3_System_Name"),
    (0x6d29, "Interior_monitoring_rear_System_Name"),
    (0x6d2a, "Cooler_fan_1_System_Name"),
    (0x6d2b, "Control_unit_heating_1_System_Name"),
    (0x6d2c, "Control_unit_heating_2_System_Name"),
    (0x6d2d, "Control_unit_heating_3_System_Name"),
    (0x6d2e, "Control_unit_heating_4_System_Name"),
    (0x6d2f, "Operating_unit_drive_mode_selection_System_Name"),
    (0x6d30, "Side_sensor_a-pillar_driver_front_System_Name"),
    (0x6d31, "Side_sensor_a-pillar_passenger_front_System_Name"),
    (0x6d32, "Sensor_high_voltage_system_1_System_Name"),
    (0x6d33, "Side_sensor_b-pillar_driver_front_System_Name"),
    (0x6d34, "Side_sensor_b-pillar_passenger_front_System_Name"),
    (0x6d35, "Multi_function_steering_wheel_control_module_2_System_Name"),
    (0x6d36, "Gear_selection_display_System_Name"),
    (0x6d37, "Cooler_fan_2_System_Name"),
    (0x6d38, "Gear_selector_control_module_System_Name"),
    (0x6d39, "Interior_light_module_2_System_Name"),
    (0x6d3a, "Radio_control_center_System_Name"),
    (0x6d3b, "Multimedia_extension_System_Name"),
    (0x6d3c, "Control_unit_differential_lock_System_Name"),
    (0x6d3d, "Control_unit_ride_control_system_System_Name"),
    (0x6d3e, "Control_unit_hands_on_detection_steering_wheel_System_Name"),
    (0x6d3f, "Front_climatronic_operating_and_display_unit_System_Name"),
    (0x6d40, "Auxiliary_display_unit_System_Name"),
    (0x6d41, "Card_reader_tv_tuner_System_Name"),
    (0x6d42, "Park_lock_actuator_System_Name"),
    (0x6d43, "Media_connector_System_Name"),
    (0x6d44, "Catalyst_heating_System_Name"),
]:
    UDS_RDBI.dataIdentifiers[_did] = _label
del _did, _label
# "VW Slave FAZIT string" record labels for DIDs 0x6e01-0x6e6c, registered
# from a (DID, label) table.  Labels containing spaces are verbatim data
# values.  NOTE: assignment stays one item at a time (not dict.update) so a
# custom __setitem__ on dataIdentifiers, if any, keeps seeing every entry.
for _did, _label in [
    (0x6e01, "Control_unit_for_wiper_motor_VW_Slave_FAZIT_string"),
    (0x6e02, "Rain_light_recognition_sensor_VW_Slave_FAZIT_string"),
    (0x6e03, "Light_switch_VW_Slave_FAZIT_string"),
    (0x6e04, "Garage_door_opener_control_module_VW_Slave_FAZIT_string"),
    (0x6e05, "Garage_door_opener_operating_unit_VW_Slave_FAZIT_string"),
    (0x6e06, "Ignition_key_VW_Slave_FAZIT_string"),
    (0x6e07, "Left_front_seat_ventilation_control_module_VW_Slave_FAZIT_string"),
    (0x6e08, "Right_front_seat_ventilation_control_module_VW_Slave_FAZIT_string"),
    (0x6e09, "Left_rear_seat_ventilation_control_module_VW_Slave_FAZIT_string"),
    (0x6e0a, "LED_headlamp_powermodule_left_VW_Slave_FAZIT_string"),
    (0x6e0b, "LED_headlamp_powermodule_right_VW_Slave_FAZIT_string"),
    (0x6e0c, "LED_headlamp_powermodule_2_left_VW_Slave_FAZIT_string"),
    (0x6e0d, "LED_headlamp_powermodule_2_right_VW_Slave_FAZIT_string"),
    (0x6e0e, "Operating_and_display_unit_1_VW_Slave_FAZIT_string"),
    (0x6e0f, "Operating_and_display_unit_2_VW_Slave_FAZIT_string"),
    (0x6e10, "Right_rear_seat_ventilation_control_module_VW_Slave_FAZIT_string"),
    (0x6e11, "Data_medium_VW_Slave_FAZIT_string"),
    (0x6e12, "Drivers_door_control_module_VW_Slave_FAZIT_string"),
    (0x6e13, "Front_passengers_door_control_module_VW_Slave_FAZIT_string"),
    (0x6e14, "Left_headlamp_power_output_stage_VW_Slave_FAZIT_string"),
    (0x6e15, "Right_headlamp_power_output_stage_VW_Slave_FAZIT_string"),
    (0x6e16, "Sensor_for_anti_theft_alarm_system_VW_Slave_FAZIT_string"),
    (0x6e17, "Rear_lid_control_module_2_VW_Slave_FAZIT_string"),
    (0x6e18, "Alarm_horn_VW_Slave_FAZIT_string"),
    (0x6e19, "Automatic_day_night_interior_mirror_VW_Slave_FAZIT_string"),
    (0x6e1a, "Remote_control_auxiliary_heater_VW_Slave_FAZIT_string"),
    (0x6e1b, "Fresh_air_blower_front_VW_Slave_FAZIT_string"),
    (0x6e1c, "Fresh_air_blower_back_VW_Slave_FAZIT_string"),
    (0x6e1d, "Alternator_VW_Slave_FAZIT_string"),
    (0x6e1e, "Interior_light_module_VW_Slave_FAZIT_string"),
    (0x6e1f, "Refrigerant_pressure_and_temperature_sender_VW_Slave_FAZIT_string"),
    (0x6e20, "Sun_roof_VW_Slave_FAZIT_string"),
    (0x6e21, "Steering_column_lock_actuator_VW_Slave_FAZIT_string"),
    (0x6e22, "Anti_theft_tilt_system_control_unit_VW_Slave_FAZIT_string"),
    (0x6e23, "Tire_pressure_monitor_antenna_VW_Slave_FAZIT_string"),
    (0x6e24, "Heated_windshield_control_module_VW_Slave_FAZIT_string"),
    (0x6e25, "Rear_light_left_1_VW_Slave_FAZIT_string"),
    (0x6e26, "Ceiling_light_module_VW_Slave_FAZIT_string"),
    (0x6e27, "Left_front_massage_seat_control_module_VW_Slave_FAZIT_string"),
    (0x6e28, "Right_front_massage_seat_control_module_VW_Slave_FAZIT_string"),
    (0x6e29, "Control_module_for_auxiliary_air_heater_VW_Slave_FAZIT_string"),
    (0x6e2a, "Belt Pretensioner left_VW_Slave_FAZIT_string"),
    (0x6e2b, "Belt Pretensioner right_VW_Slave_FAZIT_string"),
    (0x6e2c, "Occupant Detection_VW_Slave_FAZIT_string"),
    (0x6e2d, "Selector_lever_VW_Slave_FAZIT_string"),
    (0x6e2e, "NOx_sensor_1_VW_Slave_FAZIT_string"),
    (0x6e2f, "NOx_sensor_2_VW_Slave_FAZIT_string"),
    (0x6e30, "Ioniser_VW_Slave_FAZIT_string"),
    (0x6e31, "Multi_function_steering_wheel_control_module_VW_Slave_FAZIT_string"),
    (0x6e32, "Left_rear_door_control_module_VW_Slave_FAZIT_string"),
    (0x6e33, "Right_rear_door_control_module_VW_Slave_FAZIT_string"),
    (0x6e34, "Left_rear_massage_seat_control_module_VW_Slave_FAZIT_string"),
    (0x6e35, "Right_rear_massage_seat_control_module_VW_Slave_FAZIT_string"),
    (0x6e36, "Display_unit_1_for_multimedia_system_VW_Slave_FAZIT_string"),
    (0x6e37, "Battery_monitoring_control_module_VW_Slave_FAZIT_string"),
    (0x6e38, "Roof_blind_VW_Slave_FAZIT_string"),
    (0x6e39, "Sun_roof_2_VW_Slave_FAZIT_string"),
    (0x6e3a, "Steering_angle_sender_VW_Slave_FAZIT_string"),
    (0x6e3b, "Lane_change_assistant 2_VW_Slave_FAZIT_string"),
    (0x6e3c, "Pitch_rate_sender_VW_Slave_FAZIT_string"),
    (0x6e3d, "ESP_sensor_unit_VW_Slave_FAZIT_string"),
    (0x6e3e, "Electronic_ignition_lock_VW_Slave_FAZIT_string"),
    (0x6e3f, "Air_quality_sensor_VW_Slave_FAZIT_string"),
    (0x6e40, "Display_unit_2_for_multimedia_system_VW_Slave_FAZIT_string"),
    (0x6e41, "Telephone_handset_2_VW_Slave_FAZIT_string"),
    (0x6e42, "Chip_card_reader_control_module_VW_Slave_FAZIT_string"),
    (0x6e43, "Traffic_data_aerial_VW_Slave_FAZIT_string"),
    (0x6e44, "Hands_free_system_VW_Slave_FAZIT_string"),
    (0x6e45, "Telephone_handset_VW_Slave_FAZIT_string"),
    (0x6e46, "Display_unit_front_for_multimedia_system_VW_Slave_FAZIT_string"),
    (0x6e47, "Multimedia_operating_unit_VW_Slave_FAZIT_string"),
    (0x6e48, "Digital_sound_system_control_module_2_VW_Slave_FAZIT_string"),
    (0x6e49, "Electrically_adjustable_steering_column_VW_Slave_FAZIT_string"),
    (0x6e4a, "Interface_for_external_multimedia_unit_VW_Slave_FAZIT_string"),
    (0x6e4b, "Relative_Air_Humidity_Interior_Sender_VW_Slave_FAZIT_string"),
    (0x6e4c, "Drivers_door_rear_control_module_VW_Slave_FAZIT_string"),
    (0x6e4d, "Passengers_rear_door_control_module_VW_Slave_FAZIT_string"),
    (0x6e4e, "Sensor_controlled_power_rear_lid_VW_Slave_FAZIT_string"),
    (0x6e4f, "Camera_for_night_vision_system_VW_Slave_FAZIT_string"),
    (0x6e50, "Relative_humidity_sensor_in_fresh_air_intake_duct_VW_Slave_FAZIT_string"),
    (0x6e51, "Rear_spoiler_adjustment_VW_Slave_FAZIT_string"),
    (0x6e52, "Roof_blind_2_VW_Slave_FAZIT_string"),
    (0x6e53, "Motor_for_wind_deflector_VW_Slave_FAZIT_string"),
    (0x6e54, "Voltage_stabilizer_VW_Slave_FAZIT_string"),
    (0x6e55, "Switch_module_for_driver_seat_VW_Slave_FAZIT_string"),
    (0x6e56, "Switch_module_for_front_passenger_seat_VW_Slave_FAZIT_string"),
    (0x6e57, "Switch_module_for_rear_seat_driver_side_VW_Slave_FAZIT_string"),
    (0x6e58, "Switch_module_for_rear_seat_front_passenger_side_VW_Slave_FAZIT_string"),
    (0x6e59, "Switch_module_2_for_driver_seat_VW_Slave_FAZIT_string"),
    (0x6e5a, "Battery_charger_unit_1_VW_Slave_FAZIT_string"),
    (0x6e5b, "Battery_charger_unit_2_VW_Slave_FAZIT_string"),
    (0x6e5c, "Battery_charger_unit_3_VW_Slave_FAZIT_string"),
    (0x6e5d, "Air_conditioning_compressor_VW_Slave_FAZIT_string"),
    (0x6e5e, "Neck_heating_left_VW_Slave_FAZIT_string"),
    (0x6e5f, "Neck_heating_right_VW_Slave_FAZIT_string"),
    (0x6e60, "Switch_module_2_for_front_passenger_seat_VW_Slave_FAZIT_string"),
    (0x6e61, "Switch_module_2_for_rear_seat_front_passenger_side_VW_Slave_FAZIT_string"),
    (0x6e62, "Compact_disc_database_VW_Slave_FAZIT_string"),
    (0x6e63, "Rear_climatronic_operating_and_display_unit_left_VW_Slave_FAZIT_string"),
    (0x6e64, "Rear_climatronic_operating_and_display_unit_right_VW_Slave_FAZIT_string"),
    (0x6e65, "Door_handle_front_left_Kessy_VW_Slave_FAZIT_string"),
    (0x6e66, "Door_handle_front_right_Kessy_VW_Slave_FAZIT_string"),
    (0x6e67, "Door_handle_rear_left_Kessy_VW_Slave_FAZIT_string"),
    (0x6e68, "Door_handle_rear_right_Kessy_VW_Slave_FAZIT_string"),
    (0x6e69, "Power_converter_DC_AC_VW_Slave_FAZIT_string"),
    (0x6e6a, "Battery_monitoring_control_module_2_VW_Slave_FAZIT_string"),
    (0x6e6b, "Matrix_headlamp_powermodule_1_left_VW_Slave_FAZIT_string"),
    (0x6e6c, "Matrix_headlamp_powermodule_1_right_VW_Slave_FAZIT_string"),
]:
    UDS_RDBI.dataIdentifiers[_did] = _label
del _did, _label
UDS_RDBI.dataIdentifiers[0x6e6d] = "High_beam_powermodule_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e6e] = "High_beam_powermodule_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e6f] = "Air_suspension_compressor_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e70] = "Rear_brake_actuator_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e71] = "Rear_brake_actuator_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e72] = "Analog_clock_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e73] = "Rear_door_control_module_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e79] = "Data_medium_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e7a] = "Operating_unit_center_console_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e7b] = "Operating_unit_center_console_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e7c] = "Operating_unit_center_console_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e7d] = "Operating_unit_center_console_4_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e7e] = "Interface_for_radiodisplay_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e7f] = "Parkassist_entry_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e86] = "Belt_pretensioner_3rd_row_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e87] = "Belt_pretensioner_3rd_row_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e88] = "Injection_valve_heater_control_unit_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e89] = "Steering_column_switch_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e8a] = "Brake_assistance_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e8b] = "Trailer_articulation_angle_sensor_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e8c] = "Cup_holder_with_heater_and_cooling_element_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e8d] = "Range_of_vision_sensing_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e8e] = "Convenience_and_driver_assist_operating_unit_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e8f] = "Cradle_rear_climatronic_operating_and_display_unit_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e90] = "Trailer_weight_nose_weight_detection_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e91] = "Sensor_carbon_dioxide_concentration_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e92] = "Sensor_fine_dust_concentration_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e93] = "Volume_control_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e94] = "Belt_buckle_presenter_2nd_row_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e95] = "Belt_buckle_presenter_2nd_row_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e96] = "Operating_and_display_unit_6_for_air_conditioning_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e97] = "Active_accelerator_pedal_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e98] = "Multimedia_operating_unit_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e99] = "Display_unit_3_for_multimedia_system_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e9a] = "Display_unit_4_for_multimedia_system_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e9b] = "Display_unit_5_for_multimedia_system_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e9c] = "Control_module_for_auxiliary_blower_motors_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e9d] = "Operating_and_display_unit_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e9e] = "Operating_and_display_unit_4_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6e9f] = "Operating_and_display_unit_5_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea0] = "Side Sensor Driver Front_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea1] = "Side Sensor Passenger Front_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea2] = "Side Sensor Driver Rear_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea3] = "Side Sensor Passenger Rear_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea4] = "Front Sensor Driver_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea5] = "Front Sensor Passenger_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea6] = "Pedestrian Protection Driver_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea7] = "Pedestrian Protection Passenger_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea8] = "Rear Sensor Center_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ea9] = "Pedestrian Protection Center_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eaa] = "Pedestrian Protection Contact_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eab] = "Pedestrian_protection_driver_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eac] = "Pedestrian_protection_passenger_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ead] = "Central_sensor_XY_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eae] = "Refrigerant_pressure_and_temperature_sender_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eaf] = "Refrigerant_pressure_and_temperature_sender_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb0] = "Switch_for_rear_multicontour_seat_driver_side_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb1] = "Valve_block_1_in_driver_side_rear_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb2] = "Valve_block_2_in_driver_side_rear_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb3] = "Valve_block_3_in_driver_side_rear_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb4] = "Switch_for_rear_multicontour_seat_passenger_side_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb5] = "Valve_block_1_in_passenger_side_rear_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb6] = "Valve_block_2_in_passenger_side_rear_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb7] = "Valve_block_3_in_passenger_side_rear_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb8] = "Switch_for_front_multicontour_seat_driver_side_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eb9] = "Valve_block_1_in_driver_side_front_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eba] = "Valve_block_2_in_driver_side_front_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ebb] = "Valve_block_3_in_driver_side_front_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ebc] = "Switch_for_front_multicontour_seat_passenger_side_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ebd] = "Valve_block_1_in_passenger_side_front_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ebe] = "Valve_block_2_in_passenger_side_front_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ebf] = "Valve_block_3_in_passenger_side_front_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec0] = "Coolant_heater_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec1] = "Seat_backrest_fan_1_front_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec2] = "Seat_backrest_fan_2_front_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec3] = "Seat_cushion_fan_1_front_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec4] = "Seat_cushion_fan_2_front_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec5] = "Seat_backrest_fan_1_front_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec6] = "Seat_backrest_fan_2_front_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec7] = "Seat_cushion_fan_1_front_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec8] = "Seat_cushion_fan_2_front_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ec9] = "Operating_and_display_unit_1_for_air_conditioning_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eca] = "Operating_and_display_unit_2_for_air_conditioning_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ecb] = "Operating_and_display_unit_3_for_air_conditioning_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ecc] = "Operating_and_display_unit_4_for_air_conditioning_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ecd] = "Operating_and_display_unit_5_for_air_conditioning_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ece] = "Pedestrian_protection_left_hand_side_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ecf] = "Pedestrian_protection_right_hand_side_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed0] = "Battery_junction_box_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed1] = "Cell_module_controller_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed2] = "Cell_module_controller_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed3] = "Cell_module_controller_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed4] = "Cell_module_controller_4_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed5] = "Cell_module_controller_5_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed6] = "Cell_module_controller_6_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed7] = "Cell_module_controller_7_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed8] = "Cell_module_controller_8_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ed9] = "Cell_module_controller_9_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eda] = "Cell_module_controller_10_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6edb] = "Cell_module_controller_11_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6edc] = "Cell_module_controller_12_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6edd] = "Seat_backrest_fan_1_rear_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ede] = "Seat_backrest_fan_2_rear_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6edf] = "Seat_cushion_fan_1_rear_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee0] = "Seat_cushion_fan_2_rear_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee1] = "Seat_backrest_fan_1_rear_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee2] = "Seat_backrest_fan_2_rear_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee3] = "Seat_cushion_fan_1_rear_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee4] = "Seat_cushion_fan_2_rear_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee5] = "Auxiliary_blower_motor_control_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee6] = "Auxiliary_blower_motor_control_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee7] = "Infrared_sender_for_front_observation_module_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee8] = "Starter_generator_control_module_sub_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ee9] = "Media_player_1_sub_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eea] = "Media_player_2_sub_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eeb] = "Dedicated_short_range_communication_aerial_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eec] = "Refrigerant_pressure_and_temperature_sender_4_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eed] = "Refrigerant_pressure_and_temperature_sender_5_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eee] = "Refrigerant_pressure_and_temperature_sender_6_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eef] = "Air_coolant_actuator_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef0] = "Air_coolant_actuator_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef1] = "Cell_module_controller_13_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef2] = "Cell_module_controller_14_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef3] = "Cell_module_controller_15_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef5] = "Seat_heating_rear_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef6] = "LED_warning_indicator_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef7] = "Automatic_transmission_fluid_pump_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef8] = "Manual_transmission_fluid_pump_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6ef9] = "Convenience_and_driver_assist_operating_unit_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6efb] = "Air_coolant_actuator_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6efc] = "Valve_block_4_in_driver_side_rear_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6efd] = "Valve_block_4_in_passenger_side_rear_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6efe] = "Valve_block_4_in_driver_side_front_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6eff] = "Valve_block_4_in_passenger_side_front_seat_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f01] = "Rear_climatronic_operating_and_display_unit_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f02] = "Refrigerant_expansion_valve_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f03] = "Refrigerant_expansion_valve_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f04] = "Refrigerant_expansion_valve_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f05] = "Refrigerant_shut_off_valve_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f06] = "Refrigerant_shut_off_valve_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f07] = "Refrigerant_shut_off_valve_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f08] = "Refrigerant_shut_off_valve_4_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f09] = "Refrigerant_shut_off_valve_5_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f0a] = "Sunlight_sensor_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f0b] = "Near_field_communication_control_module_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f0c] = "Clutch_control_unit_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f0d] = "Electrical_charger_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f0e] = "Rear_light_left_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f0f] = "Rear_light_right_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f10] = "Rear_light_right_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f11] = "Sunlight_sensor_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f12] = "Radiator_shutter_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f13] = "Radiator_shutter_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f14] = "Radiator_shutter_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f15] = "Radiator_shutter_4_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f18] = "Special_key_operating_unit_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f19] = "Radio_interface_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f1a] = "Video_self_protection_recorder_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f1b] = "Special_vehicle_assist_interface_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f1c] = "Electric_system_disconnection_diode_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f1d] = "Cradle_rear_climatronic_operating_and_display_unit_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f1e] = "Belt_pretensioner_2nd_row_left_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f1f] = "Belt_pretensioner_2nd_row_right_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f20] = "Electrical_variable_camshaft_phasing_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f21] = "Electrical_variable_camshaft_phasing_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f22] = "Wireless_operating_unit_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f23] = "Wireless_operating_unit_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f24] = "Front_windshield_washer_pump_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f25] = "Air_quality_sensor_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f26] = "Fragrancing_system_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f27] = "Coolant_valve_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f28] = "Near_field_communication_control_module_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f29] = "Interior_monitoring_rear_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f2a] = "Cooler_fan_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f2b] = "Control_unit_heating_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f2c] = "Control_unit_heating_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f2d] = "Control_unit_heating_3_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f2e] = "Control_unit_heating_4_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f2f] = "Operating_unit_drive_mode_selection_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f30] = "Side_sensor_a-pillar_driver_front_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f31] = "Side_sensor_a-pillar_passenger_front_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f32] = "Sensor_high_voltage_system_1_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f33] = "Side_sensor_b-pillar_driver_front_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f34] = "Side_sensor_b-pillar_passenger_front_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f35] = "Multi_function_steering_wheel_control_module_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f36] = "Gear_selection_display_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f37] = "Cooler_fan_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f38] = "Gear_selector_control_module_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f39] = "Interior_light_module_2_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f3a] = "Radio_control_center_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f3b] = "Multimedia_extension_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f3c] = "Control_unit_differential_lock_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f3d] = "Control_unit_ride_control_system_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f3e] = "Control_unit_hands_on_detection_steering_wheel_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f3f] = "Front_climatronic_operating_and_display_unit_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f40] = "Auxiliary_display_unit_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f41] = "Card_reader_tv_tuner_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f42] = "Park_lock_actuator_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f43] = "Media_connector_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0x6f44] = "Catalyst_heating_VW_Slave_FAZIT_string"
UDS_RDBI.dataIdentifiers[0xef90] = "Secure_hardware_extension_status"
UDS_RDBI.dataIdentifiers[0xf15a] = "Fingerprint"
UDS_RDBI.dataIdentifiers[0xf15b] = "Fingerprint And Programming Date Of Logical Software Blocks"
UDS_RDBI.dataIdentifiers[0xf17c] = "VW FAZIT Identification String"
UDS_RDBI.dataIdentifiers[0xf186] = "Active Diagnostic Session"
UDS_RDBI.dataIdentifiers[0xf187] = "VW Spare Part Number"
UDS_RDBI.dataIdentifiers[0xf189] = "VW Application Software Version Number"
UDS_RDBI.dataIdentifiers[0xf18a] = "System Supplier Identifier"
UDS_RDBI.dataIdentifiers[0xf18c] = "ECU Serial Number"
UDS_RDBI.dataIdentifiers[0xf190] = "Vehicle Identification Number"
UDS_RDBI.dataIdentifiers[0xf191] = "VW ECU Hardware Number"
UDS_RDBI.dataIdentifiers[0xf192] = "System Supplier ECU Hardware Number"
UDS_RDBI.dataIdentifiers[0xf193] = "System Supplier ECU Hardware Version Number"
UDS_RDBI.dataIdentifiers[0xf194] = "System Supplier ECU Software Number"
UDS_RDBI.dataIdentifiers[0xf195] = "System Supplier ECU Software Version Number"
UDS_RDBI.dataIdentifiers[0xf197] = "VW System Name Or Engine Type"
UDS_RDBI.dataIdentifiers[0xf19e] = "ASAM ODX File Identifier"
UDS_RDBI.dataIdentifiers[0xf1a0] = "VW Data Set Number Or ECU Data Container Number"
UDS_RDBI.dataIdentifiers[0xf1a1] = "VW Data Set Version Number"
UDS_RDBI.dataIdentifiers[0xf1a2] = "ASAM ODX File Version"
UDS_RDBI.dataIdentifiers[0xf1a3] = "VW ECU Hardware Version Number"
UDS_RDBI.dataIdentifiers[0xf1aa] = "VW Workshop System Name"
UDS_RDBI.dataIdentifiers[0xf1ab] = "VW Logical Software Block Version"
UDS_RDBI.dataIdentifiers[0xf1ad] = "Engine Code Letters"
UDS_RDBI.dataIdentifiers[0xf1af] = "AUTOSAR_standard_application_software_identification"
UDS_RDBI.dataIdentifiers[0xf1b0] = "VWClear_diagnostic_information_date_functional"
UDS_RDBI.dataIdentifiers[0xf1b1] = "VW_Application_data_set_identification"
UDS_RDBI.dataIdentifiers[0xf1b2] = "Function_software_identification"
UDS_RDBI.dataIdentifiers[0xf1b3] = "VW_Data_set_name"
UDS_RDBI.dataIdentifiers[0xf1b5] = "Busmaster_description"
UDS_RDBI.dataIdentifiers[0xf1b6] = "System_identification"
UDS_RDBI.dataIdentifiers[0xf1b7] = "Gateway_component_list_ECU_node_address"
UDS_RDBI.dataIdentifiers[0xf1d5] = "FDS_project_data"
UDS_RDBI.dataIdentifiers[0xf1df] = "ECU Programming Information"
UDS_RC.routineControlTypes[0x0202] = "Check Memory"
UDS_RC.routineControlTypes[0x0203] = "Check Programming Preconditions"
UDS_RC.routineControlTypes[0x0317] = "Reset of Adaption Values"
UDS_RC.routineControlTypes[0x0366] = "Reset of all Adaptions"
UDS_RC.routineControlTypes[0x03e7] = "Reset to Factory Settings"
UDS_RC.routineControlTypes[0x045a] = "Clear user defined DTC information"
UDS_RC.routineControlTypes[0x0544] = "Verify partial software checksum"
UDS_RC.routineControlTypes[0x0594] = "Check upload preconditions"
UDS_RC.routineControlTypes[0xff00] = "Erase Memory"
UDS_RC.routineControlTypes[0xff01] = "Check Programming Dependencies"
UDS_RD.dataFormatIdentifiers[0x0000] = "Uncompressed"
UDS_RD.dataFormatIdentifiers[0x0001] = "Compression Method 1"
UDS_RD.dataFormatIdentifiers[0x0002] = "Compression Method 2"
UDS_RD.dataFormatIdentifiers[0x0003] = "Compression Method 3"
UDS_RD.dataFormatIdentifiers[0x0004] = "Compression Method 4"
UDS_RD.dataFormatIdentifiers[0x0005] = "Compression Method 5"
UDS_RD.dataFormatIdentifiers[0x0006] = "Compression Method 6"
UDS_RD.dataFormatIdentifiers[0x0007] = "Compression Method 7"
UDS_RD.dataFormatIdentifiers[0x0008] = "Compression Method 8"
UDS_RD.dataFormatIdentifiers[0x0009] = "Compression Method 9"
UDS_RD.dataFormatIdentifiers[0x000a] = "Compression Method 10"
UDS_RD.dataFormatIdentifiers[0x000b] = "Compression Method 11"
UDS_RD.dataFormatIdentifiers[0x000c] = "Compression Method 12"
UDS_RD.dataFormatIdentifiers[0x000d] = "Compression Method 13"
UDS_RD.dataFormatIdentifiers[0x000e] = "Compression Method 14"
UDS_RD.dataFormatIdentifiers[0x000f] = "Compression Method 15"
| 85.566855
| 125
| 0.89308
| 35,819
| 272,616
| 6.167174
| 0.100673
| 0.099596
| 0.312917
| 0.244887
| 0.83705
| 0.778164
| 0.676789
| 0.562068
| 0.438547
| 0.333967
| 0
| 0.053454
| 0.036883
| 272,616
| 3,185
| 126
| 85.593721
| 0.787882
| 0.001203
| 0
| 0
| 0
| 0
| 0.557257
| 0.538846
| 0
| 0
| 0.06981
| 0
| 0
| 1
| 0
| true
| 0.070369
| 0.000316
| 0
| 0.000316
| 0.000631
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
56c8f0b84ffdeed3f137158923a6968118bac402
| 33
|
py
|
Python
|
python/testData/copyPaste/Dictionary.after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/copyPaste/Dictionary.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/copyPaste/Dictionary.after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# Three module-level bindings: two plain integers bracketing a
# single-entry dictionary (values unchanged from the original).
a = 1
d = dict(a=1)
b = 2
| 4.714286
| 11
| 0.212121
| 7
| 33
| 1
| 0.714286
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 0.515152
| 33
| 7
| 12
| 4.714286
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
56ed1e106f24d26fef5cf8e6318ac127c64db91d
| 204
|
py
|
Python
|
concepts/OO_class/Counter.py
|
A-Kastner/101repo
|
692a1a967b1d0f6db0c64739f9c13d68a6a6ae17
|
[
"MIT"
] | 15
|
2015-04-23T02:43:22.000Z
|
2021-12-07T13:39:26.000Z
|
concepts/OO_class/Counter.py
|
A-Kastner/101repo
|
692a1a967b1d0f6db0c64739f9c13d68a6a6ae17
|
[
"MIT"
] | 4
|
2021-12-02T15:53:30.000Z
|
2022-02-09T22:54:15.000Z
|
concepts/OO_class/Counter.py
|
A-Kastner/101repo
|
692a1a967b1d0f6db0c64739f9c13d68a6a6ae17
|
[
"MIT"
] | 14
|
2015-06-04T10:05:20.000Z
|
2021-03-08T12:20:26.000Z
|
class Counter:
    """A minimal tally that starts at zero and counts upward."""

    def __init__(self):
        # Current tally; deliberately a public attribute, as callers
        # of the original read it directly.
        self.count = 0

    def __repr__(self):
        # Shown as the bare number, e.g. repr(Counter()) == "0".
        return str(self.count)

    def inc(self):
        """Advance the tally by one."""
        self.count += 1

    def reset(self):
        """Return the tally to zero."""
        self.count = 0
| 20.4
| 30
| 0.54902
| 27
| 204
| 3.851852
| 0.481481
| 0.346154
| 0.375
| 0.269231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.338235
| 204
| 9
| 31
| 22.666667
| 0.748148
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0
| 0.111111
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
711640595ed3766180c27bce51fb6cbcc2720557
| 5,564
|
py
|
Python
|
tests/test_hpoo_rest.py
|
rubenjf/oo_cli
|
c315aaac037bc281a7d8639cc0a7d780cbd4545c
|
[
"MIT"
] | null | null | null |
tests/test_hpoo_rest.py
|
rubenjf/oo_cli
|
c315aaac037bc281a7d8639cc0a7d780cbd4545c
|
[
"MIT"
] | null | null | null |
tests/test_hpoo_rest.py
|
rubenjf/oo_cli
|
c315aaac037bc281a7d8639cc0a7d780cbd4545c
|
[
"MIT"
] | null | null | null |
import unittest
from mock import Mock
from oo_client.hpoo import OORestCaller
import oo_client.errors as errors
import requests
class TestHPOORest(unittest.TestCase):
def setUp(self):
self.mock_reqs = Mock()
requests.Session = self.mock_reqs
def test_post(self):
mock_session = Mock()
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = '{"aaa": "aaa"}'
mock_config = {'post.return_value': mock_response}
mock_session.configure_mock(**mock_config)
self.mock_reqs.return_value = mock_session
rest = OORestCaller("https://blah:1234", "aa", "bb")
assert rest.session.auth == ("aa", "bb")
assert rest.session.verify is True
ret = rest.post('some-path')
url = "https://blah:1234/oo/rest/v1/some-path"
mock_session.post.assert_called_with(url, None)
self.assertEquals(ret, {'aaa': 'aaa'})
def test_post_file(self):
mock_session = Mock()
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = '{"aaa": "aaa"}'
mock_config = {'post.return_value': mock_response}
mock_session.configure_mock(**mock_config)
self.mock_reqs.return_value = mock_session
rest = OORestCaller("https://blah:1234", "aa", "bb")
assert rest.session.auth == ("aa", "bb")
assert rest.session.verify is True
ret = rest.post('some-path', files=['some_file', 'some_other_file'])
url = "https://blah:1234/oo/rest/v1/some-path"
mock_session.post.assert_called_with(url,
files=['some_file',
'some_other_file'])
self.assertEquals(ret, {'aaa': 'aaa'})
def test_post_json(self):
mock_session = Mock()
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = '{"aaa": "aaa"}'
mock_config = {'post.return_value': mock_response}
mock_session.configure_mock(**mock_config)
self.mock_reqs.return_value = mock_session
rest = OORestCaller("https://blah:1234", "aa", "bb")
assert rest.session.auth == ("aa", "bb")
assert rest.session.verify is True
ret = rest.post('some-path', data={'some': 'data'})
url = "https://blah:1234/oo/rest/v1/some-path"
mock_session.post.assert_called_with(url,
'{"some": "data"}')
self.assertEquals(ret, {'aaa': 'aaa'})
def test_post_error(self):
mock_session = Mock()
mock_response = Mock()
mock_response.status_code = 400
mock_response.text = '{"aaa": "aaa"}'
mock_config = {'post.return_value': mock_response}
mock_session.configure_mock(**mock_config)
self.mock_reqs.return_value = mock_session
rest = OORestCaller("https://blah:1234", "aa", "bb")
assert rest.session.auth == ("aa", "bb")
assert rest.session.verify is True
with self.assertRaises(errors.HTTPNon200):
rest.post('some-path', Mock())
def test_put(self):
mock_session = Mock()
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = '{"aaa": "aaa"}'
mock_config = {'put.return_value': mock_response}
mock_session.configure_mock(**mock_config)
self.mock_reqs.return_value = mock_session
rest = OORestCaller("https://blah:1234", "aa", "bb")
assert rest.session.auth == ("aa", "bb")
assert rest.session.verify is True
mock_data = Mock()
rest.put('some-path', mock_data)
url = "https://blah:1234/oo/rest/v1/some-path"
mock_session.put.assert_called_with(url, mock_data)
def test_put_error(self):
mock_session = Mock()
mock_response = Mock()
mock_response.status_code = 400
mock_response.text = '{"aaa": "aaa"}'
mock_config = {'put.return_value': mock_response}
mock_session.configure_mock(**mock_config)
self.mock_reqs.return_value = mock_session
rest = OORestCaller("https://blah:1234", "aa", "bb")
with self.assertRaises(errors.HTTPNon200):
rest.put('some-path', Mock())
def test_get(self):
mock_session = Mock()
mock_response = Mock()
mock_response.status_code = 200
mock_response.text = '{"aaa": "aaa"}'
mock_response.headers = {}
mock_config = {'get.return_value': mock_response}
mock_session.configure_mock(**mock_config)
self.mock_reqs.return_value = mock_session
rest = OORestCaller("https://blah:1234", "aa", "bb")
assert rest.session.auth == ("aa", "bb")
assert rest.session.verify is True
ret = rest.get('some-path')
self.assertEquals(ret, {'aaa': 'aaa'})
url = "https://blah:1234/oo/rest/v1/some-path"
mock_session.get.assert_called_with(url, params={})
def test_get_error(self):
mock_session = Mock()
mock_response = Mock()
mock_response.status_code = 400
mock_response.text = '{"aaa": "aaa"}'
mock_config = {'get.return_value': mock_response}
mock_session.configure_mock(**mock_config)
self.mock_reqs.return_value = mock_session
rest = OORestCaller("https://blah:1234", "aa", "bb")
with self.assertRaises(errors.HTTPNon200):
rest.get('some-path')
def tearDown(self):
    # No per-test cleanup is needed here; mock patching appears to be
    # managed outside this method — TODO confirm against setUp.
    pass
| 39.460993
| 76
| 0.604781
| 675
| 5,564
| 4.757037
| 0.097778
| 0.123326
| 0.079726
| 0.05232
| 0.878231
| 0.85176
| 0.823108
| 0.823108
| 0.789474
| 0.789474
| 0
| 0.021813
| 0.258447
| 5,564
| 140
| 77
| 39.742857
| 0.756423
| 0
| 0
| 0.72
| 0
| 0
| 0.142703
| 0
| 0
| 0
| 0
| 0
| 0.192
| 1
| 0.08
| false
| 0.008
| 0.04
| 0
| 0.128
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
713dadf419fa2346aebf70a12dda5be5190a98bf
| 1,239
|
py
|
Python
|
python3/problem8.py
|
gideondsouza/projecteuler-multilingual
|
83ff2bcc24009f608804df3e7be6377957bb4cfb
|
[
"MIT"
] | null | null | null |
python3/problem8.py
|
gideondsouza/projecteuler-multilingual
|
83ff2bcc24009f608804df3e7be6377957bb4cfb
|
[
"MIT"
] | null | null | null |
python3/problem8.py
|
gideondsouza/projecteuler-multilingual
|
83ff2bcc24009f608804df3e7be6377957bb4cfb
|
[
"MIT"
] | null | null | null |
#import pdb # had to debug this a bit :/
NUMBER = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
MAX = 0
def get_prod(s):
    """Return the product of the decimal digits in string s (1 for empty s)."""
    product = 1
    for digit in s:
        product *= int(digit)
    return product
# Slide a 13-digit window across NUMBER and keep the largest digit product.
# The builtin max() over a generator replaces the manual running-maximum
# loop; range(len(NUMBER) - 12) covers exactly the same windows as
# range(0, (len(NUMBER) - 13) + 1).
MAX = max(get_prod(NUMBER[i:i + 13]) for i in range(len(NUMBER) - 12))
print(MAX)
| 61.95
| 1,011
| 0.91364
| 51
| 1,239
| 22.156863
| 0.568627
| 0.012389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.860803
| 0.054883
| 1,239
| 19
| 1,012
| 65.210526
| 0.104184
| 0.03067
| 0
| 0
| 0
| 0
| 0.834028
| 0.834028
| 0
| 1
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0
| 0
| 0.166667
| 0.083333
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a42b9544e4169a60fb8596316937a50b345207a6
| 135
|
py
|
Python
|
silhouette/utils.py
|
yun548/django-silhouette
|
117cdb0188ae727e8672c552d0b22eeb8294e931
|
[
"MIT"
] | 6
|
2015-02-28T10:34:22.000Z
|
2015-04-16T13:06:28.000Z
|
silhouette/utils.py
|
yun548/django-silhouette
|
117cdb0188ae727e8672c552d0b22eeb8294e931
|
[
"MIT"
] | 5
|
2015-06-03T10:30:41.000Z
|
2017-05-19T15:04:41.000Z
|
silhouette/utils.py
|
OohlaLabs/django-silhouette
|
4ad8968b8bf331744bf34fb7091789749a0d2b23
|
[
"MIT"
] | 3
|
2016-04-24T11:10:39.000Z
|
2020-04-05T09:49:53.000Z
|
import re
def normalize(name):
    """Convert a CamelCase-style name to snake_case.

    An underscore is inserted before each uppercase letter or digit 1-9
    that either follows a lowercase letter or precedes a non-uppercase,
    non-final character; the result is stripped of edge underscores and
    lowercased.
    """
    boundary = re.compile(r'(((?<=[a-z])[A-Z1-9])|([A-Z1-9](?![A-Z1-9]|$)))')
    return boundary.sub(r'_\1', name).strip('_').lower()
| 22.5
| 101
| 0.488889
| 23
| 135
| 2.782609
| 0.608696
| 0.140625
| 0.1875
| 0.15625
| 0.1875
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0.057851
| 0.103704
| 135
| 5
| 102
| 27
| 0.471074
| 0
| 0
| 0
| 0
| 0.333333
| 0.385185
| 0.348148
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 9
|
a490c0b5e9c6fe1d9dce85545a657d2b781628fc
| 195
|
py
|
Python
|
cracking_the_coding_interview_qs/5.4/print_shift_test.py
|
angelusualle/algorithms
|
86286a49db2a755bc57330cb455bcbd8241ea6be
|
[
"Apache-2.0"
] | null | null | null |
cracking_the_coding_interview_qs/5.4/print_shift_test.py
|
angelusualle/algorithms
|
86286a49db2a755bc57330cb455bcbd8241ea6be
|
[
"Apache-2.0"
] | null | null | null |
cracking_the_coding_interview_qs/5.4/print_shift_test.py
|
angelusualle/algorithms
|
86286a49db2a755bc57330cb455bcbd8241ea6be
|
[
"Apache-2.0"
] | null | null | null |
from print_shift import print_shift
import unittest
class Test_Case_Unit_Test_Print_Shift(unittest.TestCase):
    """Unit test for the print_shift helper."""

    def test_print_shift(self):
        # print_shift(5) is expected to report the pair (6, 3).
        expected = (6, 3)
        self.assertTupleEqual(print_shift(5), expected)
| 32.5
| 57
| 0.794872
| 29
| 195
| 5
| 0.551724
| 0.344828
| 0.22069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.123077
| 195
| 6
| 58
| 32.5
| 0.830409
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0.6
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
74d52bcf9a9c2a1c44529074371b471eb33266e0
| 801
|
py
|
Python
|
chapter6-layer-and-network/pooling.py
|
JeremXu/MXNet-Deep-Learning-in-Action
|
a069e8c75f0799e3be80cd27fdeb67531c7df3dd
|
[
"Apache-2.0"
] | 110
|
2018-11-21T11:34:41.000Z
|
2022-03-06T06:18:28.000Z
|
chapter6-layer-and-network/pooling.py
|
stefanjoe/MXNet-Deep-Learning-in-Action
|
a069e8c75f0799e3be80cd27fdeb67531c7df3dd
|
[
"Apache-2.0"
] | 5
|
2019-03-06T07:37:25.000Z
|
2019-10-26T03:39:17.000Z
|
chapter6-layer-and-network/pooling.py
|
stefanjoe/MXNet-Deep-Learning-in-Action
|
a069e8c75f0799e3be80cd27fdeb67531c7df3dd
|
[
"Apache-2.0"
] | 52
|
2019-02-01T08:02:09.000Z
|
2021-12-19T12:25:36.000Z
|
import mxnet as mx

# Demo of MXNet pooling variants on a fixed 1x2x5x5 input.
input_data = mx.nd.arange(1, 51).reshape((1, 2, 5, 5))
print("Input data:")
print(input_data)

# The three original cases differed only in pool_type / global_pool, so the
# triplicated Pooling+print code is collapsed into one data-driven loop.
# Output order and text are identical to the original script.
for label, pool_type, global_pool in [
    ("Max pooling result:", 'max', 0),
    ("Avg pooling result:", 'avg', 0),
    ("Global max pooling result:", 'max', 1),
]:
    out_data = mx.nd.Pooling(data=input_data, kernel=(2, 2), pool_type=pool_type,
                             global_pool=global_pool, pooling_convention='valid',
                             stride=(1, 1), pad=(0, 0))
    print(label)
    print(out_data)
| 38.142857
| 73
| 0.610487
| 122
| 801
| 3.844262
| 0.229508
| 0.115139
| 0.06823
| 0.083156
| 0.818763
| 0.818763
| 0.752665
| 0.752665
| 0.752665
| 0.752665
| 0
| 0.044872
| 0.220974
| 801
| 20
| 74
| 40.05
| 0.706731
| 0
| 0
| 0.526316
| 0
| 0
| 0.12375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.052632
| 0.421053
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
74d81852cb404dcd30366239c5efda808edd80be
| 209
|
py
|
Python
|
hmt/serve/mock/scope.py
|
dfioravanti/hmt
|
df79404076ec7acea0cfb12b636d58e3ffc83bc5
|
[
"MIT"
] | 25
|
2020-05-14T13:25:42.000Z
|
2021-11-09T10:09:27.000Z
|
hmt/serve/mock/scope.py
|
dfioravanti/hmt
|
df79404076ec7acea0cfb12b636d58e3ffc83bc5
|
[
"MIT"
] | 19
|
2020-05-05T19:47:41.000Z
|
2021-02-05T17:06:53.000Z
|
hmt/serve/mock/scope.py
|
dfioravanti/hmt
|
df79404076ec7acea0cfb12b636d58e3ffc83bc5
|
[
"MIT"
] | 6
|
2020-05-16T10:02:48.000Z
|
2021-10-04T08:03:49.000Z
|
class Scope:
    """Mutable holder for a single optional name.

    Starts empty (None); supports set/get/clear accessors.
    """

    def __init__(self):
        self._name = None

    def set(self, name):
        """Store *name* as the current scope name."""
        self._name = name

    def get(self):
        """Return the current scope name, or None when unset."""
        return self._name

    def clear(self):
        """Reset the scope back to the unset state."""
        self.set(None)
| 16.076923
| 25
| 0.550239
| 27
| 209
| 3.962963
| 0.407407
| 0.373832
| 0.224299
| 0.299065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.349282
| 209
| 12
| 26
| 17.416667
| 0.786765
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0
| 0.111111
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
77cd86da6a89629c942162831414e3d7e307a7bb
| 156
|
py
|
Python
|
open-codegen/opengen/functions/is_symbolic.py
|
jgillis/optimization-engine
|
2952af47891204d3cd080a8e7f71e616ac022e52
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
open-codegen/opengen/functions/is_symbolic.py
|
jgillis/optimization-engine
|
2952af47891204d3cd080a8e7f71e616ac022e52
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
open-codegen/opengen/functions/is_symbolic.py
|
jgillis/optimization-engine
|
2952af47891204d3cd080a8e7f71e616ac022e52
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
import casadi.casadi as cs
def is_symbolic(u):
    """Return True when u is one of the CasADi matrix types (SX, MX or DM)."""
    # isinstance with a tuple is equivalent to the original chained `or`s.
    return isinstance(u, (cs.SX, cs.MX, cs.DM))
| 17.333333
| 36
| 0.589744
| 24
| 156
| 3.791667
| 0.583333
| 0.362637
| 0.428571
| 0.32967
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294872
| 156
| 8
| 37
| 19.5
| 0.827273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
77f14cd719407157db3f9766e9fa77a5bd81c874
| 155
|
py
|
Python
|
hardware/__init__.py
|
ThomasGerstenberg/sublime3-serial-monitor
|
6e25172aca9ad755b8ec2f7e3efc5664ce35ed7e
|
[
"BSD-3-Clause"
] | 10
|
2016-02-12T08:44:49.000Z
|
2018-08-29T21:34:49.000Z
|
hardware/__init__.py
|
ThomasGerstenberg/sublime3-serial-monitor
|
6e25172aca9ad755b8ec2f7e3efc5664ce35ed7e
|
[
"BSD-3-Clause"
] | 30
|
2015-08-31T18:56:31.000Z
|
2018-12-04T02:55:02.000Z
|
hardware/__init__.py
|
ThomasGerstenberg/sublime3-serial-monitor
|
6e25172aca9ad755b8ec2f7e3efc5664ce35ed7e
|
[
"BSD-3-Clause"
] | 6
|
2016-01-21T03:40:28.000Z
|
2021-12-10T08:13:57.000Z
|
import os
import sys
import sublime

# Make this package directory and its bundled "serial" library importable.
# The directory is computed once instead of twice (was duplicated).
_here = os.path.dirname(__file__)
sys.path.append(_here)
sys.path.append(os.path.join(_here, "serial"))
| 19.375
| 67
| 0.735484
| 24
| 155
| 4.416667
| 0.416667
| 0.169811
| 0.245283
| 0.283019
| 0.358491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116129
| 155
| 7
| 68
| 22.142857
| 0.773723
| 0
| 0
| 0
| 0
| 0
| 0.040816
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
7ae525b14c33cb9145488ebb74bda319584c6677
| 9,521
|
py
|
Python
|
utils/cococrab_plots.py
|
AndrewFalkowski/CoCoCrab
|
3e317c67d1cd6882ade01ef6395d5dad0d9658a1
|
[
"MIT"
] | 3
|
2021-12-09T07:43:39.000Z
|
2022-01-14T07:25:24.000Z
|
utils/cococrab_plots.py
|
AndrewFalkowski/CoCoCrab
|
3e317c67d1cd6882ade01ef6395d5dad0d9658a1
|
[
"MIT"
] | null | null | null |
utils/cococrab_plots.py
|
AndrewFalkowski/CoCoCrab
|
3e317c67d1cd6882ade01ef6395d5dad0d9658a1
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.cm as cm
from matplotlib.ticker import AutoMinorLocator
from matplotlib.colors import Normalize
import matplotlib.gridspec as gridspec
from utils.ascension_utils import elem_lookup
from .composition import _element_composition
from scipy import stats
import seaborn as sns
plt.rcParams.update({'font.size': 14})
#%%
def property_optim_plot(optim_frac_df, prop0, prop1, save_dir=None):
    '''
    Plot the response of the optimized properties across optimization epochs.

    Parameters
    ----------
    optim_frac_df : Pandas DataFrame
        Pandas DataFrame produced by calling CoCoCrab.optimize
    prop0 : str
        Optimized property to plot (right axis)
    prop1 : str
        Second optimized property to plot (left axis); pass 'Loss' to plot
        the loss curve instead.
    save_dir : str, optional
        The directory to save produced plots to. The default is None
        (the plot is drawn but not saved).

    Returns
    -------
    None. Draws a plot of property response to changes in elemental
    fractions, optionally saving it as a PNG.
    '''
    colors = sns.color_palette('mako', 2)
    fig, ax1 = plt.subplots()
    epochs = optim_frac_df.index.values
    # Left axis: prop1 (or the loss curve when prop1 == 'Loss').
    color = colors[1]
    ax1.set_xlabel('Epoch')
    ax1.set_ylabel(f'{prop1}', color=color)
    if not prop1 == 'Loss':
        # Error bars come from the matching '<prop> UNC' uncertainty column.
        ax1.errorbar(epochs, optim_frac_df[f'{prop1}'],
                     yerr=optim_frac_df[f'{prop1} UNC'],
                     color=color, mec='k', alpha=0.35, marker='s')
    else:
        ax1.plot(optim_frac_df['Loss'], color=color, mec='k', alpha=0.35, marker='s')
    ax1.tick_params(axis='y', labelcolor=color)
    ax1.tick_params(direction='in',
                    length=7, top=True, right=True)
    minor_locator_x = AutoMinorLocator(2)
    minor_locator_y = AutoMinorLocator(2)
    ax1.get_xaxis().set_minor_locator(minor_locator_x)
    ax1.get_yaxis().set_minor_locator(minor_locator_y)
    plt.tick_params(which='minor',
                    direction='in', labelcolor=color,
                    length=4,
                    right=True,
                    top=True)
    ax2 = ax1.twinx()  # instantiate a second axes that shares the same x-axis
    # Right axis: prop0, always drawn with uncertainty bars.
    color = colors[0]
    ax2.set_ylabel(f'{prop0}', color=color)
    ax2.errorbar(epochs, optim_frac_df[f'{prop0}'],
                 yerr=optim_frac_df[f'{prop0} UNC'],
                 color=color, mec='k', alpha=0.35, marker='o')
    ax2.tick_params(axis='y', labelcolor=color, direction='in',
                    length=7)
    # ax2.set_ylim(120,170)
    minor_locator_x = AutoMinorLocator(2)
    minor_locator_y = AutoMinorLocator(2)
    ax2.get_xaxis().set_minor_locator(minor_locator_x)
    ax2.get_yaxis().set_minor_locator(minor_locator_y)
    plt.tick_params(which='minor',
                    direction='in', labelcolor=color,
                    length=4,
                    right=True,
                    top=True)
    plt.draw()
    if save_dir is not None:
        # File name joins the entries of optim_frac_df.iloc[0,0] with '-'
        # (the element list for the run — see element_optim_plot).
        delim = '-'
        fig_name = f'{save_dir}/{delim.join(optim_frac_df.iloc[0,0])}_property_optimization.png'
        os.makedirs(save_dir, exist_ok=True)
        figure = plt.gcf()
        figure.set_size_inches(5,5)
        plt.savefig(fig_name, dpi=300)
    plt.draw()
    plt.pause(0.001)
    plt.close()
def element_optim_plot(optim_frac_df, save_dir=None):
    '''
    Plot the change in atomic percent of each element across epochs.

    Parameters
    ----------
    optim_frac_df : Pandas DataFrame
        Pandas DataFrame produced by calling CoCoCrab.optimize
    save_dir : str, optional
        The directory to save produced plots to. The default is None
        (the plot is drawn but not saved).

    Returns
    -------
    None. Draws a plot of changes in atomic percent of elements during
    optimization, optionally saving it as a PNG.
    '''
    colors = sns.color_palette()
    # First cell holds the element symbols used as the line labels below.
    elems = optim_frac_df.iloc[0,0]
    num_elems = int(len(optim_frac_df.iloc[0,0]))
    # Stack the per-epoch fraction vectors into shape (epochs, num_elems)
    # and convert fractions to atomic percent.
    atom_percent = (np.concatenate(optim_frac_df['Fractions'].values)\
                    .reshape(-1,num_elems))*100
    # NOTE(review): this figure is immediately superseded by plt.subplots()
    # on the next line, leaving an unused open figure — confirm whether the
    # plt.figure call can be removed.
    fig = plt.figure(figsize=(5,5))
    fig, ax1 = plt.subplots()
    for elem in range(num_elems):
        # NOTE(review): linestyle=None falls back to the rcParams default
        # (solid line); if markers-only was intended that is linestyle='None'.
        plt.plot(atom_percent[:,elem], linestyle=None, marker='s',
                 color=colors[elem], alpha=0.35, mec='k', label = f'{elems[elem]}')
    plt.legend(loc='upper left', framealpha=0.95)
    ax1.yaxis.set_label_position("right")
    ax1.yaxis.tick_right()
    ax1.tick_params(direction='in', length=7, top=True, right=True, left=True)
    minor_locator_x = AutoMinorLocator(2)
    minor_locator_y = AutoMinorLocator(2)
    ax1.get_xaxis().set_minor_locator(minor_locator_x)
    ax1.get_yaxis().set_minor_locator(minor_locator_y)
    plt.tick_params(which='minor',
                    direction='in',
                    length=4,
                    right=True,
                    left=True,
                    top=True)
    plt.ylim(0, 100)
    plt.xlabel('Epoch')
    plt.ylabel('Atomic Percent (%)')
    plt.draw()
    if save_dir is not None:
        delim = '-'
        fig_name = f'{save_dir}/{delim.join(optim_frac_df.iloc[0,0])}_element_fractions.png'
        os.makedirs(save_dir, exist_ok=True)
        figure = plt.gcf()
        figure.set_size_inches(5,5)
        plt.savefig(fig_name, dpi=300)
    plt.draw()
    plt.pause(0.001)
    plt.close()
def two_panel_optim(optim_frac_df, prop0, prop1, save_dir):
    '''
    Draw the two-panel optimization figure used in the corresponding paper.

    Panel (a) shows the predicted property/loss curves; panel (b) shows the
    atomic-percent trajectories of each element.

    Parameters
    ----------
    optim_frac_df : Pandas DataFrame
        Pandas DataFrame produced by calling CoCoCrab.optimize
    prop0 : str
        Optimized property to plot
    prop1 : str
        Second optimized property to plot; pass 'Loss' to plot the loss curve.
    save_dir : str, optional
        The directory to save produced plots to. The default is None.

    Returns
    -------
    None. Draws (and optionally saves) the two panel plot of changes in
    predicted property(ies) and atomic percent during optimization.
    '''
    fig = plt.figure(figsize=(11,5))
    # Two equal-width panels side by side.
    spec = gridspec.GridSpec(ncols=2, nrows=1, figure=fig, width_ratios=[1, 1],
                             wspace=0.35)
    # --- Panel (a): property / loss curves -------------------------------
    ax1 = fig.add_subplot(spec[0, 0])
    plt.text(0.5, 1.1, '(a)', horizontalalignment='center',
             verticalalignment='center', transform=ax1.transAxes)
    colors = sns.color_palette('mako', 2)
    epochs = optim_frac_df.index.values
    color = colors[1]
    ax1.set_xlabel('Epoch')
    # ax1.set_ylabel(f'{prop1}', color=color)
    # NOTE(review): the left-axis label is hard-coded rather than derived
    # from prop1 — confirm this is intentional for the paper figure.
    ax1.set_ylabel(f'Decomposition Energy (eV/atom)', color=color)
    if not prop1 == 'Loss':
        # Error bars come from the matching '<prop> UNC' uncertainty column.
        ax1.errorbar(epochs, optim_frac_df[f'{prop1}'],
                     yerr=optim_frac_df[f'{prop1} UNC'],
                     color=color, mec='k', alpha=0.35, marker='s')
    else:
        ax1.plot(optim_frac_df['Loss'], color=color, mec='k', alpha=0.35, marker='s')
    ax1.tick_params(axis='y', labelcolor=color)
    ax1.tick_params(direction='in',
                    length=7, top=True, right=True)
    minor_locator_x = AutoMinorLocator(2)
    minor_locator_y = AutoMinorLocator(2)
    ax1.get_xaxis().set_minor_locator(minor_locator_x)
    ax1.get_yaxis().set_minor_locator(minor_locator_y)
    plt.tick_params(which='minor',
                    direction='in', labelcolor=color,
                    length=4,
                    right=True,
                    top=True)
    ax11 = ax1.twinx()  # instantiate a second axes that shares the same x-axis
    color = colors[0]
    ax11.set_ylabel(f'{prop0}', color=color)
    ax11.errorbar(epochs, optim_frac_df[f'{prop0}'],
                  yerr=optim_frac_df[f'{prop0} UNC'],
                  color=color, mec='k', alpha=0.35, marker='o')
    ax11.tick_params(axis='y', labelcolor=color, direction='in',
                     length=7)
    minor_locator_x = AutoMinorLocator(2)
    minor_locator_y = AutoMinorLocator(2)
    ax11.get_xaxis().set_minor_locator(minor_locator_x)
    ax11.get_yaxis().set_minor_locator(minor_locator_y)
    plt.tick_params(which='minor',
                    direction='in', labelcolor=color,
                    length=4,
                    right=True,
                    top=True)
    # --- Panel (b): elemental atomic-percent trajectories ----------------
    ax2 = fig.add_subplot(spec[0, 1])
    plt.text(0.5, 1.1, '(b)', horizontalalignment='center',
             verticalalignment='center', transform=ax2.transAxes)
    colors = sns.color_palette()
    # First cell holds the element symbols used as the line labels below.
    elems = optim_frac_df.iloc[0,0]
    num_elems = int(len(optim_frac_df.iloc[0,0]))
    # Stack per-epoch fraction vectors into (epochs, num_elems), as percent.
    atom_percent = (np.concatenate(optim_frac_df['Fractions'].values)\
                    .reshape(-1,num_elems))*100
    for elem in range(num_elems):
        # NOTE(review): linestyle=None falls back to the rcParams default
        # (solid line); if markers-only was intended that is linestyle='None'.
        plt.plot(atom_percent[:,elem], linestyle=None, marker='s',
                 color=colors[elem], alpha=0.35, mec='k', label = f'{elems[elem]}')
    plt.legend(loc='upper left', ncol=2, framealpha=0.95)
    ax2.yaxis.set_label_position("right")
    ax2.yaxis.tick_right()
    ax2.tick_params(direction='in', length=7, top=True, right=True, left=True)
    minor_locator_x = AutoMinorLocator(2)
    minor_locator_y = AutoMinorLocator(2)
    ax2.get_xaxis().set_minor_locator(minor_locator_x)
    ax2.get_yaxis().set_minor_locator(minor_locator_y)
    plt.tick_params(which='minor',
                    direction='in',
                    length=4,
                    right=True,
                    left=True,
                    top=True)
    plt.ylim(0, 100)
    plt.xlabel('Epoch')
    plt.ylabel('Atomic Percent (%)')
    if save_dir is not None:
        # File name joins the element list with '-'.
        delim = '-'
        fig_name = f'{save_dir}/{delim.join(optim_frac_df.iloc[0,0])}_jointplot.png'
        os.makedirs(save_dir, exist_ok=True)
        plt.savefig(fig_name, bbox_inches='tight', dpi=300)
    plt.draw()
    plt.pause(0.001)
    plt.close()
| 36.201521
| 96
| 0.617792
| 1,271
| 9,521
| 4.449253
| 0.164437
| 0.076393
| 0.05252
| 0.04244
| 0.821397
| 0.777365
| 0.755438
| 0.746773
| 0.734925
| 0.734925
| 0
| 0.031643
| 0.256486
| 9,521
| 262
| 97
| 36.339695
| 0.767199
| 0.141897
| 0
| 0.730159
| 0
| 0
| 0.075623
| 0.025792
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015873
| false
| 0
| 0.068783
| 0
| 0.084656
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb0bfa95e2d7c40557e7e8b73afb4f6e04153637
| 75
|
py
|
Python
|
up/tasks/multitask/models/union_heads/__init__.py
|
ModelTC/EOD
|
164bff80486e9ae6a095a97667b365c46ceabd86
|
[
"Apache-2.0"
] | 196
|
2021-10-30T05:15:36.000Z
|
2022-03-30T18:43:40.000Z
|
up/tasks/multitask/models/union_heads/__init__.py
|
ModelTC/EOD
|
164bff80486e9ae6a095a97667b365c46ceabd86
|
[
"Apache-2.0"
] | 12
|
2021-10-30T11:33:28.000Z
|
2022-03-31T14:22:58.000Z
|
up/tasks/multitask/models/union_heads/__init__.py
|
ModelTC/EOD
|
164bff80486e9ae6a095a97667b365c46ceabd86
|
[
"Apache-2.0"
] | 23
|
2021-11-01T07:26:17.000Z
|
2022-03-27T05:55:37.000Z
|
from .union_retina_cls import * # noqa
from .union_fc_cls import * # noqa
| 37.5
| 39
| 0.746667
| 12
| 75
| 4.333333
| 0.583333
| 0.346154
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173333
| 75
| 2
| 40
| 37.5
| 0.83871
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
24ccab780c1812ef8272c8bad15cbb6259a19a1e
| 111,576
|
py
|
Python
|
apps/warehouse/serializes/inventory_serialize.py
|
kane-zh/MES_server
|
d8d28768a054eee6433e3900908afd331fd92281
|
[
"Apache-2.0"
] | null | null | null |
apps/warehouse/serializes/inventory_serialize.py
|
kane-zh/MES_server
|
d8d28768a054eee6433e3900908afd331fd92281
|
[
"Apache-2.0"
] | null | null | null |
apps/warehouse/serializes/inventory_serialize.py
|
kane-zh/MES_server
|
d8d28768a054eee6433e3900908afd331fd92281
|
[
"Apache-2.0"
] | null | null | null |
from rest_framework import serializers
from apps.warehouse.models.inventory_model import *
from apps.warehouse.serializes.basicinfor_serialize import *
from commonFunction import *
from django.contrib.auth import get_user_model
from apps.process.models.basicinfor_model import *
from apps.equipment.models.basicinfor_model import *
from apps.quality.models.basicinfor_model import *
from apps.quality.models.recording_model import *
from Mes import settings
User = get_user_model()
# region 库存明细定义 序列化器
class EquipmentStockDetailSerialize_List(serializers.ModelSerializer):
    """
    Equipment stock detail -- list
    """

    class Meta:
        model = EquipmentStockDetailModel
        fields = "__all__"
class PartsStockDetailSerialize_List(serializers.ModelSerializer):
    """
    Equipment parts stock detail -- list
    """

    class Meta:
        model = PartsStockDetailModel
        fields = "__all__"
class MaterialStockDetailSerialize_List(serializers.ModelSerializer):
    """
    Material stock detail -- list
    """

    class Meta:
        model = MaterialStockDetailModel
        fields = "__all__"
class ProductStockDetailSerialize_List(serializers.ModelSerializer):
    """
    Product stock detail -- list
    """

    class Meta:
        model = ProductStockDetailModel
        fields = "__all__"
class SemifinishedStockDetailSerialize_List(serializers.ModelSerializer):
    """
    Semi-finished product stock detail -- list
    """

    class Meta:
        model = SemifinishedStockDetailModel
        fields = "__all__"
# endregion
# region 库存信息定义 序列化器
class EquipmentStockInforSerialize_List(serializers.ModelSerializer):
    """
    Equipment stock information -- list
    """

    class Meta:
        model = EquipmentStockInforModel
        fields = "__all__"
class PartsStockInforSerialize_List(serializers.ModelSerializer):
    """
    Equipment parts stock information -- list
    """

    class Meta:
        model = PartsStockInforModel
        fields = "__all__"
class MaterialStockInforSerialize_List(serializers.ModelSerializer):
    """
    Material stock information -- list
    """

    class Meta:
        model = MaterialStockInforModel
        fields = "__all__"
class ProductStockInforSerialize_List(serializers.ModelSerializer):
    """
    Product stock information -- list
    """

    class Meta:
        model = ProductStockInforModel
        fields = "__all__"
class SemifinishedStockInforSerialize_List(serializers.ModelSerializer):
    """
    Semi-finished product stock information -- list
    """

    class Meta:
        model = SemifinishedStockInforModel
        fields = "__all__"
# endregion
# region 设备管理 序列化器
class EquipmentManageSerialize_Create(serializers.ModelSerializer):
    """
    Equipment management -- create
    """
    # New records always start in the "新建" (new) state; the creator is the
    # authenticated request user.
    state = serializers.HiddenField(default="新建")
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())

    class Meta:
        model = EquipmentManageModel
        fields = ("id", "name", "code", "state", "type", "position_id", "equipment_id", "handler", "sum", "dataTime",
                  "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "create_user", "auditor"
                  )

    # Cross-field validation
    def validate(self, attrs):
        if not attrs["create_user"].has_perm('warehouse.add_equipmentmanagemodel'):  # current user lacks create permission
            raise serializers.ValidationError("当前用户不具备创建权限'")
        if settings.SAME_USER != True:
            if attrs["create_user"].username == attrs["auditor"]:  # auditor account must differ from creator account
                raise serializers.ValidationError("审核帐号不能与创建帐号相同'")
        try:
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # check the specified storage position exists
        except Exception as e:
            raise serializers.ValidationError("指定的仓位不存在")
        try:
            equipment = EquipmentAccountModel.objects.get(id=attrs["equipment_id"])  # check the specified equipment exists
        except Exception as e:
            raise serializers.ValidationError("指定的设备不存在")
        if equipment.state != "使用中":  # equipment must be in the "in use" state
            raise serializers.ValidationError("指定的设备不在'使用中'状态")
        # Denormalize warehouse / position / equipment identifiers onto the record.
        attrs["warehouse_code"] = position.type.code  # warehouse code
        attrs["warehouse_name"] = position.type.name  # warehouse name
        attrs["position_code"] = position.code  # position code
        attrs["position_name"] = position.name  # position name
        attrs["equipmentType_code"] = equipment.type.code  # equipment type code
        attrs["equipmentType_name"] = equipment.type.name  # equipment type name
        attrs["equipment_code"] = equipment.code  # equipment code
        attrs["equipment_name"] = equipment.name  # equipment name
        return attrs

    # Auditor field validation
    def validate_auditor(self, value):
        try:
            auditor = User.objects.get(username=value)
        except Exception as e:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_equipmentmanagemodel'):  # auditor must hold the audit permission
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class EquipmentManageSerialize_List(serializers.ModelSerializer):
    """
    Equipment management -- list
    """

    class Meta:
        model = EquipmentManageModel
        fields = ("id", "name", "code", "state", "type", "warehouse_name", "warehouse_code", "position_code", "position_name",
                  "equipmentType_code", "equipmentType_name", "equipment_code", "equipment_name", "handler", "sum", "dataTime",
                  "auditor", "create_user", "create_time", "update_time")
class EquipmentManageSerialize_Retrieve(serializers.ModelSerializer):
    """
    Equipment management -- retrieve
    """
    # Nested warehouse alteration records attached to this item.
    alter = WarehouseAlterRecordSerialize_List(many=True)

    class Meta:
        model = EquipmentManageModel
        fields = "__all__"
class EquipmentManageSerialize_Update(serializers.ModelSerializer):
    """
    Equipment management -- update
    """

    class Meta:
        model = EquipmentManageModel
        fields = (
            "id", "name", "code", "type", "position_id", "equipment_id", "handler", "sum", "dataTime", "attribute1",
            "attribute2",
            "attribute3", "attribute4", "attribute5", "desc", "auditor", "alter")

    # Cross-field validation
    def validate(self, attrs):
        if self.instance.state != '新建':  # once submitted (no longer "new"), the record may not be changed
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        try:
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # check the specified storage position exists
        except Exception as e:
            raise serializers.ValidationError("指定的仓位不存在")
        try:
            equipment = EquipmentAccountModel.objects.get(id=attrs["equipment_id"])  # check the specified equipment exists
        except Exception as e:
            raise serializers.ValidationError("指定的设备不存在")
        if equipment.state != "使用中":  # equipment must be in the "in use" state
            raise serializers.ValidationError("指定的设备不在'使用中'状态")
        # Denormalize warehouse / position / equipment identifiers onto the record.
        attrs["warehouse_code"] = position.type.code  # warehouse code
        attrs["warehouse_name"] = position.type.name  # warehouse name
        attrs["position_code"] = position.code  # position code
        attrs["position_name"] = position.name  # position name
        attrs["equipmentType_code"] = equipment.type.code  # equipment type code
        attrs["equipmentType_name"] = equipment.type.name  # equipment type name
        attrs["equipment_code"] = equipment.code  # equipment code
        attrs["equipment_name"] = equipment.name  # equipment name
        return attrs

    # Auditor field validation
    def validate_auditor(self, value):
        if self.instance.state != '新建':  # once submitted (no longer "new"), the record may not be changed
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        if settings.SAME_USER != True:
            if self.instance.create_user == value:  # auditor account must differ from creator account
                raise serializers.ValidationError("审核帐号不能与创建帐号相同'")
        try:
            auditor = User.objects.get(username=value)
        except Exception as e:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_equipmentmanagemodel'):  # auditor must hold the audit permission
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class EquipmentManageSerialize_Partial(serializers.ModelSerializer) :
"""
设备管理--partial
"""
class Meta :
model = EquipmentManageModel
fields = ("id", "state", "alter")
# 入库操作条件判断
def storage(self, state) :
position = PositionDefinitionModel.objects.get(id=self.instance.position_id) # 获取指定的仓位信息
if state == "审核中" : # 提交情况下
if position.state != "闲置" : # 如果指定的仓位不处于‘空闲状态’
raise serializers.ValidationError("当前仓位不在‘空闲状态’")
if self.instance.sum > position.maximum : # 如果操作数量超出了仓位最大容量
raise serializers.ValidationError("操作数量超出了仓位的最大容量’")
position.state = "使用中" # 占用当前仓位(将状态置为‘使用中状态’)
position.save()
if state == "新建" : # 驳回情况下
position.state = "闲置" # 释放当前仓位(将状态置为‘空闲状态’)
position.save()
if state == "完成" : # 通过审核情况下
EquipmentStockDetailModel.objects.create( # 新建一条库存记录
state="使用中",
warehouse_code=self.instance.warehouse_code,
warehouse_name=self.instance.warehouse_name,
position_id=self.instance.position_id,
position_code=self.instance.position_code,
position_name=self.instance.position_name,
equipmentType_code=self.instance.equipmentType_code,
equipmentType_name=self.instance.equipmentType_name,
equipment_id=self.instance.equipment_id,
equipment_code=self.instance.equipment_code,
equipment_name=self.instance.equipment_name,
sum=self.instance.sum,
attribute1=self.instance.attribute1,
attribute2=self.instance.attribute2,
attribute3=self.instance.attribute3,
attribute4=self.instance.attribute4,
attribute5=self.instance.attribute5)
condtions1 = {'equipment_id__iexact' : self.instance.equipment_id,
'warehouse_code__iexact' : self.instance.warehouse_code,
}
try :
equipmentStockInfor = EquipmentStockInforModel.objects.get(**condtions1) # 获取指定的库存信息
equipmentStockInfor.sum += self.instance.sum # 更新库存数量
equipmentStockInfor.save()
except Exception as e :
EquipmentStockInforModel.objects.create( # 新建一条库存记录
warehouse_code=self.instance.warehouse_code,
warehouse_name=self.instance.warehouse_name,
equipmentType_code=self.instance.equipmentType_code,
equipmentType_name=self.instance.equipmentType_name,
equipment_id=self.instance.equipment_id,
equipment_code=self.instance.equipment_code,
equipment_name=self.instance.equipment_name,
sum=self.instance.sum,
attribute1=self.instance.attribute1,
attribute2=self.instance.attribute2,
attribute3=self.instance.attribute3,
attribute4=self.instance.attribute4,
attribute5=self.instance.attribute5)
if state == "作废" and self.instance.state == "审核中" : # 如果审核过程中报废信息
position.state = "闲置" # 释放当前仓位(将状态置为‘空闲状态’)
position.save()
# 增加操作 条件判断
def increase(self, state) :
condtions = {'state__iexact' : "使用中",
'equipment_id__iexact' : self.instance.equipment_id,
'position_id__iexact' : self.instance.position_id,
}
if state == "作废" :
return
try :
equipmentStockDetail = EquipmentStockDetailModel.objects.get(**condtions) # 获取指定的库存明细
except Exception as e :
raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
position = PositionDefinitionModel.objects.get(id=self.instance.position_id) # 获取指定的仓位信息
if state == "审核中" : # 提交情况下
if (self.instance.sum + equipmentStockDetail.sum) > position.maximum : # 如果操作数量+库存数量 超出库存数量
raise serializers.ValidationError("当前增加数量加库存数量超出仓位最大容量")
if state == "完成" : # 通过审核情况下
condtions1 = {'equipment_id__iexact' : self.instance.equipment_id,
'warehouse_code__iexact' : self.instance.warehouse_code,
}
try :
equipmentStockInfor = EquipmentStockInforModel.objects.get(**condtions1) # 获取指定的库存信息
except Exception as e :
raise serializers.ValidationError("当前库存信息与库存明细不符合")
equipmentStockInfor.sum += self.instance.sum # 更新库存数量
equipmentStockInfor.save()
equipmentStockDetail.sum += self.instance.sum # 更新库存数量
equipmentStockDetail.save()
# 出库操作 条件判断
def outbound(self, state) :
condtions = {'state__iexact' : "使用中",
'equipment_id__iexact' : self.instance.equipment_id,
'position_id__iexact' : self.instance.position_id,
}
if state == "作废" :
return
try :
equipmentStockDetail = EquipmentStockDetailModel.objects.get(**condtions) # 获取指定的库存明细
except Exception as e :
raise serializers.ValidationError("当前库存明细不存在,无法进行出库操作")
position = PositionDefinitionModel.objects.get(id=self.instance.position_id) # 获取指定的仓位信息
if state == "审核中" or state == "完成": # 提交情况下
if self.instance.sum > equipmentStockDetail.sum : # 如果操作数量超出库存数量
raise serializers.ValidationError("当前出库数量超出库存数量")
if state == "完成" : # 通过审核情况下
condtions1 = {'equipment_id__iexact' : self.instance.equipment_id,
'warehouse_code__iexact' : self.instance.warehouse_code,
}
try :
equipmentStockInfor = EquipmentStockInforModel.objects.get(**condtions1) # 获取指定的库存信息
except Exception as e :
raise serializers.ValidationError("当前库存信息与库存明细不符合")
equipmentStockInfor.sum -= self.instance.sum # 更新库存数量
equipmentStockInfor.save()
equipmentStockDetail.sum -= self.instance.sum # 更新库存数量
equipmentStockDetail.save()
if (equipmentStockDetail.sum <= 0) :
position.state = "闲置" # 释放当前仓位(将状态置为‘空闲状态’)
position.save()
equipmentStockDetail.state = "完成" # 释放当前库存明细(将状态置为‘空闲状态’)
equipmentStockDetail.save()
# 盘点操作 条件判断
def inventory(self, state) :
    """Stocktake state-transition handler for an equipment manage order.

    On approval ("完成") applies self.instance.sum as a delta to both the
    aggregate stock record and the in-use stock detail row (presumably a
    signed correction, since the detail is closed when it drops to 0 —
    TODO confirm against callers).
    """
    # Filter for the in-use stock detail row of this equipment at this position.
    condtions = {'state__iexact' : "使用中",
                 'equipment_id__iexact' : self.instance.equipment_id,
                 'position_id__iexact' : self.instance.position_id,
                 }
    if state == "作废" :  # voiding: nothing to apply
        return
    try :
        equipmentStockDetail = EquipmentStockDetailModel.objects.get(**condtions)  # the per-position stock detail
    except Exception as e :
        raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
    position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # the storage position record
    if state == "完成" :  # approval: apply the stocktake delta
        condtions1 = {'equipment_id__iexact' : self.instance.equipment_id,
                      'warehouse_code__iexact' : self.instance.warehouse_code,
                      }
        try :
            equipmentStockInfor = EquipmentStockInforModel.objects.get(**condtions1)  # aggregate stock record
        except Exception as e :
            raise serializers.ValidationError("当前库存信息与库存明细不符合")
        equipmentStockInfor.sum += self.instance.sum  # apply delta to aggregate stock
        equipmentStockInfor.save()
        equipmentStockDetail.sum += self.instance.sum  # apply delta to the detail row
        equipmentStockDetail.save()
        if (equipmentStockDetail.sum <= 0) :
            # Detail emptied: free the position and close out the detail row.
            position.state = "闲置"
            position.save()
            equipmentStockDetail.state = "完成"
            equipmentStockDetail.save()
# 所有字段验证
def validate(self, attrs) :
    """Route the requested state change to the handler matching the order's type.

    The ``alter`` entry (audit records) is stripped here because it is handled
    separately by validate_alter.  Unknown operation types fall through with no
    side effects, exactly like the original if/elif chain.
    """
    attrs.pop('alter', None)  # remove alter if present; M2M handled in validate_alter
    dispatch = {
        "增加操作" : self.increase,
        "入库操作" : self.storage,
        "退库操作" : self.storage,
        "出库操作" : self.outbound,
        "盘点操作" : self.inventory,
    }
    handler = dispatch.get(self.instance.type)
    if handler is not None :
        handler(attrs['state'])
    return attrs
# 状态字段验证
def validate_state(self, value) :
    """Enforce the order workflow state machine.

    Allowed transitions:
        新建   -> 审核中, 作废
        审核中 -> 完成, 新建, 作废
        完成   -> 作废
    A creator who is not also the auditor may only move a 新建 order to
    审核中 or 作废.

    Fix: the original ended with an unreachable ``return value`` after an
    unconditional raise; the logic is now a transition table with one raise.
    """
    current = self.instance.state
    requester = self.context['request'].user.username
    if self.instance.create_user == requester and self.instance.auditor != requester :
        # Creator without audit rights: only submit or void a freshly created order.
        if not (current == "新建" and value in ("审核中", "作废")) :
            raise serializers.ValidationError("创建者只能将[新建]信息更改成[审核中]或[作废]")
    transitions = {
        "新建" : ("审核中", "作废"),
        "审核中" : ("完成", "新建", "作废"),
        "完成" : ("作废",),
    }
    if value in transitions.get(current, ()) :
        return value
    raise serializers.ValidationError("不能从" + current + "更新到" + value)
# 审核记录字段验证
def validate_alter(self, value) :
    """Append each submitted audit record to this order's ``alter`` M2M relation."""
    relation = EquipmentManageModel.objects.get(id=self.instance.id).alter
    for record in value :
        relation.add(record.id)
    return value
# endregion
# region 设备配件管理 序列化器
class PartsManageSerialize_Create(serializers.ModelSerializer) :
    """
    设备配件管理--create
    Creation serializer for parts manage orders: forces the initial state to
    "新建" and stamps the requesting user as creator.
    """
    state = serializers.HiddenField(default="新建")  # orders always start in the 新建 (new) state
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())
    class Meta :
        model = PartsManageModel
        fields = (
            "id", "name", "code", "state", "type", "position_id", "parts_id", "handler", "sum", "dataTime", "attribute1",
            "attribute2", "attribute3", "attribute4", "attribute5", "desc", "create_user", "auditor"
        )
    # Full-record validation
    def validate(self, attrs) :
        """Check creator permission, creator/auditor distinctness and the referenced
        position/parts; snapshot warehouse/position/parts codes onto the order.

        Fix: removed stray trailing quote characters from two error messages.
        """
        if not attrs["create_user"].has_perm('warehouse.add_partsmanagemodel') :  # creator must hold the add permission
            raise serializers.ValidationError("当前用户不具备创建权限")
        if settings.SAME_USER != True :  # site option: whether creator and auditor may be the same account
            if attrs["create_user"].username == attrs["auditor"] :
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        try :
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # referenced position must exist
        except Exception :
            raise serializers.ValidationError("指定的仓位不存在")
        try :
            parts = PartsInforDefinitionModel.objects.get(id=attrs["parts_id"])  # referenced parts definition must exist
        except Exception :
            raise serializers.ValidationError("指定的设备配件不存在")
        if parts.state != "使用中" :
            raise serializers.ValidationError("指定的设备配件不在'使用中'状态")
        # Denormalize warehouse / position / parts identifiers onto the order record.
        attrs["warehouse_code"] = position.type.code
        attrs["warehouse_name"] = position.type.name
        attrs["position_code"] = position.code
        attrs["position_name"] = position.name
        attrs["partsType_code"] = parts.type.code
        attrs["partsType_name"] = parts.type.name
        attrs["parts_code"] = parts.code
        attrs["parts_name"] = parts.name
        return attrs
    # Auditor field validation
    def validate_auditor(self, value) :
        """The auditor account must exist and hold the audit permission."""
        try :
            auditor = User.objects.get(username=value)
        except Exception :
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_partsmanagemodel') :
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class PartsManageSerialize_List(serializers.ModelSerializer) :
    """
    设备配件管理--list
    Read-only list serializer for parts manage orders: exposes the
    denormalized warehouse/position/parts snapshot plus audit metadata.
    """
    class Meta :
        model = PartsManageModel
        fields = ("id", "name", "code", "state", "type", "warehouse_name", "warehouse_code", "position_code", "position_name",
                  "partsType_code", "partsType_name", "parts_code", "parts_name", "handler", "sum", "dataTime", "auditor",
                  "create_user","create_time","update_time")
class PartsManageSerialize_Retrieve(serializers.ModelSerializer) :
    """
    设备配件管理--retrieve
    Detail serializer: returns every model field and expands the ``alter``
    audit records through the nested list serializer.
    """
    # Audit-trail entries rendered inline rather than as raw ids.
    alter = WarehouseAlterRecordSerialize_List(many=True)
    class Meta :
        model = PartsManageModel
        fields = "__all__"
class PartsManageSerialize_Update(serializers.ModelSerializer) :
    """
    设备配件管理--update
    Update serializer for parts manage orders; edits are only allowed while
    the order is still in the "新建" state.
    """
    class Meta :
        model = PartsManageModel
        fields = ("id", "name", "code", "type", "position_id", "parts_id", "handler", "sum", "dataTime", "attribute1",
                  "attribute2", "attribute3", "attribute4", "attribute5", "desc", "auditor", "alter")
    # Full-record validation
    def validate(self, attrs) :
        """Reject edits after submission; re-validate the referenced position/parts
        and refresh the denormalized snapshot fields."""
        if self.instance.state != '新建' :  # submitted orders are frozen
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        try :
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # referenced position must exist
        except Exception :
            raise serializers.ValidationError("指定的仓位不存在")
        try :
            parts = PartsInforDefinitionModel.objects.get(id=attrs["parts_id"])  # referenced parts definition must exist
        except Exception :
            raise serializers.ValidationError("指定的设备配件不存在")
        if parts.state != "使用中" :
            raise serializers.ValidationError("指定的设备配件不在'使用中'状态")
        # Refresh the denormalized warehouse / position / parts identifiers.
        attrs["warehouse_code"] = position.type.code
        attrs["warehouse_name"] = position.type.name
        attrs["position_code"] = position.code
        attrs["position_name"] = position.name
        attrs["partsType_code"] = parts.type.code
        attrs["partsType_name"] = parts.type.name
        attrs["parts_code"] = parts.code
        attrs["parts_name"] = parts.name
        return attrs
    # Auditor field validation
    def validate_auditor(self, value) :
        """Auditor may only change while 新建; must differ from the creator (unless
        allowed by settings), exist, and hold the audit permission.

        Fix: removed a stray trailing quote from the duplicate-account message.
        """
        if self.instance.state != '新建' :  # submitted orders are frozen
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        if settings.SAME_USER != True :
            if self.instance.create_user == value :  # auditor must differ from creator
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        try :
            auditor = User.objects.get(username=value)
        except Exception :
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_partsmanagemodel') :
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class PartsManageSerialize_Partial(serializers.ModelSerializer) :
    """
    设备配件管理--partial
    State-transition serializer for parts manage orders.  validate() routes
    the requested state to the handler for the order's operation type; the
    handlers enforce stock constraints and apply stock movements on approval.
    """
    class Meta :
        model = PartsManageModel
        fields = ("id", "state", "alter")
    # Inbound / return-to-store handler
    def storage(self, state) :
        """Reserve the position on submit, release it on reject/void, and create
        the stock detail plus update aggregate stock on approval ("完成")."""
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target storage position
        if state == "审核中" :  # submit: position must be free and large enough
            if position.state != "闲置" :
                raise serializers.ValidationError("当前仓位不在‘空闲状态’")
            if self.instance.sum > position.maximum :
                raise serializers.ValidationError("操作数量超出了仓位的最大容量’")
            position.state = "使用中"  # reserve the position
            position.save()
        if state == "新建" :  # rejected back to 新建: release the reservation
            position.state = "闲置"
            position.save()
        if state == "完成" :  # approved: materialize the stock detail row
            PartsStockDetailModel.objects.create(
                state="使用中",
                warehouse_code=self.instance.warehouse_code,
                warehouse_name=self.instance.warehouse_name,
                position_id=self.instance.position_id,
                position_code=self.instance.position_code,
                position_name=self.instance.position_name,
                partsType_code=self.instance.partsType_code,
                partsType_name=self.instance.partsType_name,
                parts_id=self.instance.parts_id,
                parts_code=self.instance.parts_code,
                parts_name=self.instance.parts_name,
                sum=self.instance.sum,
                attribute1=self.instance.attribute1,
                attribute2=self.instance.attribute2,
                attribute3=self.instance.attribute3,
                attribute4=self.instance.attribute4,
                attribute5=self.instance.attribute5)
            # Aggregate stock is keyed by parts + warehouse; create it on first inbound.
            condtions1 = {'parts_id__iexact' : self.instance.parts_id,
                          'warehouse_code__iexact' : self.instance.warehouse_code,
                          }
            try :
                partsStockInfor = PartsStockInforModel.objects.get(**condtions1)
                partsStockInfor.sum += self.instance.sum
                partsStockInfor.save()
            except Exception :  # no aggregate row yet: create one
                PartsStockInforModel.objects.create(
                    warehouse_code=self.instance.warehouse_code,
                    warehouse_name=self.instance.warehouse_name,
                    partsType_code=self.instance.partsType_code,
                    partsType_name=self.instance.partsType_name,
                    parts_id=self.instance.parts_id,
                    parts_code=self.instance.parts_code,
                    parts_name=self.instance.parts_name,
                    sum=self.instance.sum,
                    attribute1=self.instance.attribute1,
                    attribute2=self.instance.attribute2,
                    attribute3=self.instance.attribute3,
                    attribute4=self.instance.attribute4,
                    attribute5=self.instance.attribute5)
        if state == "作废" and self.instance.state == "审核中" :  # voided while under audit: free the position
            position.state = "闲置"
            position.save()
    # Quantity-increase handler
    def increase(self, state) :
        """Validate capacity on submit; add the quantity to detail + aggregate
        stock on approval."""
        condtions = {'state__iexact' : "使用中",
                     'parts_id__iexact' : self.instance.parts_id,
                     'position_id__iexact' : self.instance.position_id,
                     }
        if state == "作废" :  # voiding: nothing to validate or apply
            return
        try :
            partsStockDetail = PartsStockDetailModel.objects.get(**condtions)  # in-use detail row at this position
        except Exception :
            raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)
        if state == "审核中" :  # submit: new total must still fit the position
            if (self.instance.sum + partsStockDetail.sum) > position.maximum :
                raise serializers.ValidationError("当前增加数量加库存数量超出仓位最大容量")
        if state == "完成" :  # approved: apply the increase
            condtions1 = {'parts_id__iexact' : self.instance.parts_id,
                          'warehouse_code__iexact' : self.instance.warehouse_code,
                          }
            try :
                partsStockInfor = PartsStockInforModel.objects.get(**condtions1)
            except Exception :
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            partsStockInfor.sum += self.instance.sum
            partsStockInfor.save()
            partsStockDetail.sum += self.instance.sum
            partsStockDetail.save()
    # Outbound handler
    def outbound(self, state) :
        """Check stock on submit/approval; deduct on approval and release the
        position once the detail row empties."""
        condtions = {'state__iexact' : "使用中",
                     'parts_id__iexact' : self.instance.parts_id,
                     'position_id__iexact' : self.instance.position_id,
                     }
        if state == "作废" :  # voiding: nothing to validate or apply
            return
        try :
            partsStockDetail = PartsStockDetailModel.objects.get(**condtions)  # in-use detail row at this position
        except Exception :
            raise serializers.ValidationError("当前库存明细不存在,无法进行出库操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)
        if state == "审核中" or state == "完成" :  # the requested quantity must be available
            if self.instance.sum > partsStockDetail.sum :
                raise serializers.ValidationError("当前出库数量超出库存数量")
        if state == "完成" :  # approved: deduct from aggregate and detail stock
            condtions1 = {'parts_id__iexact' : self.instance.parts_id,
                          'warehouse_code__iexact' : self.instance.warehouse_code,
                          }
            try :
                partsStockInfor = PartsStockInforModel.objects.get(**condtions1)
            except Exception :
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            partsStockInfor.sum -= self.instance.sum
            partsStockInfor.save()
            partsStockDetail.sum -= self.instance.sum
            partsStockDetail.save()
            if (partsStockDetail.sum <= 0) :
                # Detail emptied: free the position and close out the detail row.
                position.state = "闲置"
                position.save()
                partsStockDetail.state = "完成"
                partsStockDetail.save()
    # Stocktake handler
    def inventory(self, state) :
        """On approval, apply self.instance.sum as a delta to detail + aggregate
        stock (presumably a signed correction — TODO confirm), closing the
        detail row when it drops to 0."""
        condtions = {'state__iexact' : "使用中",
                     'parts_id__iexact' : self.instance.parts_id,
                     'position_id__iexact' : self.instance.position_id,
                     }
        if state == "作废" :  # voiding: nothing to apply
            return
        try :
            partsStockDetail = PartsStockDetailModel.objects.get(**condtions)  # in-use detail row at this position
        except Exception :
            raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)
        if state == "完成" :  # approved: apply the stocktake delta
            condtions1 = {'parts_id__iexact' : self.instance.parts_id,
                          'warehouse_code__iexact' : self.instance.warehouse_code,
                          }
            try :
                partsStockInfor = PartsStockInforModel.objects.get(**condtions1)
            except Exception :
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            partsStockInfor.sum += self.instance.sum
            partsStockInfor.save()
            partsStockDetail.sum += self.instance.sum
            partsStockDetail.save()
            if (partsStockDetail.sum <= 0) :
                # Detail emptied: free the position and close out the detail row.
                position.state = "闲置"
                position.save()
                partsStockDetail.state = "完成"
                partsStockDetail.save()
    # Full-record validation: route the state change to the handler for the order type
    def validate(self, attrs) :
        try :
            del attrs['alter']  # alter is processed by validate_alter, not saved here
        except Exception :
            pass
        if self.instance.type == "增加操作" :
            self.increase(attrs['state'])
        elif self.instance.type == "入库操作" or self.instance.type == "退库操作" :
            self.storage(attrs['state'])
        elif self.instance.type == "出库操作" :
            self.outbound(attrs['state'])
        elif self.instance.type == "盘点操作" :
            self.inventory(attrs['state'])
        return attrs
    # State-field validation
    def validate_state(self, value) :
        """Enforce the workflow state machine: 新建 -> 审核中/作废;
        审核中 -> 完成/新建/作废; 完成 -> 作废.  The creator (when not the
        auditor) may only submit or void a 新建 order.

        Fix: removed the unreachable ``return value`` that followed the raise.
        """
        if ((self.instance.create_user == self.context['request'].user.username) and
                (self.instance.auditor != self.context['request'].user.username)) :
            if not (self.instance.state == "新建" and (value == "审核中" or value == "作废")) :
                raise serializers.ValidationError("创建者只能将[新建]信息更改成[审核中]或[作废]")
        if (self.instance.state == "新建" and
                (value == "审核中" or value == "作废")) :
            return value
        if (self.instance.state == "审核中" and
                (value == "完成" or value == "新建" or value == "作废")) :
            return value
        if (self.instance.state == "完成" and
                (value == "作废")) :
            return value
        raise serializers.ValidationError("不能从" + self.instance.state + "更新到" + value)
    # Audit-record field validation
    def validate_alter(self, value) :
        """Append each submitted audit record to the order's ``alter`` M2M relation."""
        obj = PartsManageModel.objects.get(id=self.instance.id).alter
        for data in value :
            obj.add(data.id)
        return value
# endregion
# region 物料管理 序列化器
class MaterialManageSerialize_Create(serializers.ModelSerializer) :
    """
    物料管理--create
    Creation serializer for material manage orders: forces the initial state
    to "新建" and stamps the requesting user as creator.
    """
    state = serializers.HiddenField(default="新建")  # orders always start in the 新建 (new) state
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())
    class Meta :
        model = MaterialManageModel
        fields = ("id", "name", "code", "state", "type", "position_id", "material_id","inspectionReport_id", "handler", "batch",
                  "sum", "dataTime", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "create_user", "auditor"
                  )
    # Full-record validation
    def validate(self, attrs) :
        """Check creator permission, creator/auditor distinctness and the referenced
        position/material; snapshot denormalized codes onto the order.  The
        inspection report is optional and resolved only when a non-empty id
        is supplied.

        Fixes: ``is not ''`` identity comparison replaced by ``!= ''``; stray
        trailing quotes removed from two error messages.
        """
        if not attrs["create_user"].has_perm('warehouse.add_materialmanagemodel') :  # creator must hold the add permission
            raise serializers.ValidationError("当前用户不具备创建权限")
        if settings.SAME_USER != True :  # site option: whether creator and auditor may be the same account
            if attrs["create_user"].username == attrs["auditor"] :
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        try :
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # referenced position must exist
        except Exception :
            raise serializers.ValidationError("指定的仓位不存在")
        try :
            material = MaterialInforDefinitionModel.objects.get(id=attrs["material_id"])  # referenced material must exist
        except Exception :
            raise serializers.ValidationError("指定的物料不存在")
        if material.state != "使用中" :
            raise serializers.ValidationError("指定的物料不在'使用中'状态")
        # Denormalize warehouse / position / material identifiers onto the order record.
        attrs["warehouse_code"] = position.type.code
        attrs["warehouse_name"] = position.type.name
        attrs["position_code"] = position.code
        attrs["position_name"] = position.name
        attrs["materialType_code"] = material.type.code
        attrs["materialType_name"] = material.type.name
        attrs["material_code"] = material.code
        attrs["material_name"] = material.name
        # Optional inspection report: resolve and snapshot when a non-empty id is given.
        if 'inspectionReport_id' in attrs :
            if attrs['inspectionReport_id'] != '' :
                try :
                    report = InspectionReportModel.objects.get(id=attrs["inspectionReport_id"])
                except Exception :
                    raise serializers.ValidationError("指定的质检报告不存在")
                attrs["inspectionReportType_code"] = report.type.code
                attrs["inspectionReportType_name"] = report.type.name
                attrs["inspectionReport_code"] = report.code
                attrs["inspectionReport_name"] = report.name
        return attrs
    # Auditor field validation
    def validate_auditor(self, value) :
        """The auditor account must exist and hold the audit permission."""
        try :
            auditor = User.objects.get(username=value)
        except Exception :
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_materialmanagemodel') :
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class MaterialManageSerialize_List(serializers.ModelSerializer) :
    """
    物料管理--list
    Read-only list serializer for material manage orders: exposes the
    denormalized warehouse/position/material snapshot plus audit metadata.
    """
    class Meta :
        model = MaterialManageModel
        fields = ("id", "name", "code", "state", "type", "warehouse_name", "warehouse_code", "position_code", "position_name",
                  "materialType_code", "materialType_name","material_code", "material_name", "handler", "batch", "sum", "dataTime",
                  "auditor", "create_user","create_time","update_time")
class MaterialManageSerialize_Retrieve(serializers.ModelSerializer) :
    """
    物料管理--retrieve
    Detail serializer: returns every model field and expands the ``alter``
    audit records through the nested list serializer.
    """
    # Audit-trail entries rendered inline rather than as raw ids.
    alter = WarehouseAlterRecordSerialize_List(many=True)
    class Meta :
        model = MaterialManageModel
        fields = "__all__"
class MaterialManageSerialize_Update(serializers.ModelSerializer) :
    """
    物料管理--update
    Update serializer for material manage orders; edits are only allowed
    while the order is still in the "新建" state.
    """
    class Meta :
        model = MaterialManageModel
        fields = ("id", "name", "code", "type", "position_id", "material_id","inspectionReport_id", "handler", "batch",
                  "sum", "dataTime", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor", "alter")
    # Full-record validation
    def validate(self, attrs) :
        """Reject edits after submission; re-validate the referenced
        position/material and refresh the denormalized snapshot fields.

        Fix: ``is not ''`` identity comparison replaced by ``!= ''``.
        """
        if self.instance.state != '新建' :  # submitted orders are frozen
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        try :
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # referenced position must exist
        except Exception :
            raise serializers.ValidationError("指定的仓位不存在")
        try :
            material = MaterialInforDefinitionModel.objects.get(id=attrs["material_id"])  # referenced material must exist
        except Exception :
            raise serializers.ValidationError("指定的物料不存在")
        if material.state != "使用中" :
            raise serializers.ValidationError("指定的物料不在'使用中'状态")
        # Refresh the denormalized warehouse / position / material identifiers.
        attrs["warehouse_code"] = position.type.code
        attrs["warehouse_name"] = position.type.name
        attrs["position_code"] = position.code
        attrs["position_name"] = position.name
        attrs["materialType_code"] = material.type.code
        attrs["materialType_name"] = material.type.name
        attrs["material_code"] = material.code
        attrs["material_name"] = material.name
        # Optional inspection report: resolve and snapshot when a non-empty id is given.
        if 'inspectionReport_id' in attrs :
            if attrs['inspectionReport_id'] != '' :
                try :
                    report = InspectionReportModel.objects.get(id=attrs["inspectionReport_id"])
                except Exception :
                    raise serializers.ValidationError("指定的质检报告不存在")
                attrs["inspectionReportType_code"] = report.type.code
                attrs["inspectionReportType_name"] = report.type.name
                attrs["inspectionReport_code"] = report.code
                attrs["inspectionReport_name"] = report.name
        return attrs
    # Auditor field validation
    def validate_auditor(self, value) :
        """Auditor may only change while 新建; must differ from the creator (unless
        allowed by settings), exist, and hold the audit permission.

        Fix: removed a stray trailing quote from the duplicate-account message.
        """
        if self.instance.state != '新建' :  # submitted orders are frozen
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        if settings.SAME_USER != True :
            if self.instance.create_user == value :  # auditor must differ from creator
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        try :
            auditor = User.objects.get(username=value)
        except Exception :
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_materialmanagemodel') :
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class MaterialManageSerialize_Partial(serializers.ModelSerializer) :
    """
    物料管理--partial
    State-transition serializer for material manage orders.  validate()
    routes the requested state to the handler for the order's operation
    type.  Unlike the equipment/parts serializers, outbound here deducts
    stock at submission time and restores it on rejection/void.
    """
    class Meta :
        model = MaterialManageModel
        fields = ("id", "state", "alter")
    # Inbound / return-to-store handler
    def storage(self, state) :
        """Reserve the position on submit, release it on reject/void, and create
        the stock detail plus update aggregate stock on approval ("完成")."""
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target storage position
        if state == "审核中" :  # submit: position must be free and large enough
            if position.state != "闲置" :
                raise serializers.ValidationError("当前仓位不在‘空闲状态’")
            if self.instance.sum > position.maximum :
                raise serializers.ValidationError("操作数量超出了仓位的最大容量’")
            position.state = "使用中"  # reserve the position
            position.save()
        if state == "新建" :  # rejected back to 新建: release the reservation
            position.state = "闲置"
            position.save()
        if state == "完成" :  # approved: materialize the stock detail row
            MaterialStockDetailModel.objects.create(
                state="使用中",
                warehouse_code=self.instance.warehouse_code,
                warehouse_name=self.instance.warehouse_name,
                position_id=self.instance.position_id,
                position_code=self.instance.position_code,
                position_name=self.instance.position_name,
                materialType_code=self.instance.materialType_code,
                materialType_name=self.instance.materialType_name,
                material_id=self.instance.material_id,
                material_code=self.instance.material_code,
                material_name=self.instance.material_name,
                batch=self.instance.batch,
                sum=self.instance.sum,
                attribute1=self.instance.attribute1,
                attribute2=self.instance.attribute2,
                attribute3=self.instance.attribute3,
                attribute4=self.instance.attribute4,
                attribute5=self.instance.attribute5)
            # Aggregate stock is keyed by material + warehouse + batch.
            condtions1 = {'material_id__iexact' : self.instance.material_id,
                          'warehouse_code__iexact' : self.instance.warehouse_code,
                          'batch__iexact' : self.instance.batch
                          }
            try :
                materialStockInfor = MaterialStockInforModel.objects.get(**condtions1)
                materialStockInfor.sum += self.instance.sum
                materialStockInfor.save()
            except Exception :  # no aggregate row yet: create one
                MaterialStockInforModel.objects.create(
                    warehouse_code=self.instance.warehouse_code,
                    warehouse_name=self.instance.warehouse_name,
                    materialType_code=self.instance.materialType_code,
                    materialType_name=self.instance.materialType_name,
                    material_id=self.instance.material_id,
                    material_code=self.instance.material_code,
                    material_name=self.instance.material_name,
                    batch=self.instance.batch,
                    sum=self.instance.sum,
                    attribute1=self.instance.attribute1,
                    attribute2=self.instance.attribute2,
                    attribute3=self.instance.attribute3,
                    attribute4=self.instance.attribute4,
                    attribute5=self.instance.attribute5)
        if state == "作废" and self.instance.state == "审核中" :  # voided while under audit: free the position
            position.state = "闲置"
            position.save()
    # Quantity-increase handler
    def increase(self, state) :
        """Validate capacity on submit; add the quantity to detail + aggregate
        stock on approval."""
        condtions = {'state__iexact' : "使用中",
                     'material_id__iexact' : self.instance.material_id,
                     'position_id__iexact' : self.instance.position_id,
                     'batch__iexact' : self.instance.batch
                     }
        if state == "作废" :  # voiding: nothing to validate or apply
            return
        try :
            materialStockDetail = MaterialStockDetailModel.objects.get(**condtions)  # in-use detail row at this position
        except Exception :
            raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)
        if state == "审核中" :  # submit: new total must still fit the position
            if (self.instance.sum + materialStockDetail.sum) > position.maximum :
                raise serializers.ValidationError("当前增加数量加库存数量超出仓位最大容量")
        if state == "完成" :  # approved: apply the increase
            condtions1 = {'material_id__iexact' : self.instance.material_id,
                          'warehouse_code__iexact' : self.instance.warehouse_code,
                          'batch__iexact' : self.instance.batch
                          }
            try :
                materialStockInfor = MaterialStockInforModel.objects.get(**condtions1)
            except Exception :
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            materialStockInfor.sum += self.instance.sum
            materialStockInfor.save()
            materialStockDetail.sum += self.instance.sum
            materialStockDetail.save()
    # Outbound handler (deduct-on-submit variant).
    # NOTE: a superseded commented-out implementation that deducted on approval
    # was removed here; see VCS history if needed.
    def outbound(self, state):
        """Deduct stock when the order is submitted (新建 -> 审核中), restore it on
        rejection (审核中 -> 新建) or void during audit (审核中 -> 作废), and on
        approval (审核中 -> 完成) release the position/detail if emptied."""
        condtions = {'state__iexact': "使用中",
                     'material_id__iexact': self.instance.material_id,
                     'position_id__iexact': self.instance.position_id,
                     'batch__iexact': self.instance.batch
                     }
        try:
            materialStockDetail = MaterialStockDetailModel.objects.get(**condtions)  # in-use detail row at this position
        except Exception:
            raise serializers.ValidationError("当前库存明细不存在,无法进行出库操作")
        condtions1 = {'material_id__iexact': self.instance.material_id,
                      'warehouse_code__iexact': self.instance.warehouse_code,
                      'batch__iexact': self.instance.batch
                      }
        try:
            materialStockInfor = MaterialStockInforModel.objects.get(**condtions1)  # aggregate stock record
        except Exception:
            raise serializers.ValidationError("当前库存信息与库存明细不符合")
        if (self.instance.state == "新建" and state == "作废"):  # voiding an unsubmitted order: nothing was applied yet
            return
        if (self.instance.state == "新建" and state == "审核中"):  # submit: deduct immediately
            if self.instance.sum > materialStockDetail.sum:  # requested quantity exceeds stock on hand
                raise serializers.ValidationError("当前出库数量超出库存数量")
            materialStockInfor.sum -= self.instance.sum
            materialStockInfor.save()
            materialStockDetail.sum -= self.instance.sum
            materialStockDetail.save()
        if (self.instance.state == "审核中" and state == "完成"):  # approved: finalize
            position = PositionDefinitionModel.objects.get(id=self.instance.position_id)
            if (materialStockDetail.sum <= 0):
                # Detail emptied: free the position and close out the detail row.
                position.state = "闲置"
                position.save()
                materialStockDetail.state = "完成"
                materialStockDetail.save()
        if (self.instance.state == "审核中" and state == "新建"):  # rejected: restore the deducted stock
            materialStockInfor.sum += self.instance.sum
            materialStockInfor.save()
            materialStockDetail.sum += self.instance.sum
            materialStockDetail.save()
        if (self.instance.state == "审核中" and state == "作废"):  # voided during audit: restore the deducted stock
            materialStockInfor.sum += self.instance.sum
            materialStockInfor.save()
            materialStockDetail.sum += self.instance.sum
            materialStockDetail.save()
    # Stocktake handler
    def inventory(self, state) :
        """On approval, apply self.instance.sum as a delta to detail + aggregate
        stock (presumably a signed correction — TODO confirm), closing the
        detail row when it drops to 0."""
        condtions = {'state__iexact' : "使用中",
                     'material_id__iexact' : self.instance.material_id,
                     'position_id__iexact' : self.instance.position_id,
                     'batch__iexact' : self.instance.batch
                     }
        if state == "作废" :  # voiding: nothing to apply
            return
        try :
            materialStockDetail = MaterialStockDetailModel.objects.get(**condtions)  # in-use detail row at this position
        except Exception :
            raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)
        if state == "完成" :  # approved: apply the stocktake delta
            condtions1 = {'material_id__iexact' : self.instance.material_id,
                          'warehouse_code__iexact' : self.instance.warehouse_code,
                          'batch__iexact' : self.instance.batch
                          }
            try :
                materialStockInfor = MaterialStockInforModel.objects.get(**condtions1)
            except Exception :
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            materialStockInfor.sum += self.instance.sum
            materialStockInfor.save()
            materialStockDetail.sum += self.instance.sum
            materialStockDetail.save()
            if (materialStockDetail.sum <= 0) :
                # Detail emptied: free the position and close out the detail row.
                position.state = "闲置"
                position.save()
                materialStockDetail.state = "完成"
                materialStockDetail.save()
    # Full-record validation: route the state change to the handler for the order type
    def validate(self, attrs) :
        try :
            del attrs['alter']  # alter is processed by validate_alter, not saved here
        except Exception :
            pass
        if self.instance.type == "增加操作" :
            self.increase(attrs['state'])
        elif self.instance.type == "入库操作" or self.instance.type == "退库操作" :
            self.storage(attrs['state'])
        elif self.instance.type == "出库操作" :
            self.outbound(attrs['state'])
        elif self.instance.type == "盘点操作" :
            self.inventory(attrs['state'])
        return attrs
    # State-field validation
    def validate_state(self, value) :
        """Enforce the workflow state machine: 新建 -> 审核中/作废;
        审核中 -> 完成/新建/作废; 完成 -> 作废.  The creator (when not the
        auditor) may only submit or void a 新建 order.

        Fix: removed the unreachable ``return value`` that followed the raise.
        """
        if ((self.instance.create_user == self.context['request'].user.username) and
                (self.instance.auditor != self.context['request'].user.username)) :
            if not (self.instance.state == "新建" and (value == "审核中" or value == "作废")) :
                raise serializers.ValidationError("创建者只能将[新建]信息更改成[审核中]或[作废]")
        if (self.instance.state == "新建" and
                (value == "审核中" or value == "作废")) :
            return value
        if (self.instance.state == "审核中" and
                (value == "完成" or value == "新建" or value == "作废")) :
            return value
        if (self.instance.state == "完成" and
                (value == "作废")) :
            return value
        raise serializers.ValidationError("不能从" + self.instance.state + "更新到" + value)
    # Audit-record field validation
    def validate_alter(self, value) :
        """Append each submitted audit record to the order's ``alter`` M2M relation."""
        obj = MaterialManageModel.objects.get(id=self.instance.id).alter
        for data in value :
            obj.add(data.id)
        return value
# endregion
# region 半成品管理 序列化器
class SemifinishedManageSerialize_Create(serializers.ModelSerializer) :
    """
    半成品管理--create
    Creation serializer for semi-finished product manage orders: forces the
    initial state to "新建" and stamps the requesting user as creator.
    """
    state = serializers.HiddenField(default="新建")  # orders always start in the 新建 (new) state
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())
    class Meta :
        model = SemifinishedManageModel
        fields = ("id", "name", "code", "state", "type", "position_id", "semifinished_id", "inspectionReport_id","handler", "batch",
                  "sum", "dataTime", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "create_user", "auditor"
                  )
    # Full-record validation
    def validate(self, attrs) :
        """Check creator permission, creator/auditor distinctness and the referenced
        position/semi-finished product; snapshot denormalized codes onto the
        order.  The inspection report is optional and resolved only when a
        non-empty id is supplied.

        Fixes: ``is not ''`` identity comparison replaced by ``!= ''``; stray
        trailing quotes removed from two error messages.
        """
        if not attrs["create_user"].has_perm('warehouse.add_semifinishedmanagemodel') :  # creator must hold the add permission
            raise serializers.ValidationError("当前用户不具备创建权限")
        if settings.SAME_USER != True :  # site option: whether creator and auditor may be the same account
            if attrs["create_user"].username == attrs["auditor"] :
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        try :
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # referenced position must exist
        except Exception :
            raise serializers.ValidationError("指定的仓位不存在")
        try :
            semifinished = SemifinishedInforDefinitionModel.objects.get(id=attrs["semifinished_id"])  # must exist
        except Exception :
            raise serializers.ValidationError("指定的半成品不存在")
        if semifinished.state != "使用中" :
            raise serializers.ValidationError("指定的半成品不在'使用中'状态")
        # Denormalize warehouse / position / semi-finished identifiers onto the order record.
        attrs["warehouse_code"] = position.type.code
        attrs["warehouse_name"] = position.type.name
        attrs["position_code"] = position.code
        attrs["position_name"] = position.name
        attrs["semifinishedType_code"] = semifinished.type.code
        attrs["semifinishedType_name"] = semifinished.type.name
        attrs["semifinished_code"] = semifinished.code
        attrs["semifinished_name"] = semifinished.name
        # Optional inspection report: resolve and snapshot when a non-empty id is given.
        if 'inspectionReport_id' in attrs :
            if attrs['inspectionReport_id'] != '' :
                try :
                    report = InspectionReportModel.objects.get(id=attrs["inspectionReport_id"])
                except Exception :
                    raise serializers.ValidationError("指定的质检报告不存在")
                attrs["inspectionReportType_code"] = report.type.code
                attrs["inspectionReportType_name"] = report.type.name
                attrs["inspectionReport_code"] = report.code
                attrs["inspectionReport_name"] = report.name
        return attrs
    # Auditor field validation
    def validate_auditor(self, value) :
        """The auditor account must exist and hold the audit permission."""
        try :
            auditor = User.objects.get(username=value)
        except Exception :
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_semifinishedmanagemodel') :
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class SemifinishedManageSerialize_List(serializers.ModelSerializer) :
    """
    半成品管理--list
    Read-only list serializer for semi-finished product manage orders:
    exposes the denormalized warehouse/position/product snapshot plus audit
    metadata.
    """
    class Meta :
        model = SemifinishedManageModel
        fields = ( "id", "name", "code", "state", "type", "warehouse_name", "warehouse_code", "position_code", "position_name",
                   "semifinishedType_code", "semifinishedType_name","semifinished_code", "semifinished_name", "handler", "batch", "sum",
                   "dataTime", "auditor", "create_user","create_time","update_time")
class SemifinishedManageSerialize_Retrieve(serializers.ModelSerializer) :
    """
    半成品管理--retrieve
    Detail serializer: returns every model field and expands the ``alter``
    audit records through the nested list serializer.
    """
    # Audit-trail entries rendered inline rather than as raw ids.
    alter = WarehouseAlterRecordSerialize_List(many=True)
    class Meta :
        model = SemifinishedManageModel
        fields = "__all__"
class SemifinishedManageSerialize_Update(serializers.ModelSerializer):
    """
    Semifinished-product management -- update.

    Records may only be edited while still in the '新建' (new) state.
    `validate` re-resolves the referenced position / semifinished /
    inspection-report rows and denormalizes their display fields into attrs.
    """
    class Meta:
        model = SemifinishedManageModel
        fields = ("id", "name", "code", "type", "position_id", "semifinished_id", "inspectionReport_id", "handler", "batch",
                  "sum", "dataTime", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor", "alter")

    # Cross-field validation.
    def validate(self, attrs):
        """Validate referenced rows exist and copy their display fields into attrs."""
        if self.instance.state != '新建':  # editable only while in the 'new' state
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        try:
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # referenced position must exist
        except Exception:
            raise serializers.ValidationError("指定的仓位不存在")
        try:
            semifinished = SemifinishedInforDefinitionModel.objects.get(id=attrs["semifinished_id"])  # referenced semifinished must exist
        except Exception:
            raise serializers.ValidationError("指定的半成品不存在")
        if semifinished.state != "使用中":  # the semifinished definition must be active
            raise serializers.ValidationError("指定的半成品不在'使用中'状态")
        # Denormalize warehouse / position / semifinished display fields.
        attrs["warehouse_code"] = position.type.code
        attrs["warehouse_name"] = position.type.name
        attrs["position_code"] = position.code
        attrs["position_name"] = position.name
        attrs["semifinishedType_code"] = semifinished.type.code
        attrs["semifinishedType_name"] = semifinished.type.name
        attrs["semifinished_code"] = semifinished.code
        attrs["semifinished_name"] = semifinished.name
        if 'inspectionReport_id' in attrs.keys():
            # BUG FIX: the original tested `is not ''` -- an identity
            # comparison with a literal (SyntaxWarning since Python 3.8,
            # implementation-defined result). Use equality instead.
            if attrs['inspectionReport_id'] != '':
                try:
                    report = InspectionReportModel.objects.get(id=attrs["inspectionReport_id"])  # referenced QC report must exist
                except Exception:
                    raise serializers.ValidationError("指定的质检报告不存在")
                attrs["inspectionReportType_code"] = report.type.code
                attrs["inspectionReportType_name"] = report.type.name
                attrs["inspectionReport_code"] = report.code
                attrs["inspectionReport_name"] = report.name
        return attrs

    # Auditor field validation.
    def validate_auditor(self, value):
        """Auditor must exist, hold audit permission, and (unless allowed) differ from the creator."""
        if self.instance.state != '新建':  # editable only while in the 'new' state
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        if settings.SAME_USER != True:
            if self.instance.create_user == value:  # auditor may not equal the creator
                raise serializers.ValidationError("审核帐号不能与创建帐号相同'")
        try:
            auditor = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_semifinishedmanagemodel'):
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class SemifinishedManageSerialize_Partial(serializers.ModelSerializer):
    """
    Semifinished-product management -- partial update (state workflow).

    Each management record carries an operation type (increase / stock-in /
    stock-return / stock-out / stock-take). When its state changes
    (submit, approve, reject, void) the matching handler validates the
    transition and applies the corresponding stock side effects.

    Changes vs. original: removed ~35 lines of commented-out dead code
    (old `outbound`) and an unreachable `return` after the final `raise`
    in `validate_state`. Executable logic is unchanged.
    """
    class Meta:
        model = SemifinishedManageModel
        fields = ("id", "state", "alter")

    # Stock-in / stock-return operation: checks and side effects.
    def storage(self, state):
        """Reserve/release the target position and, on approval, book the stock."""
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target position
        if state == "审核中":  # submitted for review
            if position.state != "闲置":  # the position must currently be idle
                raise serializers.ValidationError("当前仓位不在‘空闲状态’")
            if self.instance.sum > position.maximum:  # quantity must fit the position capacity
                raise serializers.ValidationError("操作数量超出了仓位的最大容量’")
            position.state = "使用中"  # reserve the position
            position.save()
        if state == "新建":  # rejected back to 'new'
            position.state = "闲置"  # release the position
            position.save()
        if state == "完成":  # approved
            SemifinishedStockDetailModel.objects.create(  # create a per-position stock-detail row
                state="使用中",
                warehouse_code=self.instance.warehouse_code,
                warehouse_name=self.instance.warehouse_name,
                position_id=self.instance.position_id,
                position_code=self.instance.position_code,
                position_name=self.instance.position_name,
                semifinishedType_code=self.instance.semifinishedType_code,
                semifinishedType_name=self.instance.semifinishedType_name,
                semifinished_id=self.instance.semifinished_id,
                semifinished_code=self.instance.semifinished_code,
                semifinished_name=self.instance.semifinished_name,
                batch=self.instance.batch,
                sum=self.instance.sum,
                attribute1=self.instance.attribute1,
                attribute2=self.instance.attribute2,
                attribute3=self.instance.attribute3,
                attribute4=self.instance.attribute4,
                attribute5=self.instance.attribute5
            )
            condtions1 = {'semifinished_id__iexact': self.instance.semifinished_id,
                          'warehouse_code__iexact': self.instance.warehouse_code,
                          'batch__iexact': self.instance.batch
                          }
            try:
                semifinishedStockInfor = SemifinishedStockInforModel.objects.get(**condtions1)  # warehouse-level stock row
                semifinishedStockInfor.sum += self.instance.sum  # add to aggregate quantity
                semifinishedStockInfor.save()
            except Exception:  # no aggregate row for this warehouse/batch yet: create it
                SemifinishedStockInforModel.objects.create(
                    warehouse_code=self.instance.warehouse_code,
                    warehouse_name=self.instance.warehouse_name,
                    semifinishedType_code=self.instance.semifinishedType_code,
                    semifinishedType_name=self.instance.semifinishedType_name,
                    semifinished_id=self.instance.semifinished_id,
                    semifinished_code=self.instance.semifinished_code,
                    semifinished_name=self.instance.semifinished_name,
                    batch=self.instance.batch,
                    sum=self.instance.sum,
                    attribute1=self.instance.attribute1,
                    attribute2=self.instance.attribute2,
                    attribute3=self.instance.attribute3,
                    attribute4=self.instance.attribute4,
                    attribute5=self.instance.attribute5)
        if state == "作废" and self.instance.state == "审核中":  # voided while under review
            position.state = "闲置"  # release the position
            position.save()

    # Increase operation: checks and side effects.
    def increase(self, state):
        """Check capacity on submit; add the quantity to detail and aggregate stock on approval."""
        condtions = {'state__iexact': "使用中",
                     'semifinished_id__iexact': self.instance.semifinished_id,
                     'position_id__iexact': self.instance.position_id,
                     'batch__iexact': self.instance.batch
                     }
        if state == "作废":  # voiding needs no stock checks
            return
        try:
            semifinishedStockDetail = SemifinishedStockDetailModel.objects.get(**condtions)  # stock-detail row
        except Exception:
            raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target position
        if state == "审核中":  # submitted for review
            if (self.instance.sum + semifinishedStockDetail.sum) > position.maximum:  # must not exceed capacity
                raise serializers.ValidationError("当前增加数量加库存数量超出仓位最大容量")
        if state == "完成":  # approved
            condtions1 = {'semifinished_id__iexact': self.instance.semifinished_id,
                          'warehouse_code__iexact': self.instance.warehouse_code,
                          'batch__iexact': self.instance.batch
                          }
            try:
                semifinishedStockInfor = SemifinishedStockInforModel.objects.get(**condtions1)  # warehouse-level stock row
            except Exception:
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            semifinishedStockInfor.sum += self.instance.sum  # update aggregate quantity
            semifinishedStockInfor.save()
            semifinishedStockDetail.sum += self.instance.sum  # update detail quantity
            semifinishedStockDetail.save()

    # Outbound operation: checks and side effects.
    def outbound(self, state):
        """Deduct stock on submit, restore it on reject/void, free the position on approval."""
        condtions = {'state__iexact': "使用中",
                     'semifinished_id__iexact': self.instance.semifinished_id,
                     'position_id__iexact': self.instance.position_id,
                     'batch__iexact': self.instance.batch
                     }
        try:
            semifinishedStockDetail = SemifinishedStockDetailModel.objects.get(**condtions)  # stock-detail row
        except Exception:
            raise serializers.ValidationError("当前库存明细不存在,无法进行出库操作")
        condtions1 = {'semifinished_id__iexact': self.instance.semifinished_id,
                      'warehouse_code__iexact': self.instance.warehouse_code,
                      'batch__iexact': self.instance.batch
                      }
        try:
            semifinishedStockInfor = SemifinishedStockInforModel.objects.get(**condtions1)  # warehouse-level stock row
        except Exception:
            raise serializers.ValidationError("当前库存信息与库存明细不符合")
        if (self.instance.state == "新建" and state == "作废"):  # voiding a new record: nothing booked yet
            return
        if (self.instance.state == "新建" and state == "审核中"):  # submitted: deduct stock up front
            if self.instance.sum > semifinishedStockDetail.sum:  # cannot take out more than is stored
                raise serializers.ValidationError("当前出库数量超出库存数量")
            semifinishedStockInfor.sum -= self.instance.sum  # update aggregate quantity
            semifinishedStockInfor.save()
            semifinishedStockDetail.sum -= self.instance.sum  # update detail quantity
            semifinishedStockDetail.save()
        if (self.instance.state == "审核中" and state == "完成"):  # approved
            position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target position
            if (semifinishedStockDetail.sum <= 0):  # position emptied: release it and close the detail row
                position.state = "闲置"
                position.save()
                semifinishedStockDetail.state = "完成"
                semifinishedStockDetail.save()
        if (self.instance.state == "审核中" and state == "新建"):  # rejected: restore the deducted stock
            semifinishedStockInfor.sum += self.instance.sum
            semifinishedStockInfor.save()
            semifinishedStockDetail.sum += self.instance.sum
            semifinishedStockDetail.save()
        if (self.instance.state == "审核中" and state == "作废"):  # voided during review: restore the deducted stock
            semifinishedStockInfor.sum += self.instance.sum
            semifinishedStockInfor.save()
            semifinishedStockDetail.sum += self.instance.sum
            semifinishedStockDetail.save()

    # Stock-take operation: checks and side effects.
    def inventory(self, state):
        """On approval, apply the signed adjustment; free the position when it reaches zero."""
        condtions = {'state__iexact': "使用中",
                     'semifinished_id__iexact': self.instance.semifinished_id,
                     'position_id__iexact': self.instance.position_id,
                     'batch__iexact': self.instance.batch
                     }
        if state == "作废":  # voiding needs no stock checks
            return
        try:
            semifinishedStockDetail = SemifinishedStockDetailModel.objects.get(**condtions)  # stock-detail row
        except Exception:
            raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target position
        if state == "完成":  # approved
            condtions1 = {'semifinished_id__iexact': self.instance.semifinished_id,
                          'warehouse_code__iexact': self.instance.warehouse_code,
                          'batch__iexact': self.instance.batch
                          }
            try:
                semifinishedStockInfor = SemifinishedStockInforModel.objects.get(**condtions1)  # warehouse-level stock row
            except Exception:
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            semifinishedStockInfor.sum += self.instance.sum  # apply adjustment to aggregate (sum may be negative)
            semifinishedStockInfor.save()
            semifinishedStockDetail.sum += self.instance.sum  # apply adjustment to detail
            semifinishedStockDetail.save()
            if (semifinishedStockDetail.sum <= 0):  # position emptied: release it and close the detail row
                position.state = "闲置"
                position.save()
                semifinishedStockDetail.state = "完成"
                semifinishedStockDetail.save()

    # Cross-field validation: dispatch by operation type.
    def validate(self, attrs):
        """Strip the audit-trail field and run the operation-specific handler."""
        attrs.pop('alter', None)  # 'alter' is handled by validate_alter, not persisted here
        if self.instance.type == "增加操作":
            self.increase(attrs['state'])
        elif self.instance.type == "入库操作" or self.instance.type == "退库操作":
            self.storage(attrs['state'])
        elif self.instance.type == "出库操作":
            self.outbound(attrs['state'])
        elif self.instance.type == "盘点操作":
            self.inventory(attrs['state'])
        return attrs

    # State field validation.
    def validate_state(self, value):
        """Restrict transitions to the allowed workflow edges.

        BUG FIX: the original had an unreachable `return value` after the
        final `raise`; it has been removed (behavior unchanged).
        """
        if (self.instance.create_user == self.context['request'].user.username) and \
                (self.instance.auditor != self.context['request'].user.username):  # creator who is not the auditor
            if not (self.instance.state == "新建" and (value == "审核中" or value == "作废")):
                raise serializers.ValidationError("创建者只能将[新建]信息更改成[审核中]或[作废]")
        if (self.instance.state == "新建" and
                (value == "审核中" or value == "作废")):
            return value
        if (self.instance.state == "审核中" and
                (value == "完成" or value == "新建" or value == "作废")):
            return value
        if (self.instance.state == "完成" and
                (value == "作废")):
            return value
        raise serializers.ValidationError("不能从" + self.instance.state + "更新到" + value)

    # Audit-trail field validation.
    def validate_alter(self, value):
        """Attach the submitted alteration records to this instance's m2m relation."""
        obj = SemifinishedManageModel.objects.get(id=self.instance.id).alter
        for data in value:
            obj.add(data.id)
        return value
# endregion
# region 产品管理 序列化器
class ProductManageSerialize_Create(serializers.ModelSerializer):
    """
    Product management -- create.

    New records start in the '新建' (new) state with the requesting user
    stamped as creator. `validate` resolves the referenced position /
    product / inspection-report rows and denormalizes their display fields.
    """
    state = serializers.HiddenField(default="新建")  # new records always start as 'new'
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())  # stamp the requesting user
    class Meta:
        model = ProductManageModel
        fields = ("id", "name", "code", "state", "type", "position_id", "product_id", "inspectionReport_id", "handler", "batch",
                  "sum", "dataTime", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "file", "desc", "create_user", "auditor"
                  )

    # Cross-field validation.
    def validate(self, attrs):
        """Check permissions, validate referenced rows, and denormalize display fields."""
        if not attrs["create_user"].has_perm('warehouse.add_productmanagemodel'):  # creator needs add permission
            raise serializers.ValidationError("当前用户不具备创建权限'")
        if settings.SAME_USER != True:
            if attrs["create_user"].username == attrs["auditor"]:  # auditor may not equal the creator
                raise serializers.ValidationError("审核帐号不能与创建帐号相同'")
        try:
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # referenced position must exist
        except Exception:
            raise serializers.ValidationError("指定的仓位不存在")
        try:
            product = ProductInforDefinitionModel.objects.get(id=attrs["product_id"])  # referenced product must exist
        except Exception:
            raise serializers.ValidationError("指定的产品不存在")
        if product.state != "使用中":  # the product definition must be active
            raise serializers.ValidationError("指定的产品不在'使用中'状态")
        # Denormalize warehouse / position / product display fields.
        attrs["warehouse_code"] = position.type.code
        attrs["warehouse_name"] = position.type.name
        attrs["position_code"] = position.code
        attrs["position_name"] = position.name
        attrs["productType_code"] = product.type.code
        attrs["productType_name"] = product.type.name
        attrs["product_code"] = product.code
        attrs["product_name"] = product.name
        if 'inspectionReport_id' in attrs.keys():
            # BUG FIX: the original tested `is not ''` -- an identity
            # comparison with a literal (SyntaxWarning since Python 3.8,
            # implementation-defined result). Use equality instead.
            if attrs['inspectionReport_id'] != '':
                try:
                    report = InspectionReportModel.objects.get(id=attrs["inspectionReport_id"])  # referenced QC report must exist
                except Exception:
                    raise serializers.ValidationError("指定的质检报告不存在")
                attrs["inspectionReportType_code"] = report.type.code
                attrs["inspectionReportType_name"] = report.type.name
                attrs["inspectionReport_code"] = report.code
                attrs["inspectionReport_name"] = report.name
        return attrs

    # Auditor field validation.
    def validate_auditor(self, value):
        """Ensure the auditor account exists and holds the audit permission."""
        try:
            auditor = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_productmanagemodel'):
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class ProductManageSerialize_List(serializers.ModelSerializer) :
    """
    Product management -- list.

    Read-only summary serializer exposing the denormalized warehouse /
    position / product display fields for list endpoints.
    """
    class Meta :
        model = ProductManageModel
        fields = (
            "id", "name", "code", "state", "type", "warehouse_code", "warehouse_name", "position_code", "position_name",
            "productType_code", "productType_name", "product_code", "product_name", "handler", "batch", "sum", "dataTime", "auditor",
            "create_user","create_time","update_time")
class ProductManageSerialize_Retrieve(serializers.ModelSerializer) :
    """
    Product management -- retrieve (detail view).

    Exposes every model field plus the nested audit-trail records.
    """
    # Nested read-only list of alteration (audit-trail) records.
    alter = WarehouseAlterRecordSerialize_List(many=True)
    class Meta :
        model = ProductManageModel
        fields = "__all__"
class ProductManageSerialize_Update(serializers.ModelSerializer):
    """
    Product management -- update.

    Records may only be edited while still in the '新建' (new) state.
    `validate` re-resolves the referenced position / product /
    inspection-report rows and denormalizes their display fields into attrs.
    """
    class Meta:
        model = ProductManageModel
        fields = ("id", "name", "code", "type", "position_id", "product_id", "inspectionReport_id", "handler", "batch",
                  "sum", "dataTime", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor",)

    # Cross-field validation.
    def validate(self, attrs):
        """Validate referenced rows exist and copy their display fields into attrs."""
        if self.instance.state != '新建':  # editable only while in the 'new' state
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        try:
            position = PositionDefinitionModel.objects.get(id=attrs["position_id"])  # referenced position must exist
        except Exception:
            raise serializers.ValidationError("指定的仓位不存在")
        try:
            product = ProductInforDefinitionModel.objects.get(id=attrs["product_id"])  # referenced product must exist
        except Exception:
            raise serializers.ValidationError("指定的产品不存在")
        if product.state != "使用中":  # the product definition must be active
            raise serializers.ValidationError("指定的产品不在'使用中'状态")
        # Denormalize warehouse / position / product display fields.
        attrs["warehouse_code"] = position.type.code
        attrs["warehouse_name"] = position.type.name
        attrs["position_code"] = position.code
        attrs["position_name"] = position.name
        attrs["productType_code"] = product.type.code
        attrs["productType_name"] = product.type.name
        attrs["product_code"] = product.code
        attrs["product_name"] = product.name
        if 'inspectionReport_id' in attrs.keys():
            # BUG FIX: the original tested `is not ''` -- an identity
            # comparison with a literal (SyntaxWarning since Python 3.8,
            # implementation-defined result). Use equality instead.
            if attrs['inspectionReport_id'] != '':
                try:
                    report = InspectionReportModel.objects.get(id=attrs["inspectionReport_id"])  # referenced QC report must exist
                except Exception:
                    raise serializers.ValidationError("指定的质检报告不存在")
                attrs["inspectionReportType_code"] = report.type.code
                attrs["inspectionReportType_name"] = report.type.name
                attrs["inspectionReport_code"] = report.code
                attrs["inspectionReport_name"] = report.name
        return attrs

    # Auditor field validation.
    def validate_auditor(self, value):
        """Auditor must exist, hold audit permission, and (unless allowed) differ from the creator."""
        if self.instance.state != '新建':  # editable only while in the 'new' state
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        if settings.SAME_USER != True:
            if self.instance.create_user == value:  # auditor may not equal the creator
                raise serializers.ValidationError("审核帐号不能与创建帐号相同'")
        try:
            auditor = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_productmanagemodel'):
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class ProductManageSerialize_Partial(serializers.ModelSerializer):
    """
    Product management -- partial update (state workflow).

    Each management record carries an operation type (increase / stock-in /
    stock-return / stock-out / stock-take). When its state changes
    (submit, approve, reject, void) the matching handler validates the
    transition and applies the corresponding stock side effects.

    Changes vs. original: removed ~35 lines of commented-out dead code
    (old `outbound`) and an unreachable `return` after the final `raise`
    in `validate_state`. Executable logic is unchanged.
    """
    class Meta:
        model = ProductManageModel
        fields = ("id", "state", "alter")

    # Stock-in / stock-return operation: checks and side effects.
    def storage(self, state):
        """Reserve/release the target position and, on approval, book the stock."""
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target position
        if state == "审核中":  # submitted for review
            if position.state != "闲置":  # the position must currently be idle
                raise serializers.ValidationError("当前仓位不在‘空闲状态’")
            if self.instance.sum > position.maximum:  # quantity must fit the position capacity
                raise serializers.ValidationError("操作数量超出了仓位的最大容量’")
            position.state = "使用中"  # reserve the position
            position.save()
        if state == "新建":  # rejected back to 'new'
            position.state = "闲置"  # release the position
            position.save()
        if state == "完成":  # approved
            ProductStockDetailModel.objects.create(  # create a per-position stock-detail row
                state="使用中",
                warehouse_code=self.instance.warehouse_code,
                warehouse_name=self.instance.warehouse_name,
                position_id=self.instance.position_id,
                position_code=self.instance.position_code,
                position_name=self.instance.position_name,
                productType_code=self.instance.productType_code,
                productType_name=self.instance.productType_name,
                product_id=self.instance.product_id,
                product_code=self.instance.product_code,
                product_name=self.instance.product_name,
                batch=self.instance.batch,
                sum=self.instance.sum,
                attribute1=self.instance.attribute1,
                attribute2=self.instance.attribute2,
                attribute3=self.instance.attribute3,
                attribute4=self.instance.attribute4,
                attribute5=self.instance.attribute5)
            condtions1 = {'product_id__iexact': self.instance.product_id,
                          'warehouse_code__iexact': self.instance.warehouse_code,
                          'batch__iexact': self.instance.batch
                          }
            try:
                productStockInfor = ProductStockInforModel.objects.get(**condtions1)  # warehouse-level stock row
                productStockInfor.sum += self.instance.sum  # add to aggregate quantity
                productStockInfor.save()
            except Exception:  # no aggregate row for this warehouse/batch yet: create it
                ProductStockInforModel.objects.create(
                    warehouse_code=self.instance.warehouse_code,
                    warehouse_name=self.instance.warehouse_name,
                    productType_code=self.instance.productType_code,
                    productType_name=self.instance.productType_name,
                    product_id=self.instance.product_id,
                    product_code=self.instance.product_code,
                    product_name=self.instance.product_name,
                    batch=self.instance.batch,
                    sum=self.instance.sum,
                    attribute1=self.instance.attribute1,
                    attribute2=self.instance.attribute2,
                    attribute3=self.instance.attribute3,
                    attribute4=self.instance.attribute4,
                    attribute5=self.instance.attribute5)
        if state == "作废" and self.instance.state == "审核中":  # voided while under review
            position.state = "闲置"  # release the position
            position.save()

    # Increase operation: checks and side effects.
    def increase(self, state):
        """Check capacity on submit; add the quantity to detail and aggregate stock on approval."""
        condtions = {'state__iexact': "使用中",
                     'product_id__iexact': self.instance.product_id,
                     'position_id__iexact': self.instance.position_id,
                     'batch__iexact': self.instance.batch
                     }
        if state == "作废":  # voiding needs no stock checks
            return
        try:
            productStockDetail = ProductStockDetailModel.objects.get(**condtions)  # stock-detail row
        except Exception:
            raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target position
        if state == "审核中":  # submitted for review
            if (self.instance.sum + productStockDetail.sum) > position.maximum:  # must not exceed capacity
                raise serializers.ValidationError("当前增加数量加库存数量超出仓位最大容量")
        if state == "完成":  # approved
            condtions1 = {'product_id__iexact': self.instance.product_id,
                          'warehouse_code__iexact': self.instance.warehouse_code,
                          'batch__iexact': self.instance.batch
                          }
            try:
                productStockInfor = ProductStockInforModel.objects.get(**condtions1)  # warehouse-level stock row
            except Exception:
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            productStockInfor.sum += self.instance.sum  # update aggregate quantity
            productStockInfor.save()
            productStockDetail.sum += self.instance.sum  # update detail quantity
            productStockDetail.save()

    # Outbound operation: checks and side effects.
    def outbound(self, state):
        """Deduct stock on submit, restore it on reject/void, free the position on approval."""
        condtions = {'state__iexact': "使用中",
                     'product_id__iexact': self.instance.product_id,
                     'position_id__iexact': self.instance.position_id,
                     'batch__iexact': self.instance.batch
                     }
        try:
            productStockDetail = ProductStockDetailModel.objects.get(**condtions)  # stock-detail row
        except Exception:
            raise serializers.ValidationError("当前库存明细不存在,无法进行出库操作")
        condtions1 = {'product_id__iexact': self.instance.product_id,
                      'warehouse_code__iexact': self.instance.warehouse_code,
                      'batch__iexact': self.instance.batch
                      }
        try:
            productStockInfor = ProductStockInforModel.objects.get(**condtions1)  # warehouse-level stock row
        except Exception:
            raise serializers.ValidationError("当前库存信息与库存明细不符合")
        if (self.instance.state == "新建" and state == "作废"):  # voiding a new record: nothing booked yet
            return
        if (self.instance.state == "新建" and state == "审核中"):  # submitted: deduct stock up front
            if self.instance.sum > productStockDetail.sum:  # cannot take out more than is stored
                raise serializers.ValidationError("当前出库数量超出库存数量")
            productStockInfor.sum -= self.instance.sum  # update aggregate quantity
            productStockInfor.save()
            productStockDetail.sum -= self.instance.sum  # update detail quantity
            productStockDetail.save()
        if (self.instance.state == "审核中" and state == "完成"):  # approved
            position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target position
            if (productStockDetail.sum <= 0):  # position emptied: release it and close the detail row
                position.state = "闲置"
                position.save()
                productStockDetail.state = "完成"
                productStockDetail.save()
        if (self.instance.state == "审核中" and state == "新建"):  # rejected: restore the deducted stock
            productStockInfor.sum += self.instance.sum
            productStockInfor.save()
            productStockDetail.sum += self.instance.sum
            productStockDetail.save()
        if (self.instance.state == "审核中" and state == "作废"):  # voided during review: restore the deducted stock
            productStockInfor.sum += self.instance.sum
            productStockInfor.save()
            productStockDetail.sum += self.instance.sum
            productStockDetail.save()

    # Stock-take operation: checks and side effects.
    def inventory(self, state):
        """On approval, apply the signed adjustment; free the position when it reaches zero."""
        condtions = {'state__iexact': "使用中",
                     'product_id__iexact': self.instance.product_id,
                     'position_id__iexact': self.instance.position_id,
                     'batch__iexact': self.instance.batch
                     }
        if state == "作废":  # voiding needs no stock checks
            return
        try:
            productStockDetail = ProductStockDetailModel.objects.get(**condtions)  # stock-detail row
        except Exception:
            raise serializers.ValidationError("当前库存明细不存在,无法进行增加操作")
        position = PositionDefinitionModel.objects.get(id=self.instance.position_id)  # target position
        if state == "完成":  # approved
            condtions1 = {'product_id__iexact': self.instance.product_id,
                          'warehouse_code__iexact': self.instance.warehouse_code,
                          'batch__iexact': self.instance.batch
                          }
            try:
                productStockInfor = ProductStockInforModel.objects.get(**condtions1)  # warehouse-level stock row
            except Exception:
                raise serializers.ValidationError("当前库存信息与库存明细不符合")
            productStockInfor.sum += self.instance.sum  # apply adjustment to aggregate (sum may be negative)
            productStockInfor.save()
            productStockDetail.sum += self.instance.sum  # apply adjustment to detail
            productStockDetail.save()
            if (productStockDetail.sum <= 0):  # position emptied: release it and close the detail row
                position.state = "闲置"
                position.save()
                productStockDetail.state = "完成"
                productStockDetail.save()

    # Cross-field validation: dispatch by operation type.
    def validate(self, attrs):
        """Strip the audit-trail field and run the operation-specific handler."""
        attrs.pop('alter', None)  # 'alter' is handled by validate_alter, not persisted here
        if self.instance.type == "增加操作":
            self.increase(attrs['state'])
        elif self.instance.type == "入库操作" or self.instance.type == "退库操作":
            self.storage(attrs['state'])
        elif self.instance.type == "出库操作":
            self.outbound(attrs['state'])
        elif self.instance.type == "盘点操作":
            self.inventory(attrs['state'])
        return attrs

    # State field validation.
    def validate_state(self, value):
        """Restrict transitions to the allowed workflow edges.

        BUG FIX: the original had an unreachable `return value` after the
        final `raise`; it has been removed (behavior unchanged).
        """
        if (self.instance.create_user == self.context['request'].user.username) and \
                (self.instance.auditor != self.context['request'].user.username):  # creator who is not the auditor
            if not (self.instance.state == "新建" and (value == "审核中" or value == "作废")):
                raise serializers.ValidationError("创建者只能将[新建]信息更改成[审核中]或[作废]")
        if (self.instance.state == "新建" and
                (value == "审核中" or value == "作废")):
            return value
        if (self.instance.state == "审核中" and
                (value == "完成" or value == "新建" or value == "作废")):
            return value
        if (self.instance.state == "完成" and
                (value == "作废")):
            return value
        raise serializers.ValidationError("不能从" + self.instance.state + "更新到" + value)

    # Audit-trail field validation.
    def validate_alter(self, value):
        """Attach the submitted alteration records to this instance's m2m relation."""
        obj = ProductManageModel.objects.get(id=self.instance.id).alter
        for data in value:
            obj.add(data.id)
        return value
# endregion
# region 物料预警规则子项创建 序列化器
class MaterialWaringRuleItemSerialize_Create(serializers.ModelSerializer):
    """
    Material early-warning rule item -- create.
    """
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())  # stamp the requesting user

    class Meta:
        model = MaterialWaringRuleItemModel
        fields = ("id", "warehouse_code", "material_id", "batch", "minimum", "maximum",
                  "lowthreshold", "highthreshold", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "create_user")

    def validate(self, attrs):
        """Resolve the referenced warehouse and material, then denormalize their display fields."""
        try:
            store = WarehouseDefinitionModel.objects.get(code=attrs["warehouse_code"])
        except Exception:
            raise serializers.ValidationError("指定的仓库不存在")
        try:
            item = MaterialInforDefinitionModel.objects.get(id=attrs["material_id"])
        except Exception:
            raise serializers.ValidationError("指定的物料不存在")
        attrs.update(
            warehouse_name=store.name,            # warehouse display name
            materialType_code=item.type.code,     # material type code
            materialType_name=item.type.name,     # material type name
            material_code=item.code,              # material code
            material_name=item.name,              # material name
        )
        return attrs
class MaterialWaringRuleItemSerialize_List(serializers.ModelSerializer) :
    """
    Material early-warning rule item -- list.

    Read-only serializer exposing every model field.
    """
    class Meta :
        model = MaterialWaringRuleItemModel
        fields = "__all__"
# endregion
# region 物料预警规则创建 序列化器
class MaterialWaringRuleSerialize_Create(serializers.ModelSerializer):
    """
    Material early-warning rule -- create.
    """
    state = serializers.HiddenField(default="新建")  # new records always start as 'new'
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())  # stamp the requesting user

    class Meta:
        model = MaterialWaringRuleModel
        fields = ("id", "name", "code", "state", "file", "child", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor", "create_user")

    # Cross-field validation.
    def validate(self, attrs):
        """Check the creator's add permission and creator/auditor separation."""
        creator = attrs["create_user"]
        if not creator.has_perm('warehouse.add_materialwaringrulemodel'):
            raise serializers.ValidationError("当前用户不具备创建权限'")
        if settings.SAME_USER != True and creator.username == attrs["auditor"]:
            raise serializers.ValidationError("审核帐号不能与创建帐号相同'")
        return attrs

    # Auditor field validation.
    def validate_auditor(self, value):
        """Ensure the auditor account exists and holds the audit permission."""
        try:
            account = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if account.has_perm('warehouse.admin_materialwaringrulemodel'):
            return value
        raise serializers.ValidationError("指定的审核账号不具备审核权限")
class MaterialWaringRuleSerialize_List(serializers.ModelSerializer) :
    """
    Material early-warning rule -- list.

    Read-only summary serializer for list endpoints.
    """
    class Meta :
        model = MaterialWaringRuleModel
        fields = ("id", "name", "code", "state", "auditor", "create_user","create_time","update_time")
class MaterialWaringRuleSerialize_Retrieve(serializers.ModelSerializer) :
    """
    Material early-warning rule -- retrieve (detail view).

    Exposes every model field plus nested attachments, rule items, and
    audit-trail records.
    """
    # Nested read-only attachments.
    file = WarehouseFileSerialize_List(many=True)
    # Nested read-only rule items.
    child = MaterialWaringRuleItemSerialize_List(many=True)
    # Nested read-only alteration (audit-trail) records.
    alter = WarehouseAlterRecordSerialize_List(many=True)
    class Meta :
        model = MaterialWaringRuleModel
        fields = "__all__"
class MaterialWaringRuleSerialize_Update(serializers.ModelSerializer):
    """
    Material early-warning rule -- update.
    """
    class Meta:
        model = MaterialWaringRuleModel
        fields = ("id", "name", "code", "file", "child", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor")

    # Cross-field validation.
    def validate(self, attrs):
        """Allow edits only while the record is still in the 'new' state."""
        if self.instance.state == '新建':
            return attrs
        raise serializers.ValidationError("当前信息已提交,禁止更改")

    # Auditor field validation.
    def validate_auditor(self, value):
        """Check editability, creator/auditor separation, account existence and permission."""
        if self.instance.state != '新建':
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        if settings.SAME_USER != True and self.instance.create_user == value:
            raise serializers.ValidationError("审核帐号不能与创建帐号相同'")
        try:
            account = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if account.has_perm('warehouse.admin_materialwaringrulemodel'):
            return value
        raise serializers.ValidationError("指定的审核账号不具备审核权限")
class MaterialWaringRuleSerialize_Partial(serializers.ModelSerializer):
    """
    Material early-warning rule -- partial update (state transitions).
    """
    class Meta:
        model = MaterialWaringRuleModel
        fields = ("id", "state", "alter")

    # Cross-field validation.
    def validate(self, attrs):
        """Drop the audit-trail field; it is handled separately by validate_alter."""
        attrs.pop('alter', None)
        return attrs

    # State field validation.
    def validate_state(self, value):
        """Check the transition is legal and restrict what a pure creator may do."""
        validate_states(self.instance.state, value)
        requester = self.context['request'].user.username
        if self.instance.create_user == requester and self.instance.auditor != requester:
            # A creator who is not the auditor may only submit or void a new record.
            creator_allowed = (self.instance.state == "新建" and value in ("审核中", "作废"))
            if not creator_allowed:
                raise serializers.ValidationError("创建者只能将[新建]信息更改成[审核中]或[作废]")
        return value

    # Audit-trail field validation.
    def validate_alter(self, value):
        """Attach the submitted alteration records to this instance's m2m relation."""
        relation = MaterialWaringRuleModel.objects.get(id=self.instance.id).alter
        for record in value:
            relation.add(record.id)
        return value
# endregion
# region 半成品预警规则子项创建 序列化器
class SemifinishedWaringRuleItemSerialize_Create(serializers.ModelSerializer):
    """
    Semi-finished product warning rule item -- create serializer.
    """
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())

    class Meta:
        model = SemifinishedWaringRuleItemModel
        fields = ("id", "warehouse_code", "semifinished_id", "batch", "minimum", "maximum",
                  "lowthreshold", "highthreshold", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "create_user")

    def validate(self, attrs):
        # Resolve the referenced warehouse; reject unknown codes.
        try:
            warehouse = WarehouseDefinitionModel.objects.get(code=attrs["warehouse_code"])
        except Exception:
            raise serializers.ValidationError("指定的仓库不存在")
        # Resolve the referenced semi-finished product; reject unknown ids.
        try:
            item = SemifinishedInforDefinitionModel.objects.get(id=attrs["semifinished_id"])
        except Exception:
            raise serializers.ValidationError("指定的半成品不存在")
        # Denormalize names/codes onto the rule item for display purposes.
        attrs["warehouse_name"] = warehouse.name
        attrs["semifinishedType_code"] = item.type.code
        attrs["semifinishedType_name"] = item.type.name
        attrs["semifinished_code"] = item.code
        attrs["semifinished_name"] = item.name
        return attrs
class SemifinishedWaringRuleItemSerialize_List(serializers.ModelSerializer) :
    """
    Semi-finished product warning rule item -- list serializer (all model fields).
    """
    class Meta :
        model = SemifinishedWaringRuleItemModel
        fields = "__all__"
# endregion
# region 半成品预警规则创建 序列化器
class SemifinishedWaringRuleSerialize_Create(serializers.ModelSerializer):
    """
    Semi-finished product warning rule -- create serializer.

    New records always start in the '新建' state; the creating user is taken
    from the request, never from the payload.
    """
    state = serializers.HiddenField(default="新建")
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())

    class Meta:
        model = SemifinishedWaringRuleModel
        fields = ("id", "name", "code", "state", "file", "child", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor", "create_user")

    # Validate all fields: creator needs the add permission, and the auditor
    # may not equal the creator unless settings.SAME_USER allows it.
    def validate(self, attrs):
        if not attrs["create_user"].has_perm('warehouse.add_semifinishedwaringrulemodel'):
            # fixed: message previously ended with a stray quote character
            raise serializers.ValidationError("当前用户不具备创建权限")
        if settings.SAME_USER != True:
            if attrs["create_user"].username == attrs["auditor"]:
                # fixed: message previously ended with a stray quote character
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        return attrs

    # Auditor field: the account must exist and hold the audit permission.
    def validate_auditor(self, value):
        try:
            auditor = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_semifinishedwaringrulemodel'):
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class SemifinishedWaringRuleSerialize_List(serializers.ModelSerializer) :
    """
    Semi-finished product warning rule -- list serializer (summary fields only).
    """
    class Meta :
        model = SemifinishedWaringRuleModel
        fields = ("id", "name", "code", "state", "auditor", "create_user","create_time","update_time")
class SemifinishedWaringRuleSerialize_Retrieve(serializers.ModelSerializer) :
    """
    Semi-finished product warning rule -- retrieve serializer.

    Nests the attached files, child rule items and audit records in full.
    """
    file = WarehouseFileSerialize_List(many=True)
    child = SemifinishedWaringRuleItemSerialize_List(many=True)
    alter = WarehouseAlterRecordSerialize_List(many=True)
    class Meta :
        model = SemifinishedWaringRuleModel
        fields = "__all__"
class SemifinishedWaringRuleSerialize_Update(serializers.ModelSerializer):
    """
    Semi-finished product warning rule -- update serializer.

    Only records still in the '新建' (new) state may be edited.
    """
    class Meta:
        model = SemifinishedWaringRuleModel
        fields = ("id", "name", "code", "file", "child", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor")

    # Validate all fields: the record must still be in the '新建' state.
    def validate(self, attrs):
        if self.instance.state != '新建':
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        return attrs

    # Auditor field: must exist, differ from the creator (unless SAME_USER
    # is allowed in settings), and hold the audit permission.
    def validate_auditor(self, value):
        if self.instance.state != '新建':
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        if settings.SAME_USER != True:
            if self.instance.create_user == value:
                # fixed: message previously ended with a stray quote character
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        try:
            auditor = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_semifinishedwaringrulemodel'):
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class SemifinishedWaringRuleSerialize_Partial(serializers.ModelSerializer):
    """
    Semi-finished product warning rule -- partial update serializer
    (state transitions and audit-record attachment).
    """
    class Meta:
        model = SemifinishedWaringRuleModel
        fields = ("id", "state", "alter")

    def validate(self, attrs):
        # 'alter' is applied separately in validate_alter; drop it from attrs
        # so the model update does not touch it.
        attrs.pop('alter', None)
        return attrs

    def validate_state(self, value):
        validate_states(self.instance.state, value)
        current_user = self.context['request'].user.username
        is_creator = self.instance.create_user == current_user
        is_auditor = self.instance.auditor == current_user
        if is_creator and not is_auditor:
            # A creator who is not the auditor may only move '新建' records
            # to '审核中' or '作废'.
            if not (self.instance.state == "新建" and value in ("审核中", "作废")):
                raise serializers.ValidationError("创建者只能将[新建]信息更改成[审核中]或[作废]")
        return value

    def validate_alter(self, value):
        # Attach each submitted audit record to this rule's alter relation.
        relation = SemifinishedWaringRuleModel.objects.get(id=self.instance.id).alter
        for record in value:
            relation.add(record.id)
        return value
# endregion
# region 产品预警规则子项创建 序列化器
class ProductWaringRuleItemSerialize_Create(serializers.ModelSerializer):
    """
    Product warning rule item -- create serializer.
    """
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())

    class Meta:
        model = ProductWaringRuleItemModel
        fields = ("id", "warehouse_code", "product_id", "batch", "minimum", "maximum",
                  "lowthreshold", "highthreshold", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "create_user")

    def validate(self, attrs):
        # Resolve the referenced warehouse; reject unknown codes.
        try:
            warehouse = WarehouseDefinitionModel.objects.get(code=attrs["warehouse_code"])
        except Exception:
            raise serializers.ValidationError("指定的仓库不存在")
        # Resolve the referenced product; reject unknown ids.
        try:
            item = ProductInforDefinitionModel.objects.get(id=attrs["product_id"])
        except Exception:
            raise serializers.ValidationError("指定的产品不存在")
        # Denormalize names/codes onto the rule item for display purposes.
        attrs["warehouse_name"] = warehouse.name
        attrs["productType_code"] = item.type.code
        attrs["productType_name"] = item.type.name
        attrs["product_code"] = item.code
        attrs["product_name"] = item.name
        return attrs
class ProductWaringRuleItemSerialize_List(serializers.ModelSerializer) :
    """
    Product warning rule item -- list serializer (all model fields).
    """
    class Meta :
        model = ProductWaringRuleItemModel
        fields = "__all__"
# endregion
# region 产品预警规则创建 序列化器
class ProductWaringRuleSerialize_Create(serializers.ModelSerializer):
    """
    Product warning rule -- create serializer.

    New records always start in the '新建' state; the creating user is taken
    from the request, never from the payload.
    """
    state = serializers.HiddenField(default="新建")
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())

    class Meta:
        model = ProductWaringRuleModel
        fields = ("id", "name", "code", "state", "file", "child", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor", "create_user")

    # Validate all fields: creator needs the add permission, and the auditor
    # may not equal the creator unless settings.SAME_USER allows it.
    def validate(self, attrs):
        if not attrs["create_user"].has_perm('warehouse.add_productwaringrulemodel'):
            # fixed: message previously ended with a stray quote character
            raise serializers.ValidationError("当前用户不具备创建权限")
        if settings.SAME_USER != True:
            if attrs["create_user"].username == attrs["auditor"]:
                # fixed: message previously ended with a stray quote character
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        return attrs

    # Auditor field: the account must exist and hold the audit permission.
    def validate_auditor(self, value):
        try:
            auditor = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_productwaringrulemodel'):
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class ProductWaringRuleSerialize_List(serializers.ModelSerializer) :
    """
    Product warning rule -- list serializer (summary fields only).
    """
    class Meta :
        model = ProductWaringRuleModel
        fields = ("id", "name", "code", "state", "auditor", "create_user","create_time","update_time")
class ProductWaringRuleSerialize_Retrieve(serializers.ModelSerializer) :
    """
    Product warning rule -- retrieve serializer.

    Nests the attached files, child rule items and audit records in full.
    """
    file = WarehouseFileSerialize_List(many=True)
    child = ProductWaringRuleItemSerialize_List(many=True)
    alter = WarehouseAlterRecordSerialize_List(many=True)
    class Meta :
        model = ProductWaringRuleModel
        fields = "__all__"
class ProductWaringRuleSerialize_Update(serializers.ModelSerializer):
    """
    Product warning rule -- update serializer.

    Only records still in the '新建' (new) state may be edited.
    """
    class Meta:
        model = ProductWaringRuleModel
        fields = ("id", "name", "code", "file", "child", "attribute1", "attribute2",
                  "attribute3", "attribute4", "attribute5", "desc", "auditor")

    # Validate all fields: the record must still be in the '新建' state.
    def validate(self, attrs):
        if self.instance.state != '新建':
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        return attrs

    # Auditor field: must exist, differ from the creator (unless SAME_USER
    # is allowed in settings), and hold the audit permission.
    def validate_auditor(self, value):
        if self.instance.state != '新建':
            raise serializers.ValidationError("当前信息已提交,禁止更改")
        if settings.SAME_USER != True:
            if self.instance.create_user == value:
                # fixed: message previously ended with a stray quote character
                raise serializers.ValidationError("审核帐号不能与创建帐号相同")
        try:
            auditor = User.objects.get(username=value)
        except Exception:
            raise serializers.ValidationError("指定的审核账号不存在")
        if not auditor.has_perm('warehouse.admin_productwaringrulemodel'):
            raise serializers.ValidationError("指定的审核账号不具备审核权限")
        return value
class ProductWaringRuleSerialize_Partial(serializers.ModelSerializer):
    """
    Product warning rule -- partial update serializer (state transitions
    and audit-record attachment).
    """
    class Meta:
        model = ProductWaringRuleModel
        fields = ("id", "state", "alter")

    def validate(self, attrs):
        # 'alter' is applied separately in validate_alter; drop it from attrs
        # so the model update does not touch it.
        attrs.pop('alter', None)
        return attrs

    def validate_state(self, value):
        validate_states(self.instance.state, value)
        current_user = self.context['request'].user.username
        is_creator = self.instance.create_user == current_user
        is_auditor = self.instance.auditor == current_user
        if is_creator and not is_auditor:
            # A creator who is not the auditor may only move '新建' records
            # to '审核中' or '作废'.
            if not (self.instance.state == "新建" and value in ("审核中", "作废")):
                raise serializers.ValidationError("创建者只能将[新建]信息更改成[审核中]或[作废]")
        return value

    def validate_alter(self, value):
        # Attach each submitted audit record to this rule's alter relation.
        relation = ProductWaringRuleModel.objects.get(id=self.instance.id).alter
        for record in value:
            relation.add(record.id)
        return value
# endregion
| 44.541317
| 133
| 0.591964
| 9,620
| 111,576
| 6.742308
| 0.03368
| 0.088435
| 0.088898
| 0.024699
| 0.912551
| 0.904041
| 0.899061
| 0.893742
| 0.88184
| 0.871757
| 0
| 0.003278
| 0.305442
| 111,576
| 2,504
| 134
| 44.559105
| 0.833682
| 0.104099
| 0
| 0.897051
| 0
| 0
| 0.130467
| 0.024939
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042359
| false
| 0.00429
| 0.005362
| 0
| 0.17319
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7080f59366ca622d25a3dd71abe90e9d36f22207
| 2,297
|
py
|
Python
|
pepdb/core/migrations/0114_auto_20170907_2314.py
|
dchaplinsky/pep.org.ua
|
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
|
[
"MIT"
] | 7
|
2015-12-21T03:52:46.000Z
|
2020-07-24T19:17:23.000Z
|
pepdb/core/migrations/0114_auto_20170907_2314.py
|
dchaplinsky/pep.org.ua
|
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
|
[
"MIT"
] | 12
|
2016-03-05T18:11:05.000Z
|
2021-06-17T20:20:03.000Z
|
pepdb/core/migrations/0114_auto_20170907_2314.py
|
dchaplinsky/pep.org.ua
|
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
|
[
"MIT"
] | 4
|
2016-07-17T20:19:38.000Z
|
2021-03-23T12:47:20.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-07 20:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.11.5 (2017-09-07). The \u escapes below are
    # Ukrainian UI strings (verbose_name and status choices).
    dependencies = [
        ('core', '0113_auto_20170905_1435'),
    ]
    operations = [
        migrations.AlterField(
            model_name='company',
            name='state_company',
            # verbose_name: "Керівник — ПЕП" ("head is a PEP")
            field=models.BooleanField(default=False, verbose_name='\u041a\u0435\u0440\u0456\u0432\u043d\u0438\u043a \u2014 \u041f\u0415\u041f'),
        ),
        migrations.AlterField(
            model_name='company',
            name='status',
            # Registration-status choices (0..8), e.g. "інформація відсутня",
            # "зареєстровано", "припинено", ... verbose_name: "Поточний стан".
            field=models.IntegerField(choices=[(0, '\u0456\u043d\u0444\u043e\u0440\u043c\u0430\u0446\u0456\u044f \u0432\u0456\u0434\u0441\u0443\u0442\u043d\u044f'), (1, '\u0437\u0430\u0440\u0435\u0454\u0441\u0442\u0440\u043e\u0432\u0430\u043d\u043e'), (2, '\u043f\u0440\u0438\u043f\u0438\u043d\u0435\u043d\u043e'), (3, '\u0432 \u0441\u0442\u0430\u043d\u0456 \u043f\u0440\u0438\u043f\u0438\u043d\u0435\u043d\u043d\u044f'), (4, '\u0437\u0430\u0440\u0435\u0454\u0441\u0442\u0440\u043e\u0432\u0430\u043d\u043e, \u0441\u0432\u0456\u0434\u043e\u0446\u0442\u0432\u043e \u043f\u0440\u043e \u0434\u0435\u0440\u0436\u0430\u0432\u043d\u0443 \u0440\u0435\u0454\u0441\u0442\u0440\u0430\u0446\u0456\u044e \u043d\u0435\u0434\u0456\u0439\u0441\u043d\u0435'), (5, '\u043f\u043e\u0440\u0443\u0448\u0435\u043d\u043e \u0441\u043f\u0440\u0430\u0432\u0443 \u043f\u0440\u043e \u0431\u0430\u043d\u043a\u0440\u0443\u0442\u0441\u0442\u0432\u043e'), (6, '\u043f\u043e\u0440\u0443\u0448\u0435\u043d\u043e \u0441\u043f\u0440\u0430\u0432\u0443 \u043f\u0440\u043e \u0431\u0430\u043d\u043a\u0440\u0443\u0442\u0441\u0442\u0432\u043e (\u0441\u0430\u043d\u0430\u0446\u0456\u044f)'), (7, '\u0440\u043e\u0437\u043f\u043e\u0440\u044f\u0434\u0436\u0435\u043d\u043d\u044f \u043c\u0430\u0439\u043d\u043e\u043c'), (8, '\u043b\u0456\u043a\u0432\u0456\u0434\u0430\u0446\u0456\u044f')], default=0, verbose_name='\u041f\u043e\u0442\u043e\u0447\u043d\u0438\u0439 \u0441\u0442\u0430\u043d'),
        ),
        migrations.AddIndex(
            model_name='declaration',
            # Composite index to speed confirmed/fuzziness/batch lookups.
            index=models.Index(fields=['confirmed', 'fuzziness', 'batch_number'], name='core_declar_confirm_961c7e_idx'),
        ),
    ]
| 76.566667
| 1,443
| 0.715716
| 318
| 2,297
| 5.110063
| 0.295597
| 0.043077
| 0.036923
| 0.036923
| 0.369231
| 0.369231
| 0.301538
| 0.301538
| 0.252308
| 0.252308
| 0
| 0.423304
| 0.114497
| 2,297
| 29
| 1,444
| 79.206897
| 0.375615
| 0.029604
| 0
| 0.318182
| 1
| 0.409091
| 0.669811
| 0.5885
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7091cb63c9218a9965fbd4b09b5342ef30bd2aa3
| 6,839
|
py
|
Python
|
test/test_wavenet.py
|
sw005320/PytorchWaveNetVocoder
|
b92d7af7d5f2794291e0d462694c0719f75ca469
|
[
"Apache-2.0"
] | 1
|
2021-01-18T06:22:30.000Z
|
2021-01-18T06:22:30.000Z
|
test/test_wavenet.py
|
sw005320/PytorchWaveNetVocoder
|
b92d7af7d5f2794291e0d462694c0719f75ca469
|
[
"Apache-2.0"
] | null | null | null |
test/test_wavenet.py
|
sw005320/PytorchWaveNetVocoder
|
b92d7af7d5f2794291e0d462694c0719f75ca469
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2017 Tomoki Hayashi (Nagoya University)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
from __future__ import absolute_import
import logging
import numpy as np
import torch
from torch.autograd import Variable
from wavenet import encode_mu_law
from wavenet import initialize
from wavenet import WaveNet
# set log level
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s',
datefmt='%m/%d/%Y %I:%M:%S')
def sine_generator(seq_size=100, mu=256):
    """Yield mu-law encoded chunks of a fixed two-tone sine mixture.

    Args:
        seq_size (int): number of samples per yielded sequence.
        mu (int): number of mu-law quantization channels.

    Yields:
        Variable wrapping a length-``seq_size`` LongTensor of encoded samples.
    """
    t = np.linspace(0, 1, 16000)
    data = np.sin(2 * np.pi * 220 * t) + np.sin(2 * np.pi * 224 * t)
    data = data / 2
    while True:
        # fixed: removed dead store `ys = data[:seq_size]` that was
        # immediately overwritten by the encode_mu_law result
        ys = encode_mu_law(data, mu)
        yield Variable(torch.from_numpy(ys[:seq_size]))
def test_forward():
    """Check WaveNet forward-pass output shapes for kernel sizes 2 and 3,
    both with and without auxiliary-feature upsampling."""
    # get batch
    generator = sine_generator(100)
    batch = next(generator)
    batch_input = batch.view(1, -1)
    batch_aux = Variable(torch.rand(1, 28, batch_input.size(1)).float())

    # define model without upsampling with kernel size = 2
    net = WaveNet(256, 28, 32, 128, 10, 1, 2)
    net.apply(initialize)
    net.eval()
    y = net(batch_input, batch_aux)[0]
    assert y.size(0) == batch_input.size(1)
    assert y.size(1) == 256

    # define model without upsampling with kernel size = 3
    # fixed: kernel size was 2, silently duplicating the case above
    net = WaveNet(256, 28, 32, 128, 10, 1, 3)
    net.apply(initialize)
    net.eval()
    y = net(batch_input, batch_aux)[0]
    assert y.size(0) == batch_input.size(1)
    assert y.size(1) == 256

    batch_input = batch.view(1, -1)
    batch_aux = Variable(torch.rand(1, 28, batch_input.size(1) // 10).float())

    # define model with upsampling and kernel size = 2
    net = WaveNet(256, 28, 32, 128, 10, 1, 2, 10)
    net.apply(initialize)
    net.eval()
    y = net(batch_input, batch_aux)[0]
    assert y.size(0) == batch_input.size(1)
    assert y.size(1) == 256

    # define model with upsampling and kernel size = 3
    net = WaveNet(256, 28, 32, 128, 10, 1, 3, 10)
    net.apply(initialize)
    net.eval()
    y = net(batch_input, batch_aux)[0]
    assert y.size(0) == batch_input.size(1)
    assert y.size(1) == 256
def _check_generation(net, x, h, length):
    """Assert that sample-by-sample, fast, and batch generation agree for one net."""
    # sample-by-sample generation
    gen1_list = []
    gen2_list = []
    for x_, h_ in zip(x, h):
        batch_x = Variable(torch.from_numpy(np.expand_dims(x_, 0)).long())
        batch_h = Variable(torch.from_numpy(np.expand_dims(h_, 0)).float())
        gen1 = net.generate(batch_x, batch_h, length, 1, "argmax")
        gen2 = net.fast_generate(batch_x, batch_h, length, 1, "argmax")
        np.testing.assert_array_equal(gen1, gen2)
        gen1_list += [gen1]
        gen2_list += [gen2]
    gen1 = np.stack(gen1_list)
    gen2 = np.stack(gen2_list)
    np.testing.assert_array_equal(gen1, gen2)
    # batch generation
    batch_x = Variable(torch.from_numpy(x).long())
    batch_h = Variable(torch.from_numpy(h).float())
    gen3_list = net.batch_fast_generate(batch_x, batch_h, [length] * len(x), 1, "argmax")
    gen3 = np.stack(gen3_list)
    np.testing.assert_array_equal(gen3, gen2)


def test_generate():
    """Check generate / fast_generate / batch_fast_generate consistency for
    kernel sizes 2 and 3, with and without upsampling."""
    # get batch
    batch = 2
    x = np.random.randint(0, 256, size=(batch, 1))
    h = np.random.randn(batch, 28, 100)
    length = h.shape[-1] - 1

    # define model without upsampling and with kernel size = 2
    net = WaveNet(256, 28, 16, 32, 10, 3, 2)
    net.apply(initialize)
    net.eval()
    _check_generation(net, x, h, length)

    # define model without upsampling and with kernel size = 3
    net = WaveNet(256, 28, 16, 32, 10, 3, 3)
    net.apply(initialize)
    net.eval()
    _check_generation(net, x, h, length)

    # get batch
    batch = 2
    upsampling_factor = 10
    x = np.random.randint(0, 256, size=(batch, 1))
    h = np.random.randn(batch, 28, 10)
    length = h.shape[-1] * upsampling_factor - 1

    # define model with upsampling and with kernel size = 2
    net = WaveNet(256, 28, 16, 32, 10, 3, 2, upsampling_factor)
    net.apply(initialize)
    net.eval()
    _check_generation(net, x, h, length)

    # define model with upsampling and with kernel size = 3
    # fixed: kernel size was 2, silently duplicating the case above
    net = WaveNet(256, 28, 16, 32, 10, 3, 3, upsampling_factor)
    net.apply(initialize)
    net.eval()
    _check_generation(net, x, h, length)
| 34.366834
| 92
| 0.641176
| 1,043
| 6,839
| 4.024928
| 0.123682
| 0.028585
| 0.068842
| 0.08909
| 0.826584
| 0.82182
| 0.82182
| 0.800381
| 0.787994
| 0.787994
| 0
| 0.061247
| 0.21933
| 6,839
| 198
| 93
| 34.540404
| 0.725042
| 0.116976
| 0
| 0.763889
| 0
| 0.006944
| 0.025104
| 0.003824
| 0
| 0
| 0
| 0
| 0.138889
| 1
| 0.020833
| false
| 0
| 0.055556
| 0
| 0.076389
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5625a2f2f649212d4e034063545d90e2b8899a3d
| 5,511
|
py
|
Python
|
moonclient/moonclient/action_assignments.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
moonclient/moonclient/action_assignments.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
moonclient/moonclient/action_assignments.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
# This software is distributed under the terms and conditions of the 'Apache-2.0'
# license which can be found in the file 'LICENSE' in this package distribution
# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
import logging
from cliff.lister import Lister
from cliff.command import Command
class ActionAssignmentsList(Lister):
    """List all action assignments."""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        parser = super(ActionAssignmentsList, self).get_parser(prog_name)
        # (flag, metavar, help) triples; order matches the CLI signature.
        for flag, metavar, help_text in (
                ('action_id', '<action-uuid>', 'Action UUID'),
                ('action_category_id', '<action-category-uuid>', 'Action category UUID'),
                ('--intraextension', '<intraextension-uuid>', 'IntraExtension UUID')):
            parser.add_argument(flag, metavar=metavar, help=help_text)
        return parser

    def __get_scope_from_id(self, intraextension_id, action_category_id, action_scope_id):
        # Resolve a scope UUID to its record via the action_scopes endpoint;
        # returns None when the id is unknown.
        url = self.app.url_prefix + "/intra_extensions/{}/action_scopes/{}".format(
            intraextension_id, action_category_id)
        scopes = self.app.get_url(url, authtoken=True)
        if action_scope_id in scopes:
            return scopes[action_scope_id]

    def take_action(self, parsed_args):
        if not parsed_args.intraextension:
            parsed_args.intraextension = self.app.intraextension
        url = self.app.url_prefix + "/intra_extensions/{}/action_assignments/{}/{}".format(
            parsed_args.intraextension, parsed_args.action_id, parsed_args.action_category_id)
        assignments = self.app.get_url(url, authtoken=True)
        rows = ((_id, self.__get_scope_from_id(parsed_args.intraextension,
                                               parsed_args.action_category_id,
                                               _id)['name'])
                for _id in assignments)
        return (("id", "name"), rows)
class ActionAssignmentsAdd(Command):
    """Add a new action assignment."""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        parser = super(ActionAssignmentsAdd, self).get_parser(prog_name)
        # (flag, metavar, help) triples; order matches the CLI signature.
        for flag, metavar, help_text in (
                ('action_id', '<action-uuid>', 'Action UUID'),
                ('action_category_id', '<action-category-uuid>', 'Action category UUID'),
                ('action_scope_id', '<action-scope-uuid>', 'Action scope UUID'),
                ('--intraextension', '<intraextension-uuid>', 'IntraExtension UUID')):
            parser.add_argument(flag, metavar=metavar, help=help_text)
        return parser

    def __get_scope_from_id(self, intraextension_id, action_category_id, action_scope_id):
        # Resolve a scope UUID to its record via the action_scopes endpoint;
        # returns None when the id is unknown.
        url = self.app.url_prefix + "/intra_extensions/{}/action_scopes/{}".format(
            intraextension_id, action_category_id)
        scopes = self.app.get_url(url, authtoken=True)
        if action_scope_id in scopes:
            return scopes[action_scope_id]

    def take_action(self, parsed_args):
        if not parsed_args.intraextension:
            parsed_args.intraextension = self.app.intraextension
        url = self.app.url_prefix + "/intra_extensions/{}/action_assignments".format(
            parsed_args.intraextension)
        payload = {"action_id": parsed_args.action_id,
                   "action_category_id": parsed_args.action_category_id,
                   "action_scope_id": parsed_args.action_scope_id}
        assignments = self.app.get_url(url, post_data=payload, authtoken=True)
        rows = ((_id, self.__get_scope_from_id(parsed_args.intraextension,
                                               parsed_args.action_category_id,
                                               _id)['name'])
                for _id in assignments)
        return (("id", "name"), rows)
class ActionAssignmentsDelete(Command):
    """Delete an action assignment."""

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        parser = super(ActionAssignmentsDelete, self).get_parser(prog_name)
        # (flag, metavar, help) triples; order matches the CLI signature.
        for flag, metavar, help_text in (
                ('action_id', '<action-uuid>', 'Action UUID'),
                ('action_category_id', '<action-category-uuid>', 'Action category UUID'),
                ('action_scope_id', '<action-scope-uuid>', 'Action scope UUID'),
                ('--intraextension', '<intraextension-uuid>', 'IntraExtension UUID')):
            parser.add_argument(flag, metavar=metavar, help=help_text)
        return parser

    def take_action(self, parsed_args):
        if not parsed_args.intraextension:
            parsed_args.intraextension = self.app.intraextension
        url = self.app.url_prefix + "/intra_extensions/{}/action_assignments/{}/{}/{}".format(
            parsed_args.intraextension,
            parsed_args.action_id,
            parsed_args.action_category_id,
            parsed_args.action_scope_id)
        self.app.get_url(url, method="DELETE", authtoken=True)
| 36.986577
| 129
| 0.589548
| 569
| 5,511
| 5.414763
| 0.163445
| 0.077897
| 0.067511
| 0.059721
| 0.831548
| 0.823759
| 0.791626
| 0.791626
| 0.791626
| 0.791626
| 0
| 0.002103
| 0.309563
| 5,511
| 149
| 130
| 36.986577
| 0.807622
| 0.066957
| 0
| 0.707317
| 0
| 0
| 0.160617
| 0.065379
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065041
| false
| 0
| 0.02439
| 0
| 0.195122
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
568a5ac245a25f1437696c117f03742ed8394113
| 153
|
py
|
Python
|
main/states/__init__.py
|
tmccormi/susi_linux
|
c9551bd6313f88aea5d3b531558e05aebe1a00a9
|
[
"Apache-2.0"
] | 2
|
2019-12-30T20:34:22.000Z
|
2019-12-30T20:38:50.000Z
|
main/states/__init__.py
|
NoorHasanShaik86/susi_linux
|
8bb663262b62dc7eb8d79ecde823b8e97df4387d
|
[
"Apache-2.0"
] | 1
|
2021-06-25T15:31:14.000Z
|
2021-06-25T15:31:14.000Z
|
main/states/__init__.py
|
NoorHasanShaik86/susi_linux
|
8bb663262b62dc7eb8d79ecde823b8e97df4387d
|
[
"Apache-2.0"
] | 1
|
2019-02-22T04:19:19.000Z
|
2019-02-22T04:19:19.000Z
|
"""This module defines all the states and their actions.
"""
from .susi_state_machine import SusiStateMachine
from .susi_state_machine import Components
| 30.6
| 56
| 0.823529
| 21
| 153
| 5.809524
| 0.761905
| 0.131148
| 0.213115
| 0.327869
| 0.42623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 153
| 4
| 57
| 38.25
| 0.903704
| 0.346405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5696c5449196860f44e6cea91f4c685e6c00fb53
| 129
|
py
|
Python
|
python_purify/__init__.py
|
kingthomasc/python-purify
|
49efca0a0a6273ba6fe99810e9e0ce1b6c21123c
|
[
"MIT"
] | 5
|
2016-03-30T12:39:18.000Z
|
2019-04-05T05:38:55.000Z
|
python_purify/__init__.py
|
kingthomasc/python-purify
|
49efca0a0a6273ba6fe99810e9e0ce1b6c21123c
|
[
"MIT"
] | 7
|
2015-11-05T16:01:28.000Z
|
2019-09-08T18:28:42.000Z
|
python_purify/__init__.py
|
kingthomasc/python-purify
|
49efca0a0a6273ba6fe99810e9e0ce1b6c21123c
|
[
"MIT"
] | 10
|
2015-11-05T00:17:40.000Z
|
2019-09-06T03:51:24.000Z
|
from __future__ import absolute_import
from .core import ImagePurify
from .core import WordPurify
from .core import VideoPurify
| 21.5
| 38
| 0.844961
| 17
| 129
| 6.117647
| 0.470588
| 0.230769
| 0.403846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131783
| 129
| 5
| 39
| 25.8
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3b15123cec0ed744d6754538a13ca86fb51763c4
| 1,729
|
py
|
Python
|
firecrown/ccl/likelihoods/tests/test_tdist.py
|
LSSTDESC/firecrown
|
646c15809b48a528a833d2bef3b180b91c3af189
|
[
"BSD-3-Clause"
] | 15
|
2018-11-27T20:41:07.000Z
|
2022-02-23T19:20:02.000Z
|
firecrown/ccl/likelihoods/tests/test_tdist.py
|
LSSTDESC/firecrown
|
646c15809b48a528a833d2bef3b180b91c3af189
|
[
"BSD-3-Clause"
] | 75
|
2018-10-17T13:46:07.000Z
|
2021-08-12T08:22:49.000Z
|
firecrown/ccl/likelihoods/tests/test_tdist.py
|
LSSTDESC/firecrown
|
646c15809b48a528a833d2bef3b180b91c3af189
|
[
"BSD-3-Clause"
] | 2
|
2019-02-08T14:31:02.000Z
|
2022-03-07T05:21:23.000Z
|
import numpy as np
from ..tdist import TdistLogLike
def test_likelihood_tdist_smoke(likelihood_test_data):
    """Smoke-test TdistLogLike on the fixture's full data vector."""
    dof = 25
    like = TdistLogLike(
        data_vector=likelihood_test_data['data_vector'],
        nu=dof)
    like.read(
        likelihood_test_data['sacc_data'],
        likelihood_test_data['sources'],
        likelihood_test_data['statistics'])
    assert like.data_vector == likelihood_test_data['data_vector']

    delta = likelihood_test_data['delta']
    data = likelihood_test_data['data']
    theory = likelihood_test_data['theory']
    cov = likelihood_test_data['cov']

    # Reference Student-t log-likelihood computed directly from the fixture.
    chi2 = np.dot(delta, np.dot(np.linalg.inv(cov), delta))
    expected = -0.5 * dof * np.log(1.0 + chi2 / (dof - 1.0))
    assert np.allclose(expected, like.compute(data, theory))

    stacked = np.concatenate([data[name] for name in like.data_vector])
    assert np.allclose(like.assemble_data_vector(data), stacked)
def test_likelihood_tdist_subset(likelihood_test_data):
    """TdistLogLike restricted to two statistics must use the matching
    covariance sub-block."""
    dof = 25
    like = TdistLogLike(
        data_vector=["stat_src0_src0", "stat_src0_src1"],
        nu=dof)
    like.read(
        likelihood_test_data['sacc_data'],
        likelihood_test_data['sources'],
        likelihood_test_data['statistics'])
    assert like.data_vector == ["stat_src0_src0", "stat_src0_src1"]

    delta = likelihood_test_data['delta'][0:4]
    data = likelihood_test_data['data']
    theory = likelihood_test_data['theory']
    cov = likelihood_test_data['cov'][0:4, 0:4]

    # Reference Student-t log-likelihood over the first four entries only.
    chi2 = np.dot(delta, np.dot(np.linalg.inv(cov), delta))
    expected = -0.5 * dof * np.log(1.0 + chi2 / (dof - 1.0))
    assert np.allclose(expected, like.compute(data, theory))

    stacked = np.concatenate([data[name] for name in like.data_vector])
    assert np.allclose(like.assemble_data_vector(data), stacked)
| 34.58
| 65
| 0.6738
| 248
| 1,729
| 4.439516
| 0.185484
| 0.228883
| 0.294278
| 0.079927
| 0.906449
| 0.855586
| 0.855586
| 0.804723
| 0.761126
| 0.677566
| 0
| 0.024268
| 0.189705
| 1,729
| 49
| 66
| 35.285714
| 0.761599
| 0
| 0
| 0.7
| 0
| 0
| 0.096009
| 0
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.05
| false
| 0
| 0.05
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3b217ea70c4f38f06edb6ebe7caa232e479f50d0
| 366
|
py
|
Python
|
beer/breweries/helpers/__init__.py
|
kevinpanaro/api
|
c1860ba05bbd17c9a675e08172ee5a6640e87597
|
[
"MIT"
] | null | null | null |
beer/breweries/helpers/__init__.py
|
kevinpanaro/api
|
c1860ba05bbd17c9a675e08172ee5a6640e87597
|
[
"MIT"
] | 2
|
2021-03-31T18:46:53.000Z
|
2021-12-13T19:49:11.000Z
|
beer/breweries/helpers/__init__.py
|
kevinpanaro/api
|
c1860ba05bbd17c9a675e08172ee5a6640e87597
|
[
"MIT"
] | null | null | null |
from .helpers import (
beautiful_url,
save_beer,
format_beer_dict,
b_id,
get_id,
set_id,
reset_id,
valid_url
)
__all__ = [
"beautiful_url",
"save_beer",
"format_beer_dict",
"b_id",
"get_id",
"set_id",
"reset_id",
"valid_url",
]
| 15.913043
| 30
| 0.453552
| 38
| 366
| 3.789474
| 0.421053
| 0.166667
| 0.222222
| 0.277778
| 0.861111
| 0.861111
| 0.861111
| 0.861111
| 0.861111
| 0.861111
| 0
| 0
| 0.445355
| 366
| 22
| 31
| 16.636364
| 0.70936
| 0
| 0
| 0
| 0
| 0
| 0.193989
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.05
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3b4b793d20660d5ffbfc41fcf74b2c43abd3cc2b
| 38,909
|
py
|
Python
|
api/object_counting_api.py
|
dan1keen/dissertation_counter
|
1265ee9563d349849c9a68d204e0f427e33f0f48
|
[
"MIT"
] | null | null | null |
api/object_counting_api.py
|
dan1keen/dissertation_counter
|
1265ee9563d349849c9a68d204e0f427e33f0f48
|
[
"MIT"
] | null | null | null |
api/object_counting_api.py
|
dan1keen/dissertation_counter
|
1265ee9563d349849c9a68d204e0f427e33f0f48
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
import csv
import cv2
import numpy as np
import mysql.connector
from mysql.connector import Error
from mysql.connector import errorcode
from utils import visualization_utils as vis_util
from datetime import datetime
# Variables
total_passed_vehicle = 0 # using it to count vehicles
def saveVehicle(object_name, count, date, output_name):
    """Insert one counting record into the MySQL ``vehicle`` table.

    Args:
        object_name: Label of the counted object class (e.g. "vehicle").
        count: Accumulated object count for the period.
        date: Timestamp string ('%Y-%m-%d %H:%M') the count belongs to.
        output_name: File name of the rendered output video.

    Errors are caught, rolled back and logged; nothing is raised.
    """
    # Bind up-front so except/finally cannot raise NameError when
    # mysql.connector.connect() itself fails (the original bug: the
    # cleanup referenced names that were never assigned).
    connection = None
    cursor = None
    try:
        connection = mysql.connector.connect(host='localhost',
                                             database='python_items',
                                             user='root',
                                             password='root')
        cursor = connection.cursor()
        # Parameterized query — values are bound by the driver, not
        # interpolated into the SQL string.
        sql_insert_query = """ INSERT INTO `vehicle`
                        (`object`, `count`, `date`, `name_text`) VALUES (%s,%s,%s,%s)"""
        cursor.execute(sql_insert_query, (object_name, count, date, output_name))
        connection.commit()
        print("Record inserted successfully into python_users table")
    except mysql.connector.Error as error:
        if connection is not None:
            connection.rollback()  # rollback if any exception occured
        print("Failed inserting record into items table {}".format(error))
    finally:
        # closing database connection.
        if connection is not None and connection.is_connected():
            if cursor is not None:
                cursor.close()
            connection.close()
            print("MySQL connection is closed")
def savePedestrian(object_name, count, date, output_name):
    """Insert one counting record into the MySQL ``pedestrian`` table.

    Args:
        object_name: Label of the counted object class (e.g. "pedestrian").
        count: Accumulated object count for the period.
        date: Timestamp string ('%Y-%m-%d %H:%M') the count belongs to.
        output_name: File name of the rendered output video.

    Errors are caught, rolled back and logged; nothing is raised.
    """
    # Bind up-front so except/finally cannot raise NameError when
    # mysql.connector.connect() itself fails.
    connection = None
    cursor = None
    try:
        connection = mysql.connector.connect(host='localhost',
                                             database='python_items',
                                             user='root',
                                             password='root')
        cursor = connection.cursor()
        # Parameterized query — values are bound by the driver.
        sql_insert_query = """ INSERT INTO `pedestrian`
                        (`object`, `count`, `date`, `name_text`) VALUES (%s,%s,%s,%s)"""
        cursor.execute(sql_insert_query, (object_name, count, date, output_name))
        connection.commit()
        print("Record inserted successfully into python_users table")
    except mysql.connector.Error as error:
        if connection is not None:
            connection.rollback()  # rollback if any exception occured
        print("Failed inserting record into items table {}".format(error))
    finally:
        # closing database connection.
        if connection is not None and connection.is_connected():
            if cursor is not None:
                cursor.close()
            connection.close()
            print("MySQL connection is closed")
def cumulative_object_counting_x_axis(input_video, detection_graph, category_index, is_color_recognition_enabled,
                                      targeted_objects, fps, roi, deviation):
    """Count objects crossing a vertical ROI line (x axis) in a video.

    Runs a TF1 detection graph frame by frame, draws the ROI line and the
    running total on each frame, writes an annotated AVI, and finally
    stores the total in MySQL via compare_last_vehicle/_pedestrian.

    Args:
        input_video: Path of the video file to process.
        detection_graph: Frozen TF1 graph with the standard detection tensors.
        category_index: Label map (class id -> category dict) for vis_util.
        is_color_recognition_enabled: Flag passed through to vis_util.
        targeted_objects: Class name to count ("person" or "car" for DB save).
        fps: Frame rate for the output video writer.
        roi: X coordinate (pixels) of the counting line.
        deviation: Tolerance passed through to vis_util.
    """
    total_passed_vehicle = 0

    # initialize .csv — truncate and write a header row.
    with open('object_counting_report.csv', 'w') as f:
        writer = csv.writer(f)
        csv_line = "Object Type, Object Color, Object Movement Direction, Object Speed (km/h)"
        writer.writerows([csv_line.split(',')])

    # input video
    cap = cv2.VideoCapture(input_video)
    if cap.isOpened():
        # get cap property
        width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    # NOTE(review): if the capture failed to open, width/height are unbound
    # and the VideoWriter call below raises NameError — verify callers
    # always pass a readable video path.

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    output_movie = cv2.VideoWriter('the_object_x_axis.avi', fourcc, fps, (width, height))

    # Re-initialised here (shadows the assignment above).
    total_passed_vehicle = 0
    speed = "waiting..."
    direction = "waiting..."
    size = "waiting..."
    color = "waiting..."
    counting_mode = "..."
    width_heigh_taken = True

    with detection_graph.as_default():
        with tf.Session(graph=detection_graph) as sess:
            # Definite input and output Tensors for detection_graph
            image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
            # Each box represents a part of the image where a particular object was detected.
            detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
            # Each score represent how level of confidence for each of the objects.
            # Score is shown on the result image, together with the class label.
            detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
            detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
            num_detections = detection_graph.get_tensor_by_name('num_detections:0')

            # for all the frames that are extracted from input video
            while (cap.isOpened()):
                ret, frame = cap.read()
                if not ret:
                    print("end of the video file...")
                    break

                input_frame = frame

                # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
                image_np_expanded = np.expand_dims(input_frame, axis=0)

                # Actual detection.
                (boxes, scores, classes, num) = sess.run(
                    [detection_boxes, detection_scores, detection_classes, num_detections],
                    feed_dict={image_tensor: image_np_expanded})

                # insert information text to video frame
                font = cv2.FONT_HERSHEY_SIMPLEX

                # Visualization of the results of a detection; counter is how
                # many targeted objects crossed the line on this frame.
                counter, csv_line, counting_mode = vis_util.visualize_boxes_and_labels_on_image_array_x_axis(
                    cap.get(1),
                    input_frame,
                    1,
                    is_color_recognition_enabled,
                    np.squeeze(boxes),
                    np.squeeze(classes).astype(np.int32),
                    np.squeeze(scores),
                    category_index,
                    targeted_objects=targeted_objects,
                    x_reference=roi,
                    deviation=deviation,
                    use_normalized_coordinates=True,
                    line_thickness=4)

                # when the vehicle passed over line and counted, make the color of ROI line green
                if counter == 1:
                    cv2.line(input_frame, (roi, 0), (roi, height), (0, 0xFF, 0), 5)
                else:
                    cv2.line(input_frame, (roi, 0), (roi, height), (0, 0, 0xFF), 5)

                total_passed_vehicle = total_passed_vehicle + counter
                # Timestamp of the last processed frame; also used after the
                # loop for the DB record.
                time = datetime.now().strftime('%Y-%m-%d %H:%M')

                # insert information text to video frame
                font = cv2.FONT_HERSHEY_SIMPLEX
                cv2.putText(
                    input_frame,
                    'Detected Pedestrians: ' + str(total_passed_vehicle),
                    (10, 35),
                    font,
                    0.8,
                    (0, 0xFF, 0xFF),
                    2,
                    cv2.FONT_HERSHEY_SIMPLEX,  # NOTE(review): putText's 8th arg is lineType; a font id happens to be an int and is accepted — confirm intended
                )
                cv2.putText(
                    input_frame,
                    'ROI Line',
                    (545, roi - 10),
                    font,
                    0.6,
                    (0, 0, 0xFF),
                    2,
                    cv2.LINE_AA,
                )

                output_movie.write(input_frame)
                print("writing frame")
                cv2.imshow('object counting', input_frame)

                if cv2.waitKey(1) & 0xFF == ord('q'):
                    break

                # Kept from the original: disabled CSV-per-frame logging.
                '''if(csv_line != "not_available"):
                with open('traffic_measurement.csv', 'a') as f:
                writer = csv.writer(f)
                size, direction = csv_line.split(',')
                writer.writerows([csv_line.split(',')]) '''

    print(total_passed_vehicle)
    # Persist the total; NOTE(review): `time` is only bound if at least one
    # frame was read — an empty video raises NameError here.
    if(targeted_objects=="person"):
        compare_last_pedestrian("pedestrian", total_passed_vehicle, time, "the_object_y_axis.avi")
    elif(targeted_objects=="car"):
        compare_last_vehicle("vehicle", total_passed_vehicle, time, "the_object_y_axis.avi")

    cap.release()
    cv2.destroyAllWindows()
def compare_last_vehicle(obj, count, date, output_name):
    """Save a vehicle count unless the newest DB row covers the same hour.

    Fetches the most recent ``vehicle`` row and compares its stored
    timestamp (string '%Y-%m-%d %H:%M') to ``date``: slice [11:-3] is the
    hour, slice [0:-6] is the calendar date. Only inserts (via saveVehicle)
    when the hour or day differs, or when the table is empty.

    Args:
        obj: Object label to store (e.g. "vehicle").
        count: Accumulated count to store.
        date: Timestamp string for the new record.
        output_name: Output video file name to store.
    """
    # Bind up-front so finally cannot raise NameError when connect() fails.
    mySQLConnection = None
    cursor = None
    try:
        mySQLConnection = mysql.connector.connect(host='localhost',
                                                  database='python_items',
                                                  user='root',
                                                  password='root')
        cursor = mySQLConnection.cursor()
        sql_select_query = """select * from vehicle order by id desc limit 1"""
        cursor.execute(sql_select_query)
        record = cursor.fetchone()
        print(record)
        if record is not None:
            # NOTE(review): assumes the `date` column comes back as a string,
            # not a datetime — slicing would fail otherwise; confirm schema.
            date_compare = record[2]
            print("Just hour w/o minute = ", date_compare[11:-3])
            print("Just year/month/day = ", date_compare[0:-6])
            if (date_compare[11:-3] == date[11:-3] and date_compare[0:-6] == date[0:-6]):
                print("Counted objects sum is the same!!")
            else:
                saveVehicle(obj, count, date, output_name)
        else:
            saveVehicle(obj, count, date, output_name)
    except mysql.connector.Error as error:
        print("Failed to get record from database: {}".format(error))
    finally:
        # closing database connection.
        if mySQLConnection is not None and mySQLConnection.is_connected():
            if cursor is not None:
                cursor.close()
            mySQLConnection.close()
            print("connection is closed")
def compare_last_pedestrian(obj, count, date, output_name):
    """Save a pedestrian count unless the newest DB row covers the same hour.

    Mirrors compare_last_vehicle for the ``pedestrian`` table: slice
    [11:-3] of the '%Y-%m-%d %H:%M' timestamp is the hour, [0:-6] the
    calendar date. Inserts (via savePedestrian) only when the hour or day
    differs, or when the table is empty.

    Args:
        obj: Object label to store (e.g. "pedestrian").
        count: Accumulated count to store.
        date: Timestamp string for the new record.
        output_name: Output video file name to store.
    """
    # Bind up-front so finally cannot raise NameError when connect() fails.
    mySQLConnection = None
    cursor = None
    try:
        mySQLConnection = mysql.connector.connect(host='localhost',
                                                  database='python_items',
                                                  user='root',
                                                  password='root')
        cursor = mySQLConnection.cursor()
        sql_select_query = """select * from pedestrian order by id desc limit 1"""
        cursor.execute(sql_select_query)
        record = cursor.fetchone()
        print(record)
        if record is not None:
            # NOTE(review): assumes the `date` column is a string — confirm schema.
            date_compare = record[2]
            print("Just hour w/o minute = ", date_compare[11:-3])
            print("Just year/month/day = ", date_compare[0:-6])
            if (date_compare[11:-3] == date[11:-3] and date_compare[0:-6] == date[0:-6]):
                print("Counted objects sum is the same!!")
            else:
                savePedestrian(obj, count, date, output_name)
        else:
            savePedestrian(obj, count, date, output_name)
    except mysql.connector.Error as error:
        print("Failed to get record from database: {}".format(error))
    finally:
        # closing database connection.
        if mySQLConnection is not None and mySQLConnection.is_connected():
            if cursor is not None:
                cursor.close()
            mySQLConnection.close()
            print("connection is closed")
def rewritePedestrian(obj, count, date, output_name):
    """Overwrite the newest ``pedestrian`` row with a fresh count.

    Updates the row with the maximum id (the nested sub-select works
    around MySQL's restriction on updating a table referenced in its own
    subquery).

    Args:
        obj: Object label to store.
        count: Accumulated count to store.
        date: Timestamp string to store.
        output_name: Output video file name to store.
    """
    # Bind up-front so except/finally cannot raise NameError when
    # mysql.connector.connect() itself fails.
    connection = None
    cursor = None
    try:
        connection = mysql.connector.connect(host='localhost',
                                             database='python_items',
                                             user='root',
                                             password='root')
        cursor = connection.cursor()
        sql_insert_query = """ UPDATE pedestrian SET object = %s, count = %s, date = %s, name_text = %s WHERE id IN
                            (select max(id) from (select * from pedestrian) AS pd)"""
        cursor.execute(sql_insert_query, (obj, count, date, output_name))
        connection.commit()
        print("Record inserted successfully into python_users table")
    except mysql.connector.Error as error:
        if connection is not None:
            connection.rollback()  # rollback if any exception occured
        print("Failed inserting record into items table {}".format(error))
    finally:
        # closing database connection.
        if connection is not None and connection.is_connected():
            if cursor is not None:
                cursor.close()
            connection.close()
            print("MySQL connection is closed")
def compare_max_pedestrian(obj, count, date, output_name):
    """Keep the maximum pedestrian count per hour in the DB.

    Fetches the newest ``pedestrian`` row; when it belongs to the same
    hour and day as ``date``, the row is overwritten (rewritePedestrian)
    only if the new ``count`` is larger. Otherwise (different hour/day or
    empty table) a new row is inserted via savePedestrian.

    Args:
        obj: Object label to store.
        count: Accumulated count to store.
        date: Timestamp string ('%Y-%m-%d %H:%M') for the new record.
        output_name: Output video file name to store.
    """
    # Bind up-front so finally cannot raise NameError when connect() fails.
    mySQLConnection = None
    cursor = None
    try:
        mySQLConnection = mysql.connector.connect(host='localhost',
                                                  database='python_items',
                                                  user='root',
                                                  password='root')
        cursor = mySQLConnection.cursor()
        sql_select_query = """select * from pedestrian order by id desc limit 1"""
        cursor.execute(sql_select_query)
        record = cursor.fetchone()
        print(record)
        if record is not None:
            # NOTE(review): assumes the `date` column is a string — confirm schema.
            date_compare = record[2]
            print("Just hour w/o minute = ", date_compare[11:-3])
            print("Just year/month/day = ", date_compare[0:-6])
            if (date_compare[11:-3] == date[11:-3] and date_compare[0:-6] == date[0:-6]):
                # Same hour: only rewrite when the new count is larger.
                if (int(record[1]) < count):
                    rewritePedestrian(obj, count, date, output_name)
                print("Counted objects sum is the same!!")
            else:
                savePedestrian(obj, count, date, output_name)
        else:
            savePedestrian(obj, count, date, output_name)
    except mysql.connector.Error as error:
        print("Failed to get record from database: {}".format(error))
    finally:
        # closing database connection.
        if mySQLConnection is not None and mySQLConnection.is_connected():
            if cursor is not None:
                cursor.close()
            mySQLConnection.close()
            print("connection is closed")
def cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled,
                                      targeted_object, fps, roi, deviation):
    """Count objects crossing a horizontal ROI line (y axis) in a video.

    Same pipeline as the x-axis variant: TF1 detection per frame, ROI line
    and per-vehicle info drawn onto each frame, annotated AVI written out,
    per-frame CSV rows appended, and the final total persisted to MySQL.

    Args:
        input_video: Path of the video file to process.
        detection_graph: Frozen TF1 graph with the standard detection tensors.
        category_index: Label map (class id -> category dict) for vis_util.
        is_color_recognition_enabled: Flag passed through to vis_util.
        targeted_object: Class name to count ("person" or "car" for DB save).
        fps: Frame rate for the output video writer.
        roi: Y coordinate (pixels) of the counting line.
        deviation: Tolerance passed through to vis_util.
    """
    # initialize .csv — truncate and write a header row.
    with open('traffic_measurement.csv', 'w') as f:
        writer = csv.writer(f)
        csv_line = \
            'Vehicle Type/Size, Vehicle Color, Vehicle Movement Direction, Vehicle Speed (km/h)'
        writer.writerows([csv_line.split(',')])

    # input video
    cap = cv2.VideoCapture(input_video)
    if cap.isOpened():
        # get cap property
        width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    # NOTE(review): width/height are unbound when the capture fails to open.

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    output_movie = cv2.VideoWriter('the_object_y_axis.avi', fourcc, fps, (width, height))

    total_passed_vehicle = 0
    speed = "waiting..."
    direction = "waiting..."
    size = "waiting..."
    color = "waiting..."
    counting_mode = "..."
    width_heigh_taken = True

    with detection_graph.as_default():
        with tf.Session(graph=detection_graph) as sess:
            # Definite input and output Tensors for detection_graph
            image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
            # Each box represents a part of the image where a particular object was detected.
            detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
            # Each score represent how level of confidence for each of the objects.
            # Score is shown on the result image, together with the class label.
            detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
            detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
            num_detections = detection_graph.get_tensor_by_name('num_detections:0')

            # for all the frames that are extracted from input video
            while (cap.isOpened()):
                ret, frame = cap.read()
                if not ret:
                    print("end of the video file...")
                    break
                # else:
                # cv2.imshow('frame', frame)
                # if cv2.waitKey(1) & 0xFF == ord('q'):
                # break

                input_frame = frame

                # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
                image_np_expanded = np.expand_dims(input_frame, axis=0)

                # Actual detection.
                (boxes, scores, classes, num) = sess.run(
                    [detection_boxes, detection_scores, detection_classes, num_detections],
                    feed_dict={image_tensor: image_np_expanded})

                # insert information text to video frame
                font = cv2.FONT_HERSHEY_SIMPLEX

                # Visualization of the results of a detection; counter is how
                # many targeted objects crossed the line on this frame.
                # min_score_thresh=.0 keeps every detection regardless of score.
                counter, csv_line, counting_mode = vis_util.visualize_boxes_and_labels_on_image_array_y_axis(
                    cap.get(1),
                    input_frame,
                    2,
                    is_color_recognition_enabled,
                    np.squeeze(boxes),
                    np.squeeze(classes).astype(np.int32),
                    np.squeeze(scores),
                    category_index,
                    targeted_objects=targeted_object,
                    y_reference=roi,
                    deviation=deviation,
                    use_normalized_coordinates=True,
                    min_score_thresh=.0,
                    line_thickness=4)

                # when the vehicle passed over line and counted, make the color of ROI line green
                if counter == 1:
                    cv2.line(input_frame, (0, roi), (width, roi), (0, 0xFF, 0), 5)
                else:
                    cv2.line(input_frame, (0, roi), (width, roi), (0, 0, 0xFF), 5)

                total_passed_vehicle = total_passed_vehicle + counter

                # insert information text to video frame
                font = cv2.FONT_HERSHEY_SIMPLEX
                cv2.putText(
                    input_frame,
                    'Detected Vehicles: ' + str(total_passed_vehicle),
                    (10, 35),
                    font,
                    0.8,
                    (0, 0xFF, 0xFF),
                    2,
                    cv2.FONT_HERSHEY_SIMPLEX,  # NOTE(review): 8th arg is lineType; font id accepted as int — confirm intended
                )
                cv2.putText(
                    input_frame,
                    'ROI Line',
                    (545, roi - 10),
                    font,
                    0.6,
                    (0, 0, 0xFF),
                    2,
                    cv2.LINE_AA,
                )
                # Info panel for the last vehicle that crossed the line.
                cv2.putText(
                    input_frame,
                    'LAST PASSED VEHICLE INFO',
                    (11, 290),
                    font,
                    0.5,
                    (0xFF, 0xFF, 0xFF),
                    1,
                    cv2.FONT_HERSHEY_SIMPLEX,
                )
                cv2.putText(
                    input_frame,
                    '-Movement Direction: ' + direction,
                    (14, 302),
                    font,
                    0.4,
                    (0xFF, 0xFF, 0xFF),
                    1,
                    cv2.FONT_HERSHEY_COMPLEX_SMALL,
                )
                cv2.putText(
                    input_frame,
                    '-Speed(km/h): ' + speed,
                    (14, 312),
                    font,
                    0.4,
                    (0xFF, 0xFF, 0xFF),
                    1,
                    cv2.FONT_HERSHEY_COMPLEX_SMALL,
                )
                cv2.putText(
                    input_frame,
                    '-Color: ' + color,
                    (14, 322),
                    font,
                    0.4,
                    (0xFF, 0xFF, 0xFF),
                    1,
                    cv2.FONT_HERSHEY_COMPLEX_SMALL,
                )
                cv2.putText(
                    input_frame,
                    '-Vehicle Size/Type: ' + size,
                    (14, 332),
                    font,
                    0.4,
                    (0xFF, 0xFF, 0xFF),
                    1,
                    cv2.FONT_HERSHEY_COMPLEX_SMALL,
                )

                output_movie.write(input_frame)
                print("writing frame")
                cv2.imshow('object counting', input_frame)

                if cv2.waitKey(1) & 0xFF == ord('q'):
                    break

                if csv_line != 'not_available':
                    with open('traffic_measurement.csv', 'a') as f:
                        writer = csv.writer(f)
                        # NOTE(review): this unpack only works when the csv
                        # line has exactly two fields — confirm vis_util output.
                        (size, direction) = \
                            csv_line.split(',')
                        writer.writerows([csv_line.split(',')])

    print(total_passed_vehicle)
    time = datetime.now().strftime('%Y-%m-%d %H:%M')
    date = datetime.now().strftime('%Y-%m-%d')
    if(targeted_object=="car"):
        compare_last_vehicle("vehicle", total_passed_vehicle, time, "the_object_y_axis.avi")
    elif(targeted_object=="person"):
        compare_last_pedestrian("pedestrian", total_passed_vehicle, time, "the_object_y_axis.avi")

    cap.release()
    cv2.destroyAllWindows()
def object_counting(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width, height):
    """Count all detected objects per frame and render an annotated video.

    Unlike the cumulative variants, this draws the per-frame counting
    summary string (counting_mode) on each frame; no ROI line, no DB save.

    Args:
        input_video: Capture source (opened with the DirectShow backend).
        detection_graph: Frozen TF1 graph with the standard detection tensors.
        category_index: Label map (class id -> category dict) for vis_util.
        is_color_recognition_enabled: Flag passed through to vis_util.
        fps: Frame rate for the output video writer.
        width: Output video width (used only for the VideoWriter).
        height: Output video height (used only for the VideoWriter).
    """
    # initialize .csv — truncate and write a header row.
    with open('object_counting_report.csv', 'w') as f:
        writer = csv.writer(f)
        csv_line = "Object Type, Object Color, Object Movement Direction, Object Speed (km/h)"
        writer.writerows([csv_line.split(',')])

    # input video (CAP_DSHOW: Windows DirectShow backend)
    cap = cv2.VideoCapture(input_video, cv2.CAP_DSHOW)

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    output_movie = cv2.VideoWriter('the_output.avi', fourcc, fps, (width, height))

    total_passed_vehicle = 0
    speed = "waiting..."
    direction = "waiting..."
    size = "waiting..."
    color = "waiting..."
    counting_mode = "..."
    width_heigh_taken = True
    # NOTE(review): the width/height parameters are overwritten below and
    # re-read from the capture each frame; the VideoWriter above still uses
    # the caller-supplied values — confirm they match the actual frames.
    height = 0
    width = 0

    with detection_graph.as_default():
        with tf.Session(graph=detection_graph) as sess:
            # Definite input and output Tensors for detection_graph
            image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
            # Each box represents a part of the image where a particular object was detected.
            detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
            # Each score represent how level of confidence for each of the objects.
            # Score is shown on the result image, together with the class label.
            detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
            detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
            num_detections = detection_graph.get_tensor_by_name('num_detections:0')

            # for all the frames that are extracted from input video
            while (cap.isOpened()):
                ret, frame = cap.read()
                width = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
                height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
                if not ret:
                    print("end of the video file...")
                    break

                input_frame = frame
                # else:
                # cv2.imshow('frame', frame)
                # if cv2.waitKey(1) & 0xFF == ord('q'):
                # break

                # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
                image_np_expanded = np.expand_dims(input_frame, axis=0)

                # Actual detection.
                (boxes, scores, classes, num) = sess.run(
                    [detection_boxes, detection_scores, detection_classes, num_detections],
                    feed_dict={image_tensor: image_np_expanded})

                # insert information text to video frame
                font = cv2.FONT_HERSHEY_SIMPLEX

                # Visualization of the results of a detection.
                counter, csv_line, counting_mode = vis_util.visualize_boxes_and_labels_on_image_array(
                    cap.get(1),
                    input_frame,
                    1,
                    is_color_recognition_enabled,
                    np.squeeze(boxes),
                    np.squeeze(classes).astype(np.int32),
                    np.squeeze(scores),
                    category_index,
                    use_normalized_coordinates=True,
                    line_thickness=4)

                # Draw the per-frame summary (or "..." when nothing detected).
                if (len(counting_mode) == 0):
                    cv2.putText(input_frame, "...", (10, 35), font, 0.8, (0, 255, 255), 2, cv2.FONT_HERSHEY_SIMPLEX)
                else:
                    cv2.putText(input_frame, counting_mode, (10, 35), font, 0.8, (0, 255, 255), 2,
                                cv2.FONT_HERSHEY_SIMPLEX)

                output_movie.write(input_frame)
                print("writing frame")
                cv2.imshow('object counting', input_frame)

                if cv2.waitKey(1) & 0xFF == ord('q'):
                    break

                if (csv_line != "not_available"):
                    with open('traffic_measurement.csv', 'a') as f:
                        writer = csv.writer(f)
                        # NOTE(review): unpack assumes exactly two fields.
                        size, direction = csv_line.split(',')
                        writer.writerows([csv_line.split(',')])

    cap.release()
    cv2.destroyAllWindows()
def targeted_object_counting(input_video, detection_graph, category_index, is_color_recognition_enabled,
                             targeted_object, fps):
    """Count one targeted object class from the webcam and save the max to DB.

    Captures from camera 0 (the ``input_video`` parameter is currently
    ignored — see the commented-out line below), draws the per-frame count
    string, parses the running total out of that string, and on exit stores
    the pedestrian total via compare_max_pedestrian.

    Args:
        input_video: Recorded-video path; unused while webcam mode is active.
        detection_graph: Frozen TF1 graph with the standard detection tensors.
        category_index: Label map (class id -> category dict) for vis_util.
        is_color_recognition_enabled: Flag passed through to vis_util.
        targeted_object: Class to count; "person" and "car" are handled.
        fps: Frame rate for the output video writer.
    """
    # initialize .csv — truncate and write a header row.
    with open('object_counting_report.csv', 'w') as f:
        writer = csv.writer(f)
        csv_line = "Object Type, Object Color, Object Movement Direction, Object Speed (km/h)"
        writer.writerows([csv_line.split(',')])

    # input video
    # cap = cv2.VideoCapture(input_video)  # recorded video file mode
    cap = cv2.VideoCapture(0)  # WebCamera mode

    if cap.isOpened():
        # get cap property
        width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    # NOTE(review): width/height are unbound when the camera fails to open.

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    output_movie = cv2.VideoWriter('the_output.avi', fourcc, fps, (width, height))

    total_passed_vehicle = 0
    speed = "waiting..."
    direction = "waiting..."
    size = "waiting..."
    color = "waiting..."
    the_result = "..."
    width_heigh_taken = True
    height = 0
    width = 0

    with detection_graph.as_default():
        with tf.Session(graph=detection_graph) as sess:
            # Definite input and output Tensors for detection_graph
            image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
            # Each box represents a part of the image where a particular object was detected.
            detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
            # Each score represent how level of confidence for each of the objects.
            # Score is shown on the result image, together with the class label.
            detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
            detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
            num_detections = detection_graph.get_tensor_by_name('num_detections:0')

            # for all the frames that are extracted from input video
            while (cap.isOpened()):
                ret, frame = cap.read()
                if not ret:
                    print("end of the video file...")
                    break

                input_frame = frame

                # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
                image_np_expanded = np.expand_dims(input_frame, axis=0)

                # Actual detection.
                (boxes, scores, classes, num) = sess.run(
                    [detection_boxes, detection_scores, detection_classes, num_detections],
                    feed_dict={image_tensor: image_np_expanded})

                # insert information text to video frame
                font = cv2.FONT_HERSHEY_SIMPLEX

                # Visualization of the results of a detection.
                counter, csv_line, the_result = vis_util.visualize_boxes_and_labels_on_image_array(
                    cap.get(1),
                    input_frame,
                    1,
                    is_color_recognition_enabled,
                    np.squeeze(boxes),
                    np.squeeze(classes).astype(np.int32),
                    np.squeeze(scores),
                    category_index,
                    targeted_objects=targeted_object,
                    use_normalized_coordinates=True,
                    line_thickness=4)

                if (len(the_result) == 0):
                    cv2.putText(input_frame, "...", (10, 35), font, 0.8, (0, 255, 255), 2, cv2.FONT_HERSHEY_SIMPLEX)
                else:
                    cv2.putText(input_frame, the_result, (10, 35), font, 0.8, (0, 255, 255), 2,
                                cv2.FONT_HERSHEY_SIMPLEX)

                # Parse the running count out of the summary string by fixed
                # offsets ("person: NN" / "car: NN").
                # NOTE(review): brittle — breaks for 3+ digit counts or any
                # change to vis_util's label format; int() raises if the slice
                # is not numeric. Verify the expected string layout.
                if (targeted_object == "person"):
                    if (len(the_result) > 12):
                        total_passed_vehicle = int(the_result[11:13])
                    else:
                        total_passed_vehicle = int(the_result[11])
                elif (targeted_object == "car"):
                    if (len(the_result) > 9):
                        total_passed_vehicle = int(the_result[8:10])
                    else:
                        total_passed_vehicle = int(the_result[8])

                cv2.imshow('object counting', input_frame)
                output_movie.write(input_frame)
                print("writing frame")
                print(total_passed_vehicle)
                # Timestamp of the last processed frame; used for the DB save.
                time = datetime.now().strftime('%Y-%m-%d %H:%M')

                if cv2.waitKey(1) & 0xFF == ord('q'):
                    break

                if (csv_line != "not_available"):
                    with open('traffic_measurement.csv', 'a') as f:
                        writer = csv.writer(f)
                        # NOTE(review): unpack assumes exactly two fields.
                        size, direction = csv_line.split(',')
                        writer.writerows([csv_line.split(',')])

    # Persist the best count; NOTE(review): `time` is only bound if at least
    # one frame was read.
    if (targeted_object == "person"):
        compare_max_pedestrian("pedestrian", total_passed_vehicle, time, "the_pedestrian_output.avi")
    elif (targeted_object == "car"):
        print("Will be programmed soon!!")

    cap.release()
    cv2.destroyAllWindows()
def single_image_object_counting(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width,
                                 height):
    """Run detection on a single image file and display the counts.

    Args:
        input_video: Path of the image file (read with cv2.imread).
        detection_graph: Frozen TF1 graph with the standard detection tensors.
        category_index: Label map (class id -> category dict) for vis_util.
        is_color_recognition_enabled: Flag passed through to vis_util.
        fps: Unused for a single image (kept for API symmetry).
        width: Unused (overwritten below).
        height: Unused (overwritten below).

    Returns:
        The counting summary string produced by vis_util.
    """
    total_passed_vehicle = 0
    speed = "waiting..."
    direction = "waiting..."
    size = "waiting..."
    color = "waiting..."
    counting_mode = "..."
    width_heigh_taken = True
    height = 0
    width = 0

    with detection_graph.as_default():
        with tf.Session(graph=detection_graph) as sess:
            # Definite input and output Tensors for detection_graph
            image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
            # Each box represents a part of the image where a particular object was detected.
            detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
            # Each score represent how level of confidence for each of the objects.
            # Score is shown on the result image, together with the class label.
            detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
            detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
            num_detections = detection_graph.get_tensor_by_name('num_detections:0')

            # NOTE(review): cv2.imread returns None for an unreadable path;
            # that would fail inside np.expand_dims/sess.run — confirm callers
            # validate the path.
            input_frame = cv2.imread(input_video)

            # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
            image_np_expanded = np.expand_dims(input_frame, axis=0)

            # Actual detection.
            (boxes, scores, classes, num) = sess.run(
                [detection_boxes, detection_scores, detection_classes, num_detections],
                feed_dict={image_tensor: image_np_expanded})

            # insert information text to video frame
            font = cv2.FONT_HERSHEY_SIMPLEX

            # Visualization of the results of a detection.
            counter, csv_line, counting_mode = vis_util.visualize_boxes_and_labels_on_single_image_array(
                1, input_frame,
                1,
                is_color_recognition_enabled,
                np.squeeze(boxes),
                np.squeeze(classes).astype(np.int32),
                np.squeeze(scores),
                category_index,
                use_normalized_coordinates=True,
                line_thickness=4)

            if (len(counting_mode) == 0):
                cv2.putText(input_frame, "...", (10, 35), font, 0.8, (0, 255, 255), 2, cv2.FONT_HERSHEY_SIMPLEX)
            else:
                cv2.putText(input_frame, counting_mode, (10, 35), font, 0.8, (0, 255, 255), 2, cv2.FONT_HERSHEY_SIMPLEX)

            cv2.imshow('tensorflow_object counting_api', input_frame)
            # Block until a key is pressed (single-image mode).
            cv2.waitKey(0)

            return counting_mode
| 49.692209
| 143
| 0.459431
| 3,493
| 38,909
| 4.904666
| 0.088463
| 0.036773
| 0.026267
| 0.033563
| 0.928146
| 0.921725
| 0.913437
| 0.89505
| 0.881508
| 0.870943
| 0
| 0.021321
| 0.461179
| 38,909
| 782
| 144
| 49.755754
| 0.79585
| 0.089337
| 0
| 0.831386
| 0
| 0.005008
| 0.096559
| 0.009181
| 0
| 0
| 0.003318
| 0
| 0
| 1
| 0.018364
| false
| 0.050083
| 0.015025
| 0
| 0.035058
| 0.065109
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
8eaa3fcf085ce2d4c99f4769b199e92a42a1219d
| 108,714
|
py
|
Python
|
codegen/sub_codegen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | 2
|
2017-08-28T08:41:16.000Z
|
2018-05-29T03:49:36.000Z
|
codegen/sub_codegen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
codegen/sub_codegen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
##############################################################################
# Project: arrayfunc
# Purpose: Generate the C code for math subtract operations.
# parameter.
# Language: Python 3.8
# Date: 30-Dec-2017
#
###############################################################################
#
# Copyright 2014 - 2021 Michael Griffin <m12.griffin@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
# ==============================================================================
import itertools
import codegen_common
# ==============================================================================
# Template for the head of each generated C source file: the module
# comment block, the Apache-2.0 licence text, and the standard #include
# list. Interpolation keys:
#   %(funclabel)s      - the name of the generated function/module.
#   %(includeoptions)s - extra #include / option lines (e.g. SIMD headers).
mathops_head = """//------------------------------------------------------------------------------
// Project: arrayfunc
// Module: %(funclabel)s.c
// Purpose: Calculate the %(funclabel)s of values in an array.
// Language: C
// Date: 15-Nov-2017.
//
//------------------------------------------------------------------------------
//
// Copyright 2014 - 2021 Michael Griffin <m12.griffin@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//------------------------------------------------------------------------------
/*--------------------------------------------------------------------------- */
// This must be defined before "Python.h" in order for the pointers in the
// argument parsing functions to work properly.
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#include <limits.h>
#include <math.h>
#include "arrayerrs.h"
#include "arrayparams_base.h"
#include "arrayparams_two.h"
%(includeoptions)s
/*--------------------------------------------------------------------------- */
"""
# ==============================================================================
# For floating point.
ops_op_float = """
/*--------------------------------------------------------------------------- */
/* The following series of functions reflect the different parameter options possible.
arraylen = The length of the data arrays.
data1 = The first data array.
data2 = The second data array.
data3 = The third data array.
param = The parameter to be applied to each array element.
ignoreerrors = If true, disable arithmetic math error checking (default is false).
*/
// param_arr_num_none
signed int %(funclabel)s_%(funcmodifier)s_1(Py_ssize_t arraylen, int nosimd, %(arraytype)s *data1, %(arraytype)s param, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simdplatform)s
signed int errorstate;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_1_simd(arraylen, data1, param);
} else {
errorstate = %(funclabel)s_%(funcmodifier)s_1_simd_ovfl(arraylen, data1, param);
if (errorstate) {return ARR_ERR_ARITHMETIC;}
}
} else {
#endif
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s param;
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s param;
if (!isfinite(data1[x])) {return ARR_ERR_ARITHMETIC;}
}
}
%(simdplatform)s
}
#endif
return ARR_NO_ERR;
}
// param_arr_num_arr
signed int %(funclabel)s_%(funcmodifier)s_2(Py_ssize_t arraylen, int nosimd, %(arraytype)s *data1, %(arraytype)s param, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simdplatform)s
signed int errorstate;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_2_simd(arraylen, data1, param, data3);
} else {
errorstate = %(funclabel)s_%(funcmodifier)s_2_simd_ovfl(arraylen, data1, param, data3);
if (errorstate) {return ARR_ERR_ARITHMETIC;}
}
} else {
#endif
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s param;
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s param;
if (!isfinite(data3[x])) {return ARR_ERR_ARITHMETIC;}
}
}
%(simdplatform)s
}
#endif
return ARR_NO_ERR;
}
// param_num_arr_none
signed int %(funclabel)s_%(funcmodifier)s_3(Py_ssize_t arraylen, int nosimd, %(arraytype)s param, %(arraytype)s *data2, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simdplatform)s
signed int errorstate;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_3_simd(arraylen, param, data2);
} else {
errorstate = %(funclabel)s_%(funcmodifier)s_3_simd_ovfl(arraylen, param, data2);
if (errorstate) {return ARR_ERR_ARITHMETIC;}
}
} else {
#endif
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data2[x] = param %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
data2[x] = param %(copname)s data2[x];
if (!isfinite(data2[x])) {return ARR_ERR_ARITHMETIC;}
}
}
%(simdplatform)s
}
#endif
return ARR_NO_ERR;
}
// param_num_arr_arr
signed int %(funclabel)s_%(funcmodifier)s_4(Py_ssize_t arraylen, int nosimd, %(arraytype)s param, %(arraytype)s *data2, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simdplatform)s
signed int errorstate;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_4_simd(arraylen, param, data2, data3);
} else {
errorstate = %(funclabel)s_%(funcmodifier)s_4_simd_ovfl(arraylen, param, data2, data3);
if (errorstate) {return ARR_ERR_ARITHMETIC;}
}
} else {
#endif
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data3[x] = param %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
data3[x] = param %(copname)s data2[x];
if (!isfinite(data3[x])) {return ARR_ERR_ARITHMETIC;}
}
}
%(simdplatform)s
}
#endif
return ARR_NO_ERR;
}
// param_arr_arr_none
signed int %(funclabel)s_%(funcmodifier)s_5(Py_ssize_t arraylen, int nosimd, %(arraytype)s *data1, %(arraytype)s *data2, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simdplatform)s
signed int errorstate;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_5_simd(arraylen, data1, data2);
} else {
errorstate = %(funclabel)s_%(funcmodifier)s_5_simd_ovfl(arraylen, data1, data2);
if (errorstate) {return ARR_ERR_ARITHMETIC;}
}
} else {
#endif
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s data2[x];
if (!isfinite(data1[x])) {return ARR_ERR_ARITHMETIC;}
}
}
%(simdplatform)s
}
#endif
return ARR_NO_ERR;
}
// param_arr_arr_arr
signed int %(funclabel)s_%(funcmodifier)s_6(Py_ssize_t arraylen, int nosimd, %(arraytype)s *data1, %(arraytype)s *data2, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simdplatform)s
signed int errorstate;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_6_simd(arraylen, data1, data2, data3);
} else {
errorstate = %(funclabel)s_%(funcmodifier)s_6_simd_ovfl(arraylen, data1, data2, data3);
if (errorstate) {return ARR_ERR_ARITHMETIC;}
}
} else {
#endif
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s data2[x];
if (!isfinite(data3[x])) {return ARR_ERR_ARITHMETIC;}
}
}
%(simdplatform)s
}
#endif
return ARR_NO_ERR;
}
"""
# ==============================================================================
# For unsigned integer.
# C source template for the scalar (non-SIMD core) implementations of the six
# parameter-combination functions (_1 .. _6) for unsigned integer array types.
# Filled in with %-formatting. Placeholders used:
#   %(funclabel)s      - the operation name used as the function name prefix.
#   %(funcmodifier)s   - type-specific suffix; also selects the matching
#                        loop_willoverflow_* overflow-check macro.
#   %(arraytype)s      - the C element type of the arrays.
#   %(copname)s        - the C operator applied between the operands.
#   %(nosimddecl)s     - optional extra 'nosimd' parameter declaration.
#   %(simd_call_N_ovfl)s / %(simd_call_close)s - SIMD dispatch code spliced
#                        into the head/tail of each function.
ops_sub_uint = """
/*--------------------------------------------------------------------------- */
/* The following series of functions reflect the different parameter options possible.
arraylen = The length of the data arrays.
data1 = The first data array.
data2 = The second data array.
data3 = The third data array.
param = The parameter to be applied to each array element.
ignoreerrors = If true, disable arithmetic math error checking (default is false).
*/
// param_arr_num_none
signed int %(funclabel)s_%(funcmodifier)s_1(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s *data1, %(arraytype)s param, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simd_call_1_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s param;
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], param) ) {return ARR_ERR_OVFL;}
data1[x] = data1[x] %(copname)s param;
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_arr_num_arr
signed int %(funclabel)s_%(funcmodifier)s_2(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s *data1, %(arraytype)s param, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simd_call_2_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s param;
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], param) ) {return ARR_ERR_OVFL;}
data3[x] = data1[x] %(copname)s param;
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_num_arr_none
signed int %(funclabel)s_%(funcmodifier)s_3(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s param, %(arraytype)s *data2, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simd_call_3_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data2[x] = param %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(param, data2[x]) ) {return ARR_ERR_OVFL;}
data2[x] = param %(copname)s data2[x];
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_num_arr_arr
signed int %(funclabel)s_%(funcmodifier)s_4(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s param, %(arraytype)s *data2, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simd_call_4_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data3[x] = param %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(param, data2[x]) ) {return ARR_ERR_OVFL;}
data3[x] = param %(copname)s data2[x];
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_arr_arr_none
signed int %(funclabel)s_%(funcmodifier)s_5(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s *data1, %(arraytype)s *data2, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simd_call_5_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], data2[x]) ) {return ARR_ERR_OVFL;}
data1[x] = data1[x] %(copname)s data2[x];
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_arr_arr_arr
signed int %(funclabel)s_%(funcmodifier)s_6(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s *data1, %(arraytype)s *data2, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(simd_call_6_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], data2[x]) ) {return ARR_ERR_OVFL;}
data3[x] = data1[x] %(copname)s data2[x];
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
"""
# ==============================================================================
# ==============================================================================
# For signed integer.
# C source template for the scalar (non-SIMD core) implementations of the six
# parameter-combination functions (_1 .. _6) for signed integer array types.
# Differs from the unsigned template: overflow checks use a precomputed
# 'ovlimit' via the pos_/neg_ovlimit_12/34_* macros (separate positive /
# negative / zero param branches), and the zero-param cases short-cut.
# Extra placeholder: %(intminvalue)s - the type's minimum value constant,
# used in the param == 0 negate branches of _3 and _4.
ops_sub_int = """
/*--------------------------------------------------------------------------- */
/* The following series of functions reflect the different parameter options possible.
arraylen = The length of the data arrays.
data1 = The first data array.
data2 = The second data array.
data3 = The third data array.
param = The parameter to be applied to each array element.
ignoreerrors = If true, disable arithmetic math error checking (default is false).
*/
// param_arr_num_none
signed int %(funclabel)s_%(funcmodifier)s_1(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s *data1, %(arraytype)s param, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(arraytype)s ovlimit;
%(simd_call_1_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s param;
}
} else {
// Math error checking enabled.
// If the parameter is zero, we can take a shortcut.
if (param == 0) {
return ARR_NO_ERR;
}
if (param > 0) {
ovlimit = pos_ovlimit_12_%(funcmodifier)s(param);
for (x = 0; x < arraylen; x++) {
if ( pos_willoverflow(data1[x], ovlimit) ) {return ARR_ERR_OVFL;}
data1[x] = data1[x] %(copname)s param;
}
}
if (param < 0) {
ovlimit = neg_ovlimit_12_%(funcmodifier)s(param);
for (x = 0; x < arraylen; x++) {
if ( neg_willoverflow(data1[x], ovlimit) ) {return ARR_ERR_OVFL;}
data1[x] = data1[x] %(copname)s param;
}
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_arr_num_arr
signed int %(funclabel)s_%(funcmodifier)s_2(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s *data1, %(arraytype)s param, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(arraytype)s ovlimit;
%(simd_call_2_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s param;
}
} else {
// Math error checking enabled.
if (param == 0) {
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x];
}
}
if (param > 0) {
ovlimit = pos_ovlimit_12_%(funcmodifier)s(param);
for (x = 0; x < arraylen; x++) {
if ( pos_willoverflow(data1[x], ovlimit) ) {return ARR_ERR_OVFL;}
data3[x] = data1[x] %(copname)s param;
}
}
if (param < 0) {
ovlimit = neg_ovlimit_12_%(funcmodifier)s(param);
for (x = 0; x < arraylen; x++) {
if ( neg_willoverflow(data1[x], ovlimit) ) {return ARR_ERR_OVFL;}
data3[x] = data1[x] %(copname)s param;
}
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_num_arr_none
signed int %(funclabel)s_%(funcmodifier)s_3(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s param, %(arraytype)s *data2, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(arraytype)s ovlimit;
%(simd_call_3_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data2[x] = param %(copname)s data2[x];
}
} else {
// Math error checking enabled.
// If the parameter is zero, we can take a shortcut.
if (param == 0) {
for (x = 0; x < arraylen; x++) {
if (data2[x] == %(intminvalue)s) {return ARR_ERR_OVFL;}
data2[x] = -data2[x];
}
}
if (param > 0) {
ovlimit = pos_ovlimit_34_%(funcmodifier)s(param);
for (x = 0; x < arraylen; x++) {
if ( pos_willoverflow(data2[x], ovlimit) ) {return ARR_ERR_OVFL;}
data2[x] = param %(copname)s data2[x];
}
}
if (param < 0) {
ovlimit = neg_ovlimit_34_%(funcmodifier)s(param);
for (x = 0; x < arraylen; x++) {
if ( neg_willoverflow(data2[x], ovlimit) ) {return ARR_ERR_OVFL;}
data2[x] = param %(copname)s data2[x];
}
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_num_arr_arr
signed int %(funclabel)s_%(funcmodifier)s_4(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s param, %(arraytype)s *data2, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
%(arraytype)s ovlimit;
%(simd_call_4_ovfl)s
// Non-SIMD version.
// Math error checking disabled.
if (ignoreerrors) {
for (x = 0; x < arraylen; x++) {
data3[x] = param %(copname)s data2[x];
}
} else {
// Math error checking enabled.
// If the parameter is zero, we can take a shortcut.
if (param == 0) {
for (x = 0; x < arraylen; x++) {
if (data2[x] == %(intminvalue)s) {return ARR_ERR_OVFL;}
data3[x] = -data2[x];
}
}
if (param > 0) {
ovlimit = pos_ovlimit_34_%(funcmodifier)s(param);
for (x = 0; x < arraylen; x++) {
if ( pos_willoverflow(data2[x], ovlimit) ) {return ARR_ERR_OVFL;}
data3[x] = param %(copname)s data2[x];
}
}
if (param < 0) {
ovlimit = neg_ovlimit_34_%(funcmodifier)s(param);
for (x = 0; x < arraylen; x++) {
if ( neg_willoverflow(data2[x], ovlimit) ) {return ARR_ERR_OVFL;}
data3[x] = param %(copname)s data2[x];
}
}
}
%(simd_call_close)s
return ARR_NO_ERR;
}
// param_arr_arr_none
signed int %(funclabel)s_%(funcmodifier)s_5(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s *data1, %(arraytype)s *data2, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
// Math error checking disabled.
if (ignoreerrors) {
%(simd_call_5)s
for (x = 0; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], data2[x]) ) {return ARR_ERR_OVFL;}
data1[x] = data1[x] %(copname)s data2[x];
}
}
return ARR_NO_ERR;
}
// param_arr_arr_arr
signed int %(funclabel)s_%(funcmodifier)s_6(Py_ssize_t arraylen,%(nosimddecl)s %(arraytype)s *data1, %(arraytype)s *data2, %(arraytype)s *data3, unsigned int ignoreerrors) {
// array index counter.
Py_ssize_t x;
// Math error checking disabled.
if (ignoreerrors) {
%(simd_call_6)s
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s data2[x];
}
} else {
// Math error checking enabled.
for (x = 0; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], data2[x]) ) {return ARR_ERR_OVFL;}
data3[x] = data1[x] %(copname)s data2[x];
}
}
return ARR_NO_ERR;
}
"""
# ==============================================================================
# Helper functions for SIMD support. There needs to be one for each data type.
# C template for a per-type initvec_* helper which broadcasts a scalar into
# every lane of a SIMD vector. Placeholders: %(simdplatform)s is the platform
# #if guard (closed by the trailing #endif), %(simdattr)s the vector type,
# %(simdwidth)s the lane count, %(vldinstr)s the load-intrinsic fragment.
# NOTE: %(vldinstr)s is expected to carry an unbalanced opening parenthesis
# (e.g. a cast plus call opener), which balances the extra ')' after
# '(initvals)' below — do not "fix" that parenthesis.
simd_helpers = """
/*--------------------------------------------------------------------------- */
/* Initialise an SIMD vector with a specifired value.
initval = The value to initialise the vector to.
Returns the initalised SIMD vector.
*/
%(simdplatform)s
%(simdattr)s initvec_%(funcmodifier)s(%(arraytype)s initval) {
unsigned int y;
%(arraytype)s initvals[%(simdwidth)s];
%(simdattr)s simdvec;
for (y = 0; y < %(simdwidth)s; y++) {
initvals[y] = initval;
}
simdvec = %(vldinstr)s(initvals));
return simdvec;
}
#endif
"""
# This should be created once only as it is not type dependent.
# It also includes the parameter descriptions for the type dependent
# macros, so it needs to appear first.
# Defines the generic pos_willoverflow / neg_willoverflow comparisons against
# a precomputed overflow limit, plus the C comment block documenting the
# per-type ovlimit_* and loop_willoverflow_* macros generated further below.
# No %-placeholders: this template is emitted verbatim, once per output file.
int_ovcheck = """
/*--------------------------------------------------------------------------- */
/* The integer overflow limit check.
val = The parameter value being checked.
ovlimit = The previously calculated overflow limit.
Returns True if overflow will happen.
*/
// For when ovlimit was calculated on a positive value (pos_ovlimit_12_).
#define pos_willoverflow(val, ovlimit) ( val < ovlimit )
// For when ovlimit was calculated on a negative value (neg_ovlimit_12_).
#define neg_willoverflow(val, ovlimit) ( val > ovlimit )
/*--------------------------------------------------------------------------- */
/* ovlimit_*
Calculate the maximum value an integer can be without overflowing.
This is used for equations where we need to know the maximum value
(magnitude for either +ve or -ve) which can be used in a calculation
without it overflowing.
val = The parameter value being checked.
Returns the overflow limit.
loop_willoverflow_*
This combined ovlimit and pos_willoverflow and neg_willoverflow. Use
this in loops where both sides of the equation are arraya and the
limit must be recalculated every iteration.
lval, rval = The respective current values of the arrays.
Returns True if the current operation will result in an integer overflow.
*/"""
# Create this for each signed integers type.
# C template for the type-specific signed overflow-limit macros, filled in
# with %-formatting. Placeholders: %(arraytype)s, %(funcmodifier)s, and
# %(intminvalue)s / %(intmaxvalue)s (the type's limit constants).
# Fixes to the generated C relative to the earlier template:
#  - removed a stray trailing ';' from neg_ovlimit_34_* (it was inconsistent
#    with the four sibling macros and would break any expression-context use
#    of the macro).
#  - parenthesised each macro body and its 'val' argument so the macros
#    expand safely regardless of the surrounding expression or the argument
#    passed in (standard C macro hygiene; expansion value is unchanged for
#    all existing 'ovlimit = ...(param);' call sites).
intov_macros_signed = """
/*--------------------------------------------------------------------------- */
// For %(arraytype)s.
// For when val is positive and the form is (array - param). Use when called in loops.
#define pos_ovlimit_12_%(funcmodifier)s(val) (%(intminvalue)s + (val))
// For when val is negative and the form is (array - param). Use when called in loops.
#define neg_ovlimit_12_%(funcmodifier)s(val) (%(intmaxvalue)s + (val))
// For when val is positive and the form is (param - array). Use when called in loops.
#define pos_ovlimit_34_%(funcmodifier)s(val) ((val) - %(intmaxvalue)s)
// For when val is negative and the form is (param - array). Use when called in loops.
#define neg_ovlimit_34_%(funcmodifier)s(val) ((val) - %(intminvalue)s)
// For use in loops when both parameters are arrays and are changing.
#define loop_willoverflow_%(funcmodifier)s(lval, rval) \\
(((rval > 0) && (lval < (%(intminvalue)s + rval))) \\
|| ((rval < 0) && (lval > (%(intmaxvalue)s + rval))))
"""
# Create this for each unsigned integers type.
# C template for the unsigned overflow check. For unsigned subtraction the
# result underflows exactly when the left operand is smaller than the right,
# so a single comparison macro serves both loop and non-loop call sites.
# Placeholders: %(arraytype)s, %(funcmodifier)s.
intov_macros_unsigned = """
/*--------------------------------------------------------------------------- */
// For %(arraytype)s.
// For unsigned only. Can use in loops and outside loops.
#define loop_willoverflow_%(funcmodifier)s(lval, rval) (lval < rval)
"""
# ==============================================================================
# ==============================================================================
# The template for overflow checks for x86_64. This requires the correct SIMD attribute
# to be inserted into it before it is itself inserted into the next template.
# Used to construct x86 overflow detection.
# NOTE: each *_willoverflow_* fragment below is the comparison intrinsic plus
# the opening of the overflow 'if' branch (from simd_ovcheck_x86); the
# consuming template supplies the branch body and the closing brace.
simd_ovcheck_x86 = '''
// Check for overflow.
if (!(__builtin_ia32_pmovmskb128((v16qi) ovcheck) == 0x0000)) {'''
# Forms 1/2 (array op param): compare against the left hand data slice.
simd_pos_willoverflow_12_x86 = '''// Do a less than compare operation by swapping the arguments.
ovcheck = %(vgtinstr)s (ovflvec, datasliceleft);''' + simd_ovcheck_x86
# Forms 3/4 (param op array): compare against the right hand data slice.
simd_pos_willoverflow_34_x86 = '''// Do a less than compare operation by swapping the arguments.
ovcheck = %(vgtinstr)s (ovflvec, datasliceright);''' + simd_ovcheck_x86
simd_neg_willoverflow_12_x86 = '''// Do a greater than compare operation.
ovcheck = %(vgtinstr)s (datasliceleft, ovflvec);''' + simd_ovcheck_x86
simd_neg_willoverflow_34_x86 = '''// Do a greater than compare operation.
ovcheck = %(vgtinstr)s (datasliceright, ovflvec);''' + simd_ovcheck_x86
# Equality check used for the param == 0 shortcut path.
simd_equ_willoverflow_x86 = '''// Do an equal compare operation.
ovcheck = %(veqinstr)s (ovflvec, datasliceright);''' + simd_ovcheck_x86
# The template for overflow checks for ARM NEON ARMv7 32 bit. This requires the correct SIMD size
# and sign (e.g. u8, s16, etc.) to be inserted before it is itself inserted into the next template.
# Used to construct armv7 overflow detection.
# As with the x86 fragments, each *_willoverflow_* value is the comparison
# intrinsic plus the opening of the overflow 'if' branch; the consuming
# template closes the brace. %(vreinterpinstr)s reinterprets the comparison
# result so it can be tested as a single integer.
simd_ovcheck_armv7 = '''
// Check for overflow.
if (!(%(vreinterpinstr)s(ovcheck) == 0x0000000000000000)) {'''
simd_pos_willoverflow_12_armv7 = '''// Do a less than compare operation.
ovcheck = %(vltinstr)s (datasliceleft, ovflvec);''' + simd_ovcheck_armv7
simd_pos_willoverflow_34_armv7 = '''// Do a less than compare operation.
ovcheck = %(vltinstr)s (datasliceright, ovflvec);''' + simd_ovcheck_armv7
simd_neg_willoverflow_12_armv7 = '''// Do a greater than compare operation.
ovcheck = %(vgtinstr)s (datasliceleft, ovflvec);''' + simd_ovcheck_armv7
simd_neg_willoverflow_34_armv7 = '''// Do a greater than compare operation.
ovcheck = %(vgtinstr)s (datasliceright, ovflvec);''' + simd_ovcheck_armv7
simd_equ_willoverflow_armv7 = '''// Do an equal compare operation.
ovcheck = %(veqinstr)s (datasliceright, ovflvec);''' + simd_ovcheck_armv7
# Unsigned case: a direct element-wise 'left < right' detects underflow.
simd_unsigned_willoverflow_armv7 = '''// Do a less than compare operation.
ovcheck = %(vltinstr)s (datasliceleft, datasliceright);''' + simd_ovcheck_armv7
# The template for overflow checks for ARM NEON ARMv8 64 bit. This requires the correct SIMD size
# and sign (e.g. u8, s16, etc.) to be inserted before it is itself inserted into the next template.
# Used to construct armv8 overflow detection.
# The 128 bit comparison result is reinterpreted as two 64 bit lanes which
# are extracted and tested individually. Any function using these fragments
# must also splice in simd_ovflchk_extravars_armv8 (below), which declares
# the veccombine / lowresult / highresult variables used here.
simd_ovcheck_armv8 = '''
// Check for overflow.
// Combine the result to two 64 bit vectors.
veccombine = %(vreinterpinstr)s(ovcheck);
// Get the high and low lanes of the combined vector.
lowresult = vgetq_lane_u64(veccombine, 0);
highresult = vgetq_lane_u64(veccombine, 1);
// Check if overflow will happen.
if ((lowresult != 0x0000000000000000) || (highresult != 0x0000000000000000)) {'''
simd_pos_willoverflow_12_armv8 = '''// Do a less than compare operation.
ovcheck = %(vltinstr)s (datasliceleft, ovflvec);''' + simd_ovcheck_armv8
simd_pos_willoverflow_34_armv8 = '''// Do a less than compare operation.
ovcheck = %(vltinstr)s (datasliceright, ovflvec);''' + simd_ovcheck_armv8
simd_neg_willoverflow_12_armv8 = '''// Do a greater than compare operation.
ovcheck = %(vgtinstr)s (datasliceleft, ovflvec);''' + simd_ovcheck_armv8
simd_neg_willoverflow_34_armv8 = '''// Do a greater than compare operation.
ovcheck = %(vgtinstr)s (datasliceright, ovflvec);''' + simd_ovcheck_armv8
simd_equ_willoverflow_armv8 = '''// Do an equal compare operation.
ovcheck = %(veqinstr)s (datasliceright, ovflvec);''' + simd_ovcheck_armv8
simd_unsigned_willoverflow_armv8 = '''// Do a less than compare operation.
ovcheck = %(vltinstr)s (datasliceleft, datasliceright);''' + simd_ovcheck_armv8
# Extra variable declarations required by the armv8 overflow check above.
simd_ovflchk_extravars_armv8 = '''uint64x2_t veccombine;
uint64_t highresult, lowresult;'''
# ==============================================================================
# ==============================================================================
# The operations using SIMD. This handles multiple different SIMD operations.
# This version does not check for overflow.
# C template for the six *_simd functions (one per parameter combination).
# Each processes the SIMD-aligned portion of the array a slice at a time and
# then finishes the remainder with a scalar loop. Placeholders:
#   %(simdplatform)s   - platform #if guard (closed by the trailing #endif).
#   %(simdattr)s       - the SIMD vector type.
#   %(simdwidth)s      - elements per SIMD slice.
#   %(vldinstr)s, %(vstinstr1)s/%(vstinstr2)s - load/store intrinsic
#                        fragments (they carry their own open parentheses,
#                        balanced by the ')' in the template lines).
#   %(vopinstr)s       - the arithmetic intrinsic; %(copname)s the matching
#                        scalar C operator used for the leftover elements.
ops_simdsupport = """
/*--------------------------------------------------------------------------- */
/* The following series of functions reflect the different parameter options possible.
This version is without overflow checking.
arraylen = The length of the data arrays.
data1 = The first data array.
data2 = The second data array.
data3 = The third data array.
param = The parameter to be applied to each array element.
*/
// param_arr_num_none
%(simdplatform)s
void %(funclabel)s_%(funcmodifier)s_1_simd(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s param) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
// Initialise the comparison values.
datasliceright = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data1[x], %(vstinstr2)s resultslice);
}
// Get the max value within the left over elements at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s param;
}
}
// param_arr_num_arr
void %(funclabel)s_%(funcmodifier)s_2_simd(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s param, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
// Initialise the comparison values.
datasliceright = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Get the max value within the left over elements at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s param;
}
}
// param_num_arr_none
void %(funclabel)s_%(funcmodifier)s_3_simd(Py_ssize_t arraylen, %(arraytype)s param, %(arraytype)s *data2) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
// Initialise the comparison values.
datasliceleft = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data2[x], %(vstinstr2)s resultslice);
}
// Get the max value within the left over elements at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data2[x] = param %(copname)s data2[x];
}
}
// param_num_arr_arr
void %(funclabel)s_%(funcmodifier)s_4_simd(Py_ssize_t arraylen, %(arraytype)s param, %(arraytype)s *data2, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
// Initialise the comparison values.
datasliceleft = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Get the max value within the left over elements at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data3[x] = param %(copname)s data2[x];
}
}
// param_arr_arr_none
void %(funclabel)s_%(funcmodifier)s_5_simd(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s *data2) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
datasliceright = %(vldinstr)s &data2[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data1[x], %(vstinstr2)s resultslice);
}
// Get the max value within the left over elements at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s data2[x];
}
}
// param_arr_arr_arr
void %(funclabel)s_%(funcmodifier)s_6_simd(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s *data2, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
datasliceright = %(vldinstr)s &data2[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Get the max value within the left over elements at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s data2[x];
}
}
#endif
/*--------------------------------------------------------------------------- */
"""
# ==============================================================================
# The signed operations using SIMD. This handles multiple different SIMD operations.
# This version checks for overflow but does NOT do array to array.
ops_simdsupport_ovfl_signed = """
/*--------------------------------------------------------------------------- */
/* The following series of functions reflect the different parameter options possible.
This version supports overflow checking.
arraylen = The length of the data arrays.
data1 = The first data array.
data2 = The second data array.
data3 = The third data array.
param = The parameter to be applied to each array element.
Returns 1 if overflow occurred, else returns 0.
*/
// param_arr_num_none
%(simdplatform)s
char %(funclabel)s_%(funcmodifier)s_1_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s param) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(arraytype)s ovlimit;
%(simdattr)s datasliceleft, datasliceright, resultslice, ovflvec;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
// We don't need to do anything if param is zero.
if (param == 0) {
return 0;
}
// Initialise the param values.
datasliceright = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// param is positive.
if (param > 0) {
// Used to calculate overflow.
ovlimit = pos_ovlimit_12_%(funcmodifier)s(param);
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// Check for overflow.
%(simd_pos_willoverflow_12)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data1[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( pos_willoverflow(data1[x], ovlimit) ) {return 1;}
data1[x] = data1[x] - param;
}
}
// param is negative.
if (param < 0) {
// Used to calculate overflow.
ovlimit = neg_ovlimit_12_%(funcmodifier)s(param);
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// Check for overflow.
%(simd_neg_willoverflow_12)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data1[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( neg_willoverflow(data1[x], ovlimit) ) {return 1;}
data1[x] = data1[x] - param;
}
}
return 0;
}
// param_arr_num_arr
char %(funclabel)s_%(funcmodifier)s_2_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s param, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(arraytype)s ovlimit;
%(simdattr)s datasliceleft, datasliceright, resultslice, ovflvec;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
// We don't need to do anything if param is zero, just copy the data.
if (param == 0) {
for (x = 0; x < arraylen; x++) {
data3[x] = data1[x];
}
return 0;
}
// Initialise the param values.
datasliceright = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// param is positive.
if (param > 0) {
// Used to calculate overflow.
ovlimit = pos_ovlimit_12_%(funcmodifier)s(param);
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// Check for overflow.
%(simd_pos_willoverflow_12)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( pos_willoverflow(data1[x], ovlimit) ) {return 1;}
data3[x] = data1[x] - param;
}
}
// param is negative.
if (param < 0) {
// Used to calculate overflow.
ovlimit = neg_ovlimit_12_%(funcmodifier)s(param);
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// Check for overflow.
%(simd_neg_willoverflow_12)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( neg_willoverflow(data1[x], ovlimit) ) {return 1;}
data3[x] = data1[x] - param;
}
}
return 0;
}
// param_num_arr_none
char %(funclabel)s_%(funcmodifier)s_3_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s param, %(arraytype)s *data2) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(arraytype)s ovlimit;
%(simdattr)s datasliceleft, datasliceright, resultslice, ovflvec;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
%(vsignparam)s
// Initialise the param values.
datasliceleft = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// If the parameter is zero, we can take a shortcut.
if (param == 0) {
// Used to calculate overflow.
ovlimit = %(intminvalue)s;
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_equ_willoverflow)s
return 1;
}
// The actual SIMD operation. Since we are subtracting from
// zero we simply negate it.
resultslice = %(vneginstr)s;
// Store the result.
%(vstinstr1)s &data2[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if (data2[x] == ovlimit) {return 1;}
data2[x] = -data2[x];
}
}
// param is positive.
if (param > 0) {
// Used to calculate overflow.
ovlimit = pos_ovlimit_34_%(funcmodifier)s(param);
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_pos_willoverflow_34)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data2[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if (pos_willoverflow( data2[x], ovlimit) ) {return 1;}
data2[x] = param - data2[x];
}
}
// param is negative.
if (param < 0) {
// Used to calculate overflow.
ovlimit = neg_ovlimit_34_%(funcmodifier)s(param);
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_neg_willoverflow_34)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data2[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( neg_willoverflow(data2[x], ovlimit) ) {return 1;}
data2[x] = param - data2[x];
}
}
return 0;
}
// param_num_arr_arr
char %(funclabel)s_%(funcmodifier)s_4_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s param, %(arraytype)s *data2, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(arraytype)s ovlimit;
%(simdattr)s datasliceleft, datasliceright, resultslice, ovflvec;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
%(vsignparam)s
// Initialise the param values.
datasliceleft = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// If the parameter is zero, we can take a shortcut.
if (param == 0) {
// Used to calculate overflow.
ovlimit = %(intminvalue)s;
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_equ_willoverflow)s
return 1;
}
// The actual SIMD operation. Since we are subtracting from
// zero we simply negate it.
resultslice = %(vneginstr)s;
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if (data2[x] == ovlimit) {return 1;}
data3[x] = -data2[x];
}
}
// param is positive.
if (param > 0) {
// Used to calculate overflow.
ovlimit = pos_ovlimit_34_%(funcmodifier)s(param);
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_pos_willoverflow_34)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( pos_willoverflow(data2[x], ovlimit) ) {return 1;}
data3[x] = param - data2[x];
}
}
// param is negative.
if (param < 0) {
// Used to calculate overflow.
ovlimit = neg_ovlimit_34_%(funcmodifier)s(param);
// This is used for detecting a potential overflow condition.
ovflvec = initvec_%(funcmodifier)s(ovlimit);
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_neg_willoverflow_34)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( neg_willoverflow(data2[x], ovlimit) ) {return 1;}
data3[x] = param - data2[x];
}
}
return 0;
}
#endif
"""
# ==============================================================================
# The unsigned operations using SIMD. This handles multiple different SIMD operations.
# This version checks for overflow and DOES do array to array.
# C source template for the six parameter-combination variants (_1 .. _6) of an
# unsigned integer operation with SIMD acceleration and overflow checking.
# Placeholders (e.g. %(funclabel)s, %(funcmodifier)s, %(simdattr)s, %(vldinstr)s,
# %(vopinstr)s, %(vstinstr1)s, %(vstinstr2)s, %(simdwidth)s,
# %(simd_unsigned_willoverflow)s) are substituted via Python %-formatting by the
# code generator. The generated C functions return 1 on detected overflow and
# 0 on success. The %(simdplatform)s / #endif pair brackets the whole set in a
# platform-specific conditional compilation guard.
ops_simdsupport_ovfl_unsigned = """
/*--------------------------------------------------------------------------- */
/* The following series of functions reflect the different parameter options possible.
This version is with overflow checking.
arraylen = The length of the data arrays.
data1 = The first data array.
data2 = The second data array.
data3 = The third data array.
param = The parameter to be applied to each array element.
*/
// param_arr_num_none
%(simdplatform)s
char %(funclabel)s_%(funcmodifier)s_1_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s param) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
// Initialise the comparison values.
datasliceright = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// Check for overflow.
%(simd_unsigned_willoverflow)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data1[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], param) ) {return 1;}
data1[x] = data1[x] - param;
}
return 0;
}
// param_arr_num_arr
char %(funclabel)s_%(funcmodifier)s_2_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s param, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
// Initialise the comparison values.
datasliceright = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// Check for overflow.
%(simd_unsigned_willoverflow)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], param) ) {return 1;}
data3[x] = data1[x] - param;
}
return 0;
}
// param_num_arr_none
char %(funclabel)s_%(funcmodifier)s_3_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s param, %(arraytype)s *data2) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
// Initialise the comparison values.
datasliceleft = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_unsigned_willoverflow)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data2[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(param, data2[x]) ) {return 1;}
data2[x] = param - data2[x];
}
return 0;
}
// param_num_arr_arr
char %(funclabel)s_%(funcmodifier)s_4_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s param, %(arraytype)s *data2, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
// Initialise the comparison values.
datasliceleft = initvec_%(funcmodifier)s(param);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_unsigned_willoverflow)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(param, data2[x]) ) {return 1;}
data3[x] = param - data2[x];
}
return 0;
}
// param_arr_arr_none
char %(funclabel)s_%(funcmodifier)s_5_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s *data2) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_unsigned_willoverflow)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data1[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], data2[x]) ) {return 1;}
data1[x] = data1[x] - data2[x];
}
return 0;
}
// param_arr_arr_arr
char %(funclabel)s_%(funcmodifier)s_6_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s *data2, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice;
%(ovflsimdattr)s ovcheck;
%(simd_ovflchk_extravars)s
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
datasliceright = %(vldinstr)s &data2[x]);
// Check for overflow.
%(simd_unsigned_willoverflow)s
return 1;
}
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
if ( loop_willoverflow_%(funcmodifier)s(data1[x], data2[x]) ) {return 1;}
data3[x] = data1[x] - data2[x];
}
return 0;
}
#endif
/*--------------------------------------------------------------------------- */
"""
# ==============================================================================
# ==============================================================================
# The floating point operations using SIMD. This includes overflow conditions.
# C source template for the six parameter-combination variants (_1 .. _6) of a
# floating point operation with SIMD acceleration and error (non-finite result)
# checking. Errors are detected by multiplying each result slice into an
# accumulator vector initialised to zero: finite results keep it at zero, while
# NaN / infinity propagate through and are detected after the loop with
# isfinite(). The generated functions return 1 on error and 0 on success.
# Fixes in the embedded C comments: the header wrongly said "without overflow
# checking", and the leftover-element loops carried a copy-paste comment
# ("Get the max value...") from an unrelated template; both now match the
# behaviour and the wording used by the sibling templates.
ops_simdsupport_ovfl_float = """
/*--------------------------------------------------------------------------- */
/* The following series of functions reflect the different parameter options possible.
This version is with overflow checking.
arraylen = The length of the data arrays.
data1 = The first data array.
data2 = The second data array.
data3 = The third data array.
param = The parameter to be applied to each array element.
Returns 1 if overflow occurred, else returns 0.
*/
// param_arr_num_none
%(simdplatform)s
char %(funclabel)s_%(funcmodifier)s_1_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s param) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice, checkslice;
%(arraytype)s checkvecresults[%(simdwidth)s];
%(arraytype)s checksliceinit[%(simdwidth)s] = {0.0};
// Initialise the comparison values.
datasliceright = initvec_%(funcmodifier)s(param);
// This is used to check for errors by accumulating non-finite values.
checkslice = %(vldinstr)s checksliceinit);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data1[x], %(vstinstr2)s resultslice);
// Check the result. None-finite errors should accumulate.
checkslice = %(simdmul)s(checkslice, resultslice);
}
// Check the results of the SIMD operations. If all is OK then the
// results should be all zeros. Any none-finite numbers however will
// propagate through and accumulate.
%(vstinstr1)s checkvecresults, checkslice);
for (x = 0; x < %(simdwidth)s; x++) {
if (!isfinite(checkvecresults[x])) {return 1;}
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s param;
if (!isfinite(data1[x])) {return 1;}
}
return 0;
}
// param_arr_num_arr
char %(funclabel)s_%(funcmodifier)s_2_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s param, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice, checkslice;
%(arraytype)s checkvecresults[%(simdwidth)s];
%(arraytype)s checksliceinit[%(simdwidth)s] = {0.0};
// Initialise the comparison values.
datasliceright = initvec_%(funcmodifier)s(param);
// This is used to check for errors by accumulating non-finite values.
checkslice = %(vldinstr)s checksliceinit);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
// Check the result. None-finite errors should accumulate.
checkslice = %(simdmul)s(checkslice, resultslice);
}
// Check the results of the SIMD operations. If all is OK then the
// results should be all zeros. Any none-finite numbers however will
// propagate through and accumulate.
%(vstinstr1)s checkvecresults, checkslice);
for (x = 0; x < %(simdwidth)s; x++) {
if (!isfinite(checkvecresults[x])) {return 1;}
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s param;
if (!isfinite(data3[x])) {return 1;}
}
return 0;
}
// param_num_arr_none
char %(funclabel)s_%(funcmodifier)s_3_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s param, %(arraytype)s *data2) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice, checkslice;
%(arraytype)s checkvecresults[%(simdwidth)s];
%(arraytype)s checksliceinit[%(simdwidth)s] = {0.0};
// Initialise the comparison values.
datasliceleft = initvec_%(funcmodifier)s(param);
// This is used to check for errors by accumulating non-finite values.
checkslice = %(vldinstr)s checksliceinit);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data2[x], %(vstinstr2)s resultslice);
// Check the result. None-finite errors should accumulate.
checkslice = %(simdmul)s(checkslice, resultslice);
}
// Check the results of the SIMD operations. If all is OK then the
// results should be all zeros. Any none-finite numbers however will
// propagate through and accumulate.
%(vstinstr1)s checkvecresults, checkslice);
for (x = 0; x < %(simdwidth)s; x++) {
if (!isfinite(checkvecresults[x])) {return 1;}
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data2[x] = param %(copname)s data2[x];
if (!isfinite(data2[x])) {return 1;}
}
return 0;
}
// param_num_arr_arr
char %(funclabel)s_%(funcmodifier)s_4_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s param, %(arraytype)s *data2, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice, checkslice;
%(arraytype)s checkvecresults[%(simdwidth)s];
%(arraytype)s checksliceinit[%(simdwidth)s] = {0.0};
// Initialise the comparison values.
datasliceleft = initvec_%(funcmodifier)s(param);
// This is used to check for errors by accumulating non-finite values.
checkslice = %(vldinstr)s checksliceinit);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceright = %(vldinstr)s &data2[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
// Check the result. None-finite errors should accumulate.
checkslice = %(simdmul)s(checkslice, resultslice);
}
// Check the results of the SIMD operations. If all is OK then the
// results should be all zeros. Any none-finite numbers however will
// propagate through and accumulate.
%(vstinstr1)s checkvecresults, checkslice);
for (x = 0; x < %(simdwidth)s; x++) {
if (!isfinite(checkvecresults[x])) {return 1;}
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data3[x] = param %(copname)s data2[x];
if (!isfinite(data3[x])) {return 1;}
}
return 0;
}
// param_arr_arr_none
char %(funclabel)s_%(funcmodifier)s_5_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s *data2) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice, checkslice;
%(arraytype)s checkvecresults[%(simdwidth)s];
%(arraytype)s checksliceinit[%(simdwidth)s] = {0.0};
// This is used to check for errors by accumulating non-finite values.
checkslice = %(vldinstr)s checksliceinit);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
datasliceright = %(vldinstr)s &data2[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data1[x], %(vstinstr2)s resultslice);
// Check the result. None-finite errors should accumulate.
checkslice = %(simdmul)s(checkslice, resultslice);
}
// Check the results of the SIMD operations. If all is OK then the
// results should be all zeros. Any none-finite numbers however will
// propagate through and accumulate.
%(vstinstr1)s checkvecresults, checkslice);
for (x = 0; x < %(simdwidth)s; x++) {
if (!isfinite(checkvecresults[x])) {return 1;}
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data1[x] = data1[x] %(copname)s data2[x];
if (!isfinite(data1[x])) {return 1;}
}
return 0;
}
// param_arr_arr_arr
char %(funclabel)s_%(funcmodifier)s_6_simd_ovfl(Py_ssize_t arraylen, %(arraytype)s *data1, %(arraytype)s *data2, %(arraytype)s *data3) {
// array index counter.
Py_ssize_t x;
// SIMD related variables.
Py_ssize_t alignedlength;
%(simdattr)s datasliceleft, datasliceright, resultslice, checkslice;
%(arraytype)s checkvecresults[%(simdwidth)s];
%(arraytype)s checksliceinit[%(simdwidth)s] = {0.0};
// This is used to check for errors by accumulating non-finite values.
checkslice = %(vldinstr)s checksliceinit);
// Calculate array lengths for arrays whose lengths which are not even
// multipes of the SIMD slice length.
alignedlength = calcalignedlength(arraylen, %(simdwidth)s);
// Perform the main operation using SIMD instructions.
for (x = 0; x < alignedlength; x += %(simdwidth)s) {
// Load the data into the vector register.
datasliceleft = %(vldinstr)s &data1[x]);
datasliceright = %(vldinstr)s &data2[x]);
// The actual SIMD operation.
resultslice = %(vopinstr)s(datasliceleft, datasliceright);
// Store the result.
%(vstinstr1)s &data3[x], %(vstinstr2)s resultslice);
// Check the result. None-finite errors should accumulate.
checkslice = %(simdmul)s(checkslice, resultslice);
}
// Check the results of the SIMD operations. If all is OK then the
// results should be all zeros. Any none-finite numbers however will
// propagate through and accumulate.
%(vstinstr1)s checkvecresults, checkslice);
for (x = 0; x < %(simdwidth)s; x++) {
if (!isfinite(checkvecresults[x])) {return 1;}
}
// Handle the values left over at the end of the array.
for (x = alignedlength; x < arraylen; x++) {
data3[x] = data1[x] %(copname)s data2[x];
if (!isfinite(data3[x])) {return 1;}
}
return 0;
}
#endif
/*--------------------------------------------------------------------------- */
"""
# ==============================================================================
# ==============================================================================
# This is the set of function calls used to call each operator function.
opscall = """
// %(funcmodifier)s
case '%(arraycode)s' : {
switch (arraydata.paramcat) {
case param_arr_num_none : {
resultcode = %(funclabel)s_%(funcmodifier)s_1(arraydata.arraylength,%(nosimdparam)s arraydata.array1.%(arraycode)s, arraydata.param.%(arraycode)s, arraydata.ignoreerrors);
break;
}
case param_arr_num_arr : {
resultcode = %(funclabel)s_%(funcmodifier)s_2(arraydata.arraylength,%(nosimdparam)s arraydata.array1.%(arraycode)s, arraydata.param.%(arraycode)s, arraydata.array3.%(arraycode)s, arraydata.ignoreerrors);
break;
}
case param_num_arr_none : {
resultcode = %(funclabel)s_%(funcmodifier)s_3(arraydata.arraylength,%(nosimdparam)s arraydata.param.%(arraycode)s, arraydata.array2.%(arraycode)s, arraydata.ignoreerrors);
break;
}
case param_num_arr_arr : {
resultcode = %(funclabel)s_%(funcmodifier)s_4(arraydata.arraylength,%(nosimdparam)s arraydata.param.%(arraycode)s, arraydata.array2.%(arraycode)s, arraydata.array3.%(arraycode)s, arraydata.ignoreerrors);
break;
}
case param_arr_arr_none : {
resultcode = %(funclabel)s_%(funcmodifier)s_5(arraydata.arraylength,%(nosimdparam)s arraydata.array1.%(arraycode)s, arraydata.array2.%(arraycode)s, arraydata.ignoreerrors);
break;
}
case param_arr_arr_arr : {
resultcode = %(funclabel)s_%(funcmodifier)s_6(arraydata.arraylength,%(nosimdparam)s arraydata.array1.%(arraycode)s, arraydata.array2.%(arraycode)s, arraydata.array3.%(arraycode)s, arraydata.ignoreerrors);
break;
}
}
break;
}
"""
# ==============================================================================
mathops_params = """
/*--------------------------------------------------------------------------- */
/* The wrapper to the underlying C function */
static PyObject *py_%(funclabel)s(PyObject *self, PyObject *args, PyObject *keywds) {
// The error code returned by the function.
signed int resultcode = -1;
// This is used to hold the parsed parameters.
struct args_params_2 arraydata = ARGSINIT_TWO;
// -----------------------------------------------------
// Get the parameters passed from Python.
arraydata = getparams_two(self, args, keywds, 1, %(getsimdparam)s, "%(funclabel)s");
// If there was an error, we count on the parameter parsing function to
// release the buffers if this was necessary.
if (arraydata.error) {
return NULL;
}
// Call the C function.
switch(arraydata.arraytype) {
%(opscall)s
// Wrong array type code.
default: {
releasebuffers_two(arraydata);
ErrMsgTypeExpectFloat();
return NULL;
break;
}
}
// Release the buffers.
releasebuffers_two(arraydata);
// Signal the errors.
if (resultcode == ARR_ERR_ZERODIV) {
ErrMsgZeroDiv();
return NULL;
}
if (resultcode == ARR_ERR_ARITHMETIC) {
ErrMsgArithCalc();
return NULL;
}
if (resultcode == ARR_ERR_OVFL) {
ErrMsgArithOverflowCalc();
return NULL;
}
// Everything was successful.
Py_RETURN_NONE;
}
/*--------------------------------------------------------------------------- */
/* The module doc string */
PyDoc_STRVAR(%(funclabel)s__doc__,
"%(funclabel)s \\n\\
_____________________________ \\n\\
\\n\\
Calculate %(funclabel)s over the values in an array. \\n\\
\\n\\
====================== ============================================== \\n\\
Equivalent to: [x %(pyoperator)s param for x in array1] \\n\\
or [param %(pyoperator)s y for y in array2] \\n\\
or [x %(pyoperator)s y for x, y in zip(array1, array2)] \\n\\
====================== ============================================== \\n\\
\\n\\
====================== ============================================== \\n\\
Array types supported: %(supportedarrays)s \\n\\
Exceptions raised: %(matherrors)s \\n\\
====================== ============================================== \\n\\
\\n\\
Call formats: \\n\\
\\n\\
%(funclabel)s(array1, param) \\n\\
%(funclabel)s(array1, param, outparray) \\n\\
%(funclabel)s(param, array1) \\n\\
%(funclabel)s(param, array1, outparray) \\n\\
%(funclabel)s(array1, array2) \\n\\
%(funclabel)s(array1, array2, outparray) \\n\\
%(funclabel)s(array1, param, maxlen=y) \\n\\
%(funclabel)s(array1, param, matherrors=False) \\n\\
%(helpsimd1)s\\n\\
\\n\\
* array1 - The first input data array to be examined. If no output \\n\\
array is provided the results will overwrite the input data. \\n\\
* param - A non-array numeric parameter. \\n\\
* array2 - A second input data array. Each element in this array is \\n\\
applied to the corresponding element in the first array. \\n\\
* outparray - The output array. This parameter is optional. \\n\\
* maxlen - Limit the length of the array used. This must be a valid \\n\\
positive integer. If a zero or negative length, or a value which is \\n\\
greater than the actual length of the array is specified, this \\n\\
parameter is ignored. \\n\\
* matherrors - If true, arithmetic error checking is disabled. The \\n\\
default is false. \\n\\
%(helpsimd2)s");
/*--------------------------------------------------------------------------- */
/* A list of all the methods defined by this module.
"%(funclabel)s" is the name seen inside of Python.
"py_%(funclabel)s" is the name of the C function handling the Python call.
"METH_VARGS" tells Python how to call the handler.
The {NULL, NULL} entry indicates the end of the method definitions. */
static PyMethodDef %(funclabel)s_methods[] = {
{"%(funclabel)s", (PyCFunction)py_%(funclabel)s, METH_VARARGS | METH_KEYWORDS, %(funclabel)s__doc__},
{NULL, NULL, 0, NULL}
};
static struct PyModuleDef %(funclabel)smodule = {
PyModuleDef_HEAD_INIT,
"%(funclabel)s",
NULL,
-1,
%(funclabel)s_methods
};
PyMODINIT_FUNC PyInit_%(funclabel)s(void)
{
return PyModule_Create(&%(funclabel)smodule);
};
/*--------------------------------------------------------------------------- */
"""
# ==============================================================================
# ==============================================================================
# This is required for SIMD operations only.
# #include block emitted when the function has both x86 and ARM (ARMv7 32 bit
# and AArch64) SIMD implementations. The platform-specific headers are only
# pulled in when the corresponding AF_HASSIMD_* macro is defined.
includeoptions_both = '''#include "simddefs.h"
#ifdef AF_HASSIMD_X86
#include "%(funclabel)s_simd_x86.h"
#endif
#if defined(AF_HASSIMD_ARMv7_32BIT) || defined(AF_HASSIMD_ARM_AARCH64)
#include "arm_neon.h"
#endif
#if defined(AF_HASSIMD_ARMv7_32BIT)
#include "%(funclabel)s_simd_armv7.h"
#endif
#if defined(AF_HASSIMD_ARM_AARCH64)
#include "%(funclabel)s_simd_armv8.h"
#endif
'''
# This is required for SIMD operations only.
# #include block emitted when the function has ARM-only SIMD implementations
# (no x86 version): same as includeoptions_both but without the x86 header.
includeoptions_arm = '''#include "simddefs.h"
#if defined(AF_HASSIMD_ARMv7_32BIT) || defined(AF_HASSIMD_ARM_AARCH64)
#include "arm_neon.h"
#endif
#if defined(AF_HASSIMD_ARMv7_32BIT)
#include "%(funclabel)s_simd_armv7.h"
#endif
#if defined(AF_HASSIMD_ARM_AARCH64)
#include "%(funclabel)s_simd_armv8.h"
#endif
'''
# SIMD call template. This has to handle multiple template strings,
# and so is presented as a dictionary to allow it to be handled
# iteratively.
# The numeric suffix selects the parameter combination. Judging from the
# argument patterns (data1/data2 are input arrays, data3 is the output
# array, param is a scalar): 1/2 = array op num (without / with output
# array), 3/4 = num op array, 5/6 = array op array — TODO confirm against
# the C templates elsewhere in this file.
SIMD_call = {
	'simd_call_1' : '''\n%(simdplatform)s
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
%(funclabel)s_%(funcmodifier)s_1_simd(arraylen, data1, param);
return ARR_NO_ERR;
}
#endif\n''',
	'simd_call_2' : '''\n%(simdplatform)s
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
%(funclabel)s_%(funcmodifier)s_2_simd(arraylen, data1, param, data3);
return ARR_NO_ERR;
}
#endif\n''',
	'simd_call_3' : '''\n%(simdplatform)s
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
%(funclabel)s_%(funcmodifier)s_3_simd(arraylen, param, data2);
return ARR_NO_ERR;
}
#endif\n''',
	'simd_call_4' : '''\n%(simdplatform)s
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
%(funclabel)s_%(funcmodifier)s_4_simd(arraylen, param, data2, data3);
return ARR_NO_ERR;
}
#endif\n''',
	'simd_call_5' : '''\n%(simdplatform)s
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
%(funclabel)s_%(funcmodifier)s_5_simd(arraylen, data1, data2);
return ARR_NO_ERR;
}
#endif\n''',
	'simd_call_6' : '''\n%(simdplatform)s
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
%(funclabel)s_%(funcmodifier)s_6_simd(arraylen, data1, data2, data3);
return ARR_NO_ERR;
}
#endif\n'''
}
# This is used to insert the "nosimd" parameter in parameter declarations.
nosimddecl = ' int nosimd,'
# This one is used in the actual function call.
nosimdparam = ' arraydata.nosimd,'
# The following are used to fill in template data which handles whether
# a function requires SIMD related template data or not.
helpsimd1_template = ' %(funclabel)s(array, param, nosimd=False)'
helpsimd2_template = '''* nosimd - If True, SIMD acceleration is disabled. This parameter is \\n\\
optional. The default is FALSE. \\n\\n'''
# ==============================================================================
# ==============================================================================
# SIMD call template for overflow checking. This has to handle multiple
# template strings, and so is presented as a dictionary to allow it to be handled
# iteratively.
# The numeric suffixes follow the same parameter combinations as SIMD_call.
# Each entry opens an "} else {" block that falls back to the non-SIMD
# implementation; 'simd_call_close' closes that block and must be emitted
# after the matching opening entry.
SIMD_call_ovfl = {
	'simd_call_1_ovfl' : '''\n%(simdplatform)s
char ovflresult;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_1_simd(arraylen, data1, param);
} else {
// Math error checking enabled.
ovflresult = %(funclabel)s_%(funcmodifier)s_1_simd_ovfl(arraylen, data1, param);
if (ovflresult) { return ARR_ERR_OVFL; }
}
} else {
#endif\n''',
	'simd_call_2_ovfl' : '''\n%(simdplatform)s
char ovflresult;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_2_simd(arraylen, data1, param, data3);
} else {
// Math error checking enabled.
ovflresult = %(funclabel)s_%(funcmodifier)s_2_simd_ovfl(arraylen, data1, param, data3);
if (ovflresult) { return ARR_ERR_OVFL; }
}
} else {
#endif\n''',
	'simd_call_3_ovfl' : '''\n%(simdplatform)s
char ovflresult;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_3_simd(arraylen, param, data2);
} else {
// Math error checking enabled.
ovflresult = %(funclabel)s_%(funcmodifier)s_3_simd_ovfl(arraylen, param, data2);
if (ovflresult) { return ARR_ERR_OVFL; }
}
} else {
#endif\n''',
	'simd_call_4_ovfl' : '''\n%(simdplatform)s
char ovflresult;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_4_simd(arraylen, param, data2, data3);
} else {
// Math error checking enabled.
ovflresult = %(funclabel)s_%(funcmodifier)s_4_simd_ovfl(arraylen, param, data2, data3);
if (ovflresult) { return ARR_ERR_OVFL; }
}
} else {
#endif\n''',
	'simd_call_5_ovfl' : '''\n%(simdplatform)s
char ovflresult;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_5_simd(arraylen, data1, data2);
} else {
// Math error checking enabled.
ovflresult = %(funclabel)s_%(funcmodifier)s_5_simd_ovfl(arraylen, data1, data2);
if (ovflresult) { return ARR_ERR_OVFL; }
}
} else {
#endif\n''',
	'simd_call_6_ovfl' : '''\n%(simdplatform)s
char ovflresult;
// SIMD version.
if (!nosimd && enoughforsimd(arraylen, %(simdwidth)s)) {
// Math error checking disabled.
if (ignoreerrors) {
%(funclabel)s_%(funcmodifier)s_6_simd(arraylen, data1, data2, data3);
} else {
// Math error checking enabled.
ovflresult = %(funclabel)s_%(funcmodifier)s_6_simd_ovfl(arraylen, data1, data2, data3);
if (ovflresult) { return ARR_ERR_OVFL; }
}
} else {
#endif\n''',
	# Closes the "} else {" fallback block opened by the entries above.
	'simd_call_close' : '''\n%(simdplatform)s
}
#endif\n''',
}
# ==============================================================================
# ==============================================================================
# Various SIMD instruction information which varies according to array type.
# For x86-64.
# Keys throughout these tables are array module type codes:
# 'b'/'B' = signed/unsigned char, 'h'/'H' = short, 'i'/'I' = int,
# 'f' = float, 'd' = double.
# GCC vector type name used for each array type.
simdattr_x86 = {
	'b' : 'v16qi',
	'B' : 'v16qi',
	'h' : 'v8hi',
	'H' : 'v8hi',
	'i' : 'v4si',
	'I' : 'v4si',
	'f' : 'v4sf',
	'd' : 'v2df',
}
# Vector type used for overflow check intermediate values.
ovflsimdattr_x86 = {
	'b' : 'v16qi',
	'B' : 'v16qi',
	'h' : 'v8hi',
	'H' : 'v8hi',
	'i' : 'v4si',
	'I' : 'v4si',
	'f' : 'v4sf',
	'd' : 'v2df',
}
# Unaligned vector load built-ins. The text is an opening call fragment;
# the pointer argument and closing parenthesis are supplied by the template.
vldinstr_x86 = {
	'b' : '(v16qi) __builtin_ia32_lddqu((char *) ',
	'B' : '(v16qi) __builtin_ia32_lddqu((char *) ',
	'h' : '(v8hi) __builtin_ia32_lddqu((char *) ',
	'H' : '(v8hi) __builtin_ia32_lddqu((char *) ',
	'i' : '(v4si) __builtin_ia32_lddqu((char *) ',
	'I' : '(v4si) __builtin_ia32_lddqu((char *) ',
	'f' : '(v4sf) __builtin_ia32_loadups(',
	'd' : '(v2df) __builtin_ia32_loadupd(',
}
# Unaligned vector store built-ins (call opening fragment).
vstinstr1_x86 = {
	'b' : '__builtin_ia32_storedqu((char *)',
	'B' : '__builtin_ia32_storedqu((char *)',
	'h' : '__builtin_ia32_storedqu((char *)',
	'H' : '__builtin_ia32_storedqu((char *)',
	'i' : '__builtin_ia32_storedqu((char *)',
	'I' : '__builtin_ia32_storedqu((char *)',
	'f' : '__builtin_ia32_storeups(',
	'd' : '__builtin_ia32_storeupd(',
}
# Cast applied to the value being stored (empty where no cast is needed).
vstinstr2_x86 = {
	'b' : '',
	'B' : '',
	'h' : '(v16qi) ',
	'H' : '(v16qi) ',
	'i' : '(v16qi) ',
	'I' : '(v16qi) ',
	'f' : '(v4sf)',
	'd' : '(v2df)',
}
# SIMD operations.
# Packed subtract built-ins; only these types have an x86 SIMD subtract here.
simdop_x86 = {
	'b' : '(v16qi) __builtin_ia32_psubb128',
	'h' : '(v8hi) __builtin_ia32_psubw128',
	'i' : '(v4si) __builtin_ia32_psubd128',
	'f' : '__builtin_ia32_subps',
	'd' : '__builtin_ia32_subpd',
}
# Greater than instruction for overflow checking.
# This is also used for less than by reversing the parameters.
vgtinstr_x86 = {
	'b' : '__builtin_ia32_pcmpgtb128',
	'h' : '__builtin_ia32_pcmpgtw128',
	'i' : '__builtin_ia32_pcmpgtd128 ',
	'f' : '',
	'd' : '',
}
# Equal to instruction for overflow checking.
veqinstr_x86 = {
	'b' : '__builtin_ia32_pcmpeqb128',
	'h' : '__builtin_ia32_pcmpeqw128',
	'i' : '__builtin_ia32_pcmpeqd128 ',
	'f' : '',
	'd' : '',
}
# Used to negate values when subtracting from zero. We have to include
# the parameters here because the formats in x86 and ARM differ.
vneginstr_x86 = {
	'b' : '__builtin_ia32_psignb128(datasliceright, vsignparam)',
	'h' : '__builtin_ia32_psignw128(datasliceright, vsignparam)',
	'i' : '__builtin_ia32_psignd128(datasliceright, vsignparam)',
}
# Used with vneginstr_x86: a vector of -1 sign multipliers for psign.
vsignparam_x86 = {
	'b' : 'v16qi vsignparam = {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1};',
	'h' : 'v8hi vsignparam = {-1, -1, -1, -1, -1, -1, -1, -1};',
	'i' : 'v4si vsignparam = {-1, -1, -1, -1};',
}
# This is the width of the SIMD registers in number of bits.
simdwidth_x86 = 128
# SIMD mask initialisation data.
# A comma separated initialiser filling one SIMD register with the limit
# constant for each lane (MIN values for the signed types here).
simdovintmaxvals_x86 = { 'b' : ', '.join(['SCHAR_MIN'] * (simdwidth_x86 // 8)),
	'h' : ', '.join(['SHRT_MIN'] * (simdwidth_x86 // 16)),
	'i' : ', '.join(['INT_MIN'] * (simdwidth_x86 // 32)),
}
# Multiplication, used for checking for math errors.
simdmulop_x86 = {'f' : '__builtin_ia32_mulps',
	'd' : '__builtin_ia32_mulpd'}
# A list of which array types are supported by x86 SIMD instructions.
x86_simdtypes = tuple(simdop_x86.keys())
# ==============================================================================
# For ARM NEON ARMv7 32 bit.
# Not all possible array types have been implemented as benchmarking
# has shown that SIMD is actually slower for array types with larger
# word sizes.
# NEON vector type for each supported array type (64 bit D registers).
simdattr_armv7 = {
	'b' : 'int8x8_t',
	'B' : 'uint8x8_t',
	'h' : 'int16x4_t',
	'H' : 'uint16x4_t',
	'f' : 'float32x2_t',
}
# Vector type used for overflow check intermediates (comparison results
# are unsigned on NEON).
ovflsimdattr_armv7 = {
	'b' : 'uint8x8_t',
	'B' : 'uint8x8_t',
	'h' : 'uint16x4_t',
	'H' : 'uint16x4_t',
	'f' : 'float32x2_t',
}
# Vector load intrinsics (call opening fragment).
vldinstr_armv7 = {
	'b' : 'vld1_s8(',
	'B' : 'vld1_u8(',
	'h' : 'vld1_s16(',
	'H' : 'vld1_u16(',
	'f' : 'vld1_f32(',
}
# Vector store intrinsics (call opening fragment).
vstinstr1_armv7 = {
	'b' : 'vst1_s8(',
	'B' : 'vst1_u8(',
	'h' : 'vst1_s16(',
	'H' : 'vst1_u16(',
	'f' : 'vst1_f32(',
}
# Store casts: none are needed on ARMv7, kept for template uniformity
# with the x86 version.
vstinstr2_armv7 = {
	'b' : '',
	'B' : '',
	'h' : '',
	'H' : '',
	'f' : '',
}
# NEON type suffix (e.g. for vdup/vcreate style intrinsic names).
armvecsize_armv7 = {
	'b' : 's8',
	'B' : 'u8',
	'h' : 's16',
	'H' : 'u16',
}
# SIMD operations.
# NEON subtract intrinsics.
simdop_armv7 = {
	'b' : 'vsub_s8',
	'B' : 'vsub_u8',
	'h' : 'vsub_s16',
	'H' : 'vsub_u16',
	'f' : 'vsub_f32',
}
# Greater than instruction for overflow checking.
vgtinstr_armv7 = {
	'b' : 'vcgt_s8',
	'B' : 'vcgt_u8',
	'h' : 'vcgt_s16',
	'H' : 'vcgt_u16',
	'f' : '',
}
# Less than instruction for overflow checking.
vltinstr_armv7 = {
	'b' : 'vclt_s8',
	'B' : 'vclt_u8',
	'h' : 'vclt_s16',
	'H' : 'vclt_u16',
	'f' : '',
}
# Used to calculate overflow conditions.
vsubinstr_armv7 = {
	'b' : 'vsub_s8',
	'B' : 'vsub_u8',
	'h' : 'vsub_s16',
	'H' : 'vsub_u16',
	'f' : '',
}
# Equal to.
veqinstr_armv7 = {
	'b' : 'vceq_s8',
	'B' : 'vceq_u8',
	'h' : 'vceq_s16',
	'H' : 'vceq_u16',
	'f' : '',
}
# Used to negate values when subtracting from zero. We have to include
# the parameters here because the formats in x86 and ARM differ.
# Unsigned types cannot be negated, hence the empty entries.
vneginstr_armv7 = {
	'b' : 'vneg_s8(datasliceright)',
	'B' : '',
	'h' : 'vneg_s16(datasliceright)',
	'H' : '',
}
# Used to turn vector results into integers so we can examine them.
vreinterpinstr_armv7 = {
	'b' : 'vreinterpret_u64_u8',
	'B' : 'vreinterpret_u64_u8',
	'h' : 'vreinterpret_u64_u16',
	'H' : 'vreinterpret_u64_u16',
	'f' : '',
}
# This is the width of the SIMD registers in number of bits.
simdwidth_armv7 = 64
# SIMD mask initialisation data.
# Lane-by-lane limit constants: MIN for signed types, MAX for unsigned.
simdovintmaxvals_armv7 = {
	'b' : ', '.join(['SCHAR_MIN'] * (simdwidth_armv7 // 8)),
	'B' : ', '.join(['UCHAR_MAX'] * (simdwidth_armv7 // 8)),
	'h' : ', '.join(['SHRT_MIN'] * (simdwidth_armv7 // 16)),
	'H' : ', '.join(['USHRT_MAX'] * (simdwidth_armv7 // 16)),
	'i' : ', '.join(['INT_MIN'] * (simdwidth_armv7 // 32)),
	'I' : ', '.join(['UINT_MAX'] * (simdwidth_armv7 // 32)),
}
# Which array types have overflow checking.
simdovfl_armv7 = ('b', 'B', 'h', 'H')
# A list of which array types are supported by ARM SIMD instructions.
armv7_simdtypes = tuple(simdop_armv7.keys())
# Multiplication, used for checking for math errors.
simdmulop_armv7 = 'vmul_f32'
# ==============================================================================
# For ARM NEON ARMv8 64 bit.
# Not all possible array types have been implemented as benchmarking
# has shown that SIMD is actually slower for array types with larger
# word sizes.
# NEON vector type for each supported array type (128 bit Q registers).
simdattr_armv8 = {
	'b' : 'int8x16_t',
	'B' : 'uint8x16_t',
	'h' : 'int16x8_t',
	'H' : 'uint16x8_t',
	'i' : 'int32x4_t',
	'I' : 'uint32x4_t',
	'f' : 'float32x4_t',
}
# Vector type used for overflow check intermediates (comparison results
# are unsigned on NEON).
ovflsimdattr_armv8 = {
	'b' : 'uint8x16_t',
	'B' : 'uint8x16_t',
	'h' : 'uint16x8_t',
	'H' : 'uint16x8_t',
	'i' : 'uint32x4_t',
	'I' : 'uint32x4_t',
	'f' : 'float32x4_t',
}
# Vector load intrinsics (call opening fragment).
vldinstr_armv8 = {
	'b' : 'vld1q_s8(',
	'B' : 'vld1q_u8(',
	'h' : 'vld1q_s16(',
	'H' : 'vld1q_u16(',
	'i' : 'vld1q_s32(',
	'I' : 'vld1q_u32(',
	'f' : 'vld1q_f32(',
}
# Vector store intrinsics (call opening fragment).
vstinstr1_armv8 = {
	'b' : 'vst1q_s8(',
	'B' : 'vst1q_u8(',
	'h' : 'vst1q_s16(',
	'H' : 'vst1q_u16(',
	'i' : 'vst1q_s32(',
	'I' : 'vst1q_u32(',
	'f' : 'vst1q_f32(',
}
# Store casts: none are needed on ARMv8, so every entry is empty. The
# table exists to keep the template interface uniform with the x86
# version. NOTE: the original defined this dictionary twice with
# identical contents; the redundant duplicate has been removed.
vstinstr2_armv8 = {
	'b' : '',
	'B' : '',
	'h' : '',
	'H' : '',
	'i' : '',
	'I' : '',
	'f' : '',
}
# NEON type suffix (e.g. for vdupq style intrinsic names).
armvecsize_armv8 = {
	'b' : 's8',
	'B' : 'u8',
	'h' : 's16',
	'H' : 'u16',
	'i' : 's32',
	'I' : 'u32',
}
# SIMD operations.
# NEON subtract intrinsics.
simdop_armv8 = {
	'b' : 'vsubq_s8',
	'B' : 'vsubq_u8',
	'h' : 'vsubq_s16',
	'H' : 'vsubq_u16',
	'i' : 'vsubq_s32',
	'I' : 'vsubq_u32',
	'f' : 'vsubq_f32',
}
# Greater than instruction for overflow checking.
vgtinstr_armv8 = {
	'b' : 'vcgtq_s8',
	'B' : 'vcgtq_u8',
	'h' : 'vcgtq_s16',
	'H' : 'vcgtq_u16',
	'i' : 'vcgtq_s32',
	'I' : 'vcgtq_u32',
	'f' : '',
}
# Less than instruction for overflow checking.
vltinstr_armv8 = {
	'b' : 'vcltq_s8',
	'B' : 'vcltq_u8',
	'h' : 'vcltq_s16',
	'H' : 'vcltq_u16',
	'i' : 'vcltq_s32',
	'I' : 'vcltq_u32',
	'f' : '',
}
# Equal to.
veqinstr_armv8 = {
	'b' : 'vceqq_s8',
	'B' : 'vceqq_u8',
	'h' : 'vceqq_s16',
	'H' : 'vceqq_u16',
	'i' : 'vceqq_s32',
	'I' : 'vceqq_u32',
	'f' : 'vceqq_f32',
}
# Used to negate values when subtracting from zero. We have to include
# the parameters here because the formats in x86 and ARM differ.
# Unsigned types cannot be negated, hence the empty entries.
vneginstr_armv8 = {
	'b' : 'vnegq_s8(datasliceright)',
	'B' : '',
	'h' : 'vnegq_s16(datasliceright)',
	'H' : '',
	'i' : 'vnegq_s32(datasliceright)',
	'I' : '',
	'f' : 'vnegq_f32(datasliceright)',
}
# Used to turn vector results into integers so we can examine them.
vreinterpinstr_armv8 = {
	'b' : 'vreinterpretq_u64_u8',
	'B' : 'vreinterpretq_u64_u8',
	'h' : 'vreinterpretq_u64_u16',
	'H' : 'vreinterpretq_u64_u16',
	'i' : 'vreinterpretq_u64_u32',
	'I' : 'vreinterpretq_u64_u32',
	'f' : '',
}
# Used to calculate overflow conditions.
vsubinstr_armv8 = {
	'b' : 'vsubq_s8',
	'B' : 'vsubq_u8',
	'h' : 'vsubq_s16',
	'H' : 'vsubq_u16',
	'i' : 'vsubq_s32',
	'I' : 'vsubq_u32',
	'f' : '',
}
# Which array types have overflow checking.
simdovfl_armv8 = ('b', 'B', 'h', 'H', 'i', 'I')
# This is the width of the SIMD registers in number of bits.
simdwidth_armv8 = 128
# SIMD mask initialisation data.
# Lane-by-lane limit constants: MIN for signed types, MAX for unsigned.
simdovintmaxvals_armv8 = {
	'b' : ', '.join(['SCHAR_MIN'] * (simdwidth_armv8 // 8)),
	'B' : ', '.join(['UCHAR_MAX'] * (simdwidth_armv8 // 8)),
	'h' : ', '.join(['SHRT_MIN'] * (simdwidth_armv8 // 16)),
	'H' : ', '.join(['USHRT_MAX'] * (simdwidth_armv8 // 16)),
	'i' : ', '.join(['INT_MIN'] * (simdwidth_armv8 // 32)),
	'I' : ', '.join(['UINT_MAX'] * (simdwidth_armv8 // 32)),
}
# A list of which array types are supported by ARM SIMD instructions.
armv8_simdtypes = tuple(simdop_armv8.keys())
# Multiplication, used for checking for math errors.
simdmulop_armv8 = 'vmulq_f32'
# ==============================================================================
# Width of array elements.
# Maps each array type code to the C macro naming how many elements of
# that type fit in one SIMD register. Built from (type codes, macro
# prefix) pairs since signed and unsigned variants share a width.
simdwidth = {
	typecode : widthprefix + 'SIMDSIZE'
	for typecodes, widthprefix in (
		('bB', 'CHAR'),
		('hH', 'SHORT'),
		('iI', 'INT'),
		('f', 'FLOAT'),
		('d', 'DOUBLE'),
	)
	for typecode in typecodes
}
# ==============================================================================
# These get substituted into function call templates.
# C preprocessor guards selecting which platform's SIMD code is compiled in.
SIMD_platform_x86 = '#if defined(AF_HASSIMD_X86)'
SIMD_platform_x86_ARM = '#if defined(AF_HASSIMD_X86) || defined(AF_HASSIMD_ARMv7_32BIT) || defined(AF_HASSIMD_ARM_AARCH64)'
SIMD_platform_x86_ARMv8 = '#if defined(AF_HASSIMD_X86) || defined(AF_HASSIMD_ARM_AARCH64)'
SIMD_platform_ARMv7 = '#if defined(AF_HASSIMD_ARMv7_32BIT)'
SIMD_platform_ARM64v8 = '#if defined(AF_HASSIMD_ARM_AARCH64)'
SIMD_platform_ARM = '#if defined(AF_HASSIMD_ARMv7_32BIT) || defined(AF_HASSIMD_ARM_AARCH64)'
# ==============================================================================
# Return the platform SIMD enable C macro.
# This is for the platform independent file, and not the plaform specific
# SIMD files.
def findsimdplatform(arraycode, funcname):
	'''Return the C preprocessor guard macro for the combination of SIMD
	platforms (x86, ARMv7, ARMv8) which support this array type.
	arraycode - the array type code to look up.
	funcname - the function name, used only in the error message.
	Only the platform combinations currently in use are defined; any
	other combination prints an error and returns an error string.
	'''
	capabilities = (arraycode in x86_simdtypes,
		arraycode in armv7_simdtypes,
		arraycode in armv8_simdtypes)
	# Dispatch on the (x86, armv7, armv8) support flags.
	platformguards = {
		(True, True, True) : SIMD_platform_x86_ARM,
		(True, False, False) : SIMD_platform_x86,
		(True, False, True) : SIMD_platform_x86_ARMv8,
		(False, True, True) : SIMD_platform_ARM,
		(False, False, True) : SIMD_platform_ARM64v8,
	}
	try:
		return platformguards[capabilities]
	except KeyError:
		print('Error: Template error in findsimdplatform: %s %s' % (arraycode, funcname))
		return 'Error: Template error, this should not be here.'
# ==============================================================================
# ==============================================================================
# Create the source code based on templates.
# Configuration for this specific generated function ("sub").
funcname = 'sub'
filename = funcname + '.c'
# Operator symbols: Python level and the C operator for each type class.
pyoperator = '-'
copname = '-'
c_operator_i = '-'
c_operator_f = '-'
c_operator_d = '-'
# This code generator script does not use data read from the spreadsheet.
# Documentation fragments substituted into the generated doc strings.
arraytypesdocs = 'si,ui,f'
opcodedocs = 'x - y'
matherrorsdocs = 'OverflowError,ArithmeticError'
# These are the templates for each type specific operation.
float_template = ops_op_float
uint_template = ops_sub_uint
int_template = ops_sub_int
# 'sub' has both x86 and ARM SIMD versions, so use the combined includes.
includeoptions = includeoptions_both
# Name of the generated header file holding function specific macros.
macrofilename = funcname + '_defs' + '.h'
# The text to include the function specific macros.
funcdefsblock = '''
// Function specific macros and other definitions.
#include "%s"
''' % macrofilename
# ==============================================================================
# This outputs the main C file.
def CreateHeader(funcname):
	'''Build the top of the generated C file.
	funcname - the name of the maths function being generated.
	Returns two blocks of text: the file header (with the SIMD include
	options filled in) and the function specific macro include.
	'''
	substitutions = {
		'funclabel' : funcname,
		'includeoptions' : includeoptions_both % {'funclabel' : funcname},
	}
	# Function specific includes come straight from the module level block.
	return (mathops_head % substitutions), funcdefsblock
def CreateArrayDataCCode(arraycode, funcname):
	''' Conventional C code for a single data type.
	This returns the data to be written later.
	It returns two blocks of text, the C code and the call code.
	arraycode - the array module type code for this data type.
	funcname - the name of the maths function being generated.
	Raises ValueError for an unsupported array code. (The original
	version only printed a message and then failed with an
	UnboundLocalError when it tried to use the unset template.)
	'''
	arraytype = codegen_common.arraytypes[arraycode]
	funcmodifier = arraytype.replace(' ', '_')
	# Common template values for this array type.
	funcdata = {'funcmodifier' : funcmodifier,
		'funclabel' : funcname,
		'arraytype' : arraytype,
		'arraycode' : arraycode,
		'copname' : copname,
		'intmaxvalue' : codegen_common.maxvalue[arraycode],
		'intminvalue' : codegen_common.minvalue[arraycode],
	}
	# Select the operation template matching the type category.
	if arraycode in codegen_common.floatarrays:
		ops_calc = float_template
	elif arraycode in codegen_common.unsignedint:
		ops_calc = uint_template
	elif arraycode in codegen_common.signedint:
		ops_calc = int_template
	else:
		raise ValueError('Error - Unsupported array code. %s' % arraycode)
	# Prepare the SIMD templates.
	if arraycode in (set(x86_simdtypes) | set(armv7_simdtypes) | set(armv8_simdtypes)):
		simdfuncdata = {'simdwidth' : simdwidth[arraycode],
			'funclabel' : funcname,
			'funcmodifier' : funcmodifier,
			'simdplatform' : findsimdplatform(arraycode, funcname)}
		# SIMD without overflow detection.
		funcdata.update({label : template % simdfuncdata
			for label, template in SIMD_call.items()})
		# Integer SIMD with overflow detection data.
		funcdata.update({label : template % simdfuncdata
			for label, template in SIMD_call_ovfl.items()})
		funcdata['nosimddecl'] = nosimddecl
		funcdata['nosimdparam'] = nosimdparam
		funcdata['simdwidth'] = simdwidth.get(arraycode, '')
		funcdata['simdplatform'] = findsimdplatform(arraycode, funcname)
	else:
		# No SIMD support for this type: blank out every SIMD field.
		funcdata.update(dict.fromkeys(SIMD_call, ''))
		funcdata.update(dict.fromkeys(SIMD_call_ovfl, ''))
		funcdata['nosimddecl'] = ''
		funcdata['nosimdparam'] = ''
	# The calculations.
	opscalctext = ops_calc % funcdata
	# This is the call to the functions for this array type. This
	# is inserted into another template below.
	opscalltext = opscall % funcdata
	return opscalctext, opscalltext
# Generate and write the main C source file.
with open(filename, 'w') as f:
	# File header plus the function specific macro include.
	headtext, includextext = CreateHeader(funcname)
	f.write(headtext)
	f.write(includextext)
	# Per-type calculation functions and their matching call blocks,
	# one pair for each array type code.
	calcblocks = []
	callblocks = []
	for typecode in codegen_common.arraycodes:
		calcblock, callblock = CreateArrayDataCCode(typecode, funcname)
		calcblocks.append(calcblock)
		callblocks.append(callblock)
	f.write(''.join(calcblocks))
	# Write the remaining boilerplate C code.
	f.write(mathops_params % {'funclabel' : funcname,
		'opcodedocs' : opcodedocs,
		'supportedarrays' : codegen_common.FormatDocsArrayTypes(arraytypesdocs),
		'pyoperator' : pyoperator,
		'matherrors' : ', '.join(matherrorsdocs.split(',')),
		'opscall' : ''.join(callblocks),
		'getsimdparam' : '1',
		'helpsimd1' : helpsimd1_template % {'funclabel' : funcname},
		'helpsimd2' : helpsimd2_template})
# ==============================================================================
# ==============================================================================
# This outputs the helper macro header file.
macrocodedate = '12-Aug-2021'
# Macro definitions: the generic overflow check first, then one block
# per integer array type.
outputlist = [int_ovcheck]
for typecode in codegen_common.intarrays:
	# Pick the signed or unsigned macro template for this type.
	if typecode in codegen_common.signedint:
		macrotemplate = intov_macros_signed
	elif typecode in codegen_common.unsignedint:
		macrotemplate = intov_macros_unsigned
	else:
		continue
	typename = codegen_common.arraytypes[typecode]
	outputlist.append(macrotemplate % {'arraytype' : typename,
		'funcmodifier' : typename.replace(' ', '_'),
		'intmaxvalue' : codegen_common.maxvalue[typecode],
		'intminvalue' : codegen_common.minvalue[typecode],
	})
# Write out the file.
codegen_common.OutputCHeader(macrofilename,
	outputlist,
	'Additional macros for %s' % funcname,
	'',
	macrocodedate)
# ==============================================================================
# Write the SIMD code.
# x86
def SetSIMDData_x86(funcname):
	'''Set the SIMD template data for x86. This is for SIMD without
	overflow checking.
	funcname - the name of the maths function being generated.
	Returns a list of text blocks: the function specific include, then
	for each x86 SIMD capable array type the helper functions, the plain
	SIMD implementation, and the overflow checking implementation.
	(The unused 'maindescription' local present in the original has been
	removed as dead code; WriteSIMDCode computes its own copy.)
	'''
	# Function specific includes.
	outputlist = [funcdefsblock]
	# Output the generated code.
	for arraycode in x86_simdtypes:
		arraytype = codegen_common.arraytypes[arraycode]
		# The main template values.
		funcdata = {'arraytype' : arraytype,
			'copname' : copname,
			'funclabel' : funcname,
			'funcmodifier' : arraytype.replace(' ', '_'),
			'intminvalue' : codegen_common.minvalue[arraycode],
			'simdattr' : simdattr_x86[arraycode],
			'simd_ovflchk_extravars' : '',
			'simdplatform' : SIMD_platform_x86,
			'simdwidth' : simdwidth[arraycode],
			'vldinstr' : vldinstr_x86[arraycode],
			'vopinstr' : simdop_x86[arraycode],
			'vstinstr1' : vstinstr1_x86[arraycode],
			'vstinstr2' : vstinstr2_x86[arraycode],
		}
		# Helper functions.
		outputlist.append(simd_helpers % funcdata)
		# No overflow checking, fill in the template.
		outputlist.append(ops_simdsupport % funcdata)
		# Overflow check. For integer arrays only.
		if arraycode in codegen_common.intarrays:
			# x86 doesn't have an SIMD less than, so we use gt instead.
			simddata = {'vltinstr' : vgtinstr_x86[arraycode],
				'vgtinstr' : vgtinstr_x86[arraycode],
				'veqinstr' : veqinstr_x86[arraycode],
				'vreinterpinstr' : '',
			}
			# Add this back into the template values.
			funcdata['simd_pos_willoverflow_12'] = simd_pos_willoverflow_12_x86 % simddata
			funcdata['simd_pos_willoverflow_34'] = simd_pos_willoverflow_34_x86 % simddata
			funcdata['simd_neg_willoverflow_12'] = simd_neg_willoverflow_12_x86 % simddata
			funcdata['simd_neg_willoverflow_34'] = simd_neg_willoverflow_34_x86 % simddata
			funcdata['simd_equ_willoverflow'] = simd_equ_willoverflow_x86 % simddata
			funcdata['simdovintmaxvals'] = simdovintmaxvals_x86[arraycode]
			funcdata['vneginstr'] = vneginstr_x86[arraycode]
			funcdata['vsignparam'] = vsignparam_x86[arraycode]
			funcdata['ovflsimdattr'] = ovflsimdattr_x86[arraycode]
			# With overflow checking, fill in the template.
			outputlist.append(ops_simdsupport_ovfl_signed % funcdata)
		# For float arrays.
		elif arraycode in codegen_common.floatarrays:
			funcdata['simdmul'] = simdmulop_x86[arraycode]
			# With overflow checking, fill in the template.
			outputlist.append(ops_simdsupport_ovfl_float % funcdata)
	return outputlist
# ARMv7
def SetSIMDData_ARMv7(funcname):
	'''Set the SIMD template data for ARMv7. This is for SIMD without
	overflow checking.
	funcname - the name of the maths function being generated.
	Returns a list of text blocks: the function specific include, then
	for each ARMv7 SIMD capable array type the helper functions, the
	plain SIMD implementation, and the overflow checking implementation.
	(The unused 'maindescription' local present in the original has been
	removed as dead code; WriteSIMDCode computes its own copy.)
	'''
	# Function specific includes.
	outputlist = [funcdefsblock]
	# Output the generated code.
	for arraycode in armv7_simdtypes:
		arraytype = codegen_common.arraytypes[arraycode]
		# The main template values.
		funcdata = {'arraytype' : arraytype,
			'copname' : copname,
			'funclabel' : funcname,
			'funcmodifier' : arraytype.replace(' ', '_'),
			'intminvalue' : codegen_common.minvalue[arraycode],
			'simdattr' : simdattr_armv7[arraycode],
			'simd_ovflchk_extravars' : '',
			'simdplatform' : SIMD_platform_ARMv7,
			'simdwidth' : simdwidth[arraycode],
			'vldinstr' : vldinstr_armv7[arraycode],
			'vopinstr' : simdop_armv7[arraycode],
			'vstinstr1' : vstinstr1_armv7[arraycode],
			'vstinstr2' : vstinstr2_armv7[arraycode],
		}
		# Helper functions.
		outputlist.append(simd_helpers % funcdata)
		# No overflow checking, fill in the template.
		outputlist.append(ops_simdsupport % funcdata)
		# Overflow check. For some array types only.
		if arraycode in simdovfl_armv7:
			simddata = {'vltinstr' : vltinstr_armv7[arraycode],
				'vgtinstr' : vgtinstr_armv7[arraycode],
				'veqinstr' : veqinstr_armv7[arraycode],
				'vreinterpinstr' : vreinterpinstr_armv7[arraycode],
			}
			# Add this back into the template values.
			funcdata['simd_pos_willoverflow_12'] = simd_pos_willoverflow_12_armv7 % simddata
			funcdata['simd_pos_willoverflow_34'] = simd_pos_willoverflow_34_armv7 % simddata
			funcdata['simd_neg_willoverflow_12'] = simd_neg_willoverflow_12_armv7 % simddata
			funcdata['simd_neg_willoverflow_34'] = simd_neg_willoverflow_34_armv7 % simddata
			funcdata['simd_equ_willoverflow'] = simd_equ_willoverflow_armv7 % simddata
			funcdata['simd_unsigned_willoverflow'] = simd_unsigned_willoverflow_armv7 % simddata
			funcdata['simdovintmaxvals'] = simdovintmaxvals_armv7[arraycode]
			funcdata['vneginstr'] = vneginstr_armv7[arraycode]
			funcdata['vsignparam'] = ''
			funcdata['ovflsimdattr'] = ovflsimdattr_armv7[arraycode]
			funcdata['vsubinstr'] = vsubinstr_armv7[arraycode]
			# With overflow checking, fill in the template.
			# For signed intergers.
			if arraycode in codegen_common.signedint:
				outputlist.append(ops_simdsupport_ovfl_signed % funcdata)
			# For unsigned integers.
			elif arraycode in codegen_common.unsignedint:
				outputlist.append(ops_simdsupport_ovfl_unsigned % funcdata)
		# For float arrays.
		elif arraycode == 'f':
			funcdata['simdmul'] = simdmulop_armv7
			# With overflow checking, fill in the template.
			outputlist.append(ops_simdsupport_ovfl_float % funcdata)
	return outputlist
# ARMv8
def SetSIMDData_ARMv8(funcname):
	'''Set the SIMD template data for ARMv8. This is for SIMD without
	overflow checking.
	funcname - the name of the maths function being generated.
	Returns a list of text blocks: the function specific include, then
	for each ARMv8 SIMD capable array type the helper functions, the
	plain SIMD implementation, and the overflow checking implementation.
	(The unused 'maindescription' local present in the original has been
	removed as dead code; WriteSIMDCode computes its own copy.)
	'''
	# Function specific includes.
	outputlist = [funcdefsblock]
	# Output the generated code.
	for arraycode in armv8_simdtypes:
		arraytype = codegen_common.arraytypes[arraycode]
		# The main template values.
		funcdata = {'arraytype' : arraytype,
			'copname' : copname,
			'funclabel' : funcname,
			'funcmodifier' : arraytype.replace(' ', '_'),
			'intminvalue' : codegen_common.minvalue[arraycode],
			'simdattr' : simdattr_armv8[arraycode],
			'simd_ovflchk_extravars' : simd_ovflchk_extravars_armv8,
			'simdplatform' : SIMD_platform_ARM64v8,
			'simdwidth' : simdwidth[arraycode],
			'vldinstr' : vldinstr_armv8[arraycode],
			'vopinstr' : simdop_armv8[arraycode],
			'vstinstr1' : vstinstr1_armv8[arraycode],
			'vstinstr2' : vstinstr2_armv8[arraycode],
		}
		# Helper functions.
		outputlist.append(simd_helpers % funcdata)
		# No overflow checking, fill in the template.
		outputlist.append(ops_simdsupport % funcdata)
		# Overflow check. For some array types only.
		if arraycode in simdovfl_armv8:
			simddata = {'vltinstr' : vltinstr_armv8[arraycode],
				'vgtinstr' : vgtinstr_armv8[arraycode],
				'veqinstr' : veqinstr_armv8[arraycode],
				'vreinterpinstr' : vreinterpinstr_armv8[arraycode],
			}
			# Add this back into the template values.
			funcdata['simd_pos_willoverflow_12'] = simd_pos_willoverflow_12_armv8 % simddata
			funcdata['simd_pos_willoverflow_34'] = simd_pos_willoverflow_34_armv8 % simddata
			funcdata['simd_neg_willoverflow_12'] = simd_neg_willoverflow_12_armv8 % simddata
			funcdata['simd_neg_willoverflow_34'] = simd_neg_willoverflow_34_armv8 % simddata
			funcdata['simd_equ_willoverflow'] = simd_equ_willoverflow_armv8 % simddata
			funcdata['simd_unsigned_willoverflow'] = simd_unsigned_willoverflow_armv8 % simddata
			funcdata['simdovintmaxvals'] = simdovintmaxvals_armv8[arraycode]
			funcdata['vneginstr'] = vneginstr_armv8[arraycode]
			funcdata['vsignparam'] = ''
			funcdata['ovflsimdattr'] = ovflsimdattr_armv8[arraycode]
			funcdata['vsubinstr'] = vsubinstr_armv8[arraycode]
			# With overflow checking, fill in the template.
			# For signed intergers.
			if arraycode in codegen_common.signedint:
				outputlist.append(ops_simdsupport_ovfl_signed % funcdata)
			# For unsigned integers.
			elif arraycode in codegen_common.unsignedint:
				outputlist.append(ops_simdsupport_ovfl_unsigned % funcdata)
		# For float arrays.
		elif arraycode == 'f':
			funcdata['simdmul'] = simdmulop_armv8
			# With overflow checking, fill in the template.
			outputlist.append(ops_simdsupport_ovfl_float % funcdata)
	return outputlist
def WriteSIMDCode(funcname, simdplatform, simdfilename, simdcodedate, includextext, outputlist):
	'''This writes out the SIMD code to the .c and .h files.
	funcname - the maths function name.
	simdplatform - 'x86', 'armv7', or 'armv8'; selects extra header options.
	simdfilename - the file name suffix (e.g. '_simd_x86').
	simdcodedate - the creation date recorded in the file headers.
	includextext - the function specific include block.
	outputlist - the generated source text blocks.
	'''
	# The SIMD options to select the additional file header info.
	platformheaderopts = {
		'x86' : ['simddefs'],
		'armv7' : ['simddefs', 'simdmacromsg_armv7'],
		'armv8' : ['simddefs', 'simdmacromsg_armv8'],
	}[simdplatform]
	# This provides the description in the header of the file.
	maindescription = 'Calculate the %s of values in an array.' % funcname
	# This outputs the SIMD version of the C source file.
	codegen_common.OutputSourceCode(funcname + simdfilename + '.c',
		[includextext] + outputlist,
		maindescription,
		codegen_common.SIMDDescription,
		simdcodedate,
		'', platformheaderopts)
	# Generate and write out the matching .h header file.
	headerdefs = codegen_common.GenSIMDCHeaderText(outputlist, funcname)
	codegen_common.OutputCHeader(funcname + simdfilename + '.h', headerdefs,
		maindescription,
		codegen_common.SIMDDescription,
		simdcodedate)
# Output SIMD code.
# Function specific includes.
includextext = funcdefsblock
# One entry per SIMD platform: (platform key, file name suffix,
# data builder function, original creation date for the file header).
for simdplatform, simdfilename, simddatabuilder, simdcodedate in (
		('x86', '_simd_x86', SetSIMDData_x86, '1-Apr-2019'),
		('armv7', '_simd_armv7', SetSIMDData_ARMv7, '8-Oct-2019'),
		('armv8', '_simd_armv8', SetSIMDData_ARMv8, '26-Mar-2020'),
		):
	WriteSIMDCode(funcname, simdplatform, simdfilename, simdcodedate,
		includextext, simddatabuilder(funcname))
# ==============================================================================
| 28.252079
| 209
| 0.653035
| 13,494
| 108,714
| 5.11746
| 0.05847
| 0.026356
| 0.011933
| 0.006777
| 0.803012
| 0.780508
| 0.761364
| 0.741351
| 0.723554
| 0.712794
| 0
| 0.022195
| 0.1703
| 108,714
| 3,847
| 210
| 28.259423
| 0.743381
| 0.10351
| 0
| 0.623954
| 1
| 0.023574
| 0.81883
| 0.164084
| 0
| 0
| 0.000619
| 0
| 0
| 1
| 0.002662
| false
| 0.00038
| 0.00076
| 0
| 0.0327
| 0.00076
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d91ac48546232f23ae4a19e1a027c393c5c8ba9b
| 196
|
py
|
Python
|
main/admin.py
|
rafiatha09/berlapan
|
31fc032fbbab6d67b6c20db2eb5626d844e47ae0
|
[
"Unlicense"
] | null | null | null |
main/admin.py
|
rafiatha09/berlapan
|
31fc032fbbab6d67b6c20db2eb5626d844e47ae0
|
[
"Unlicense"
] | null | null | null |
main/admin.py
|
rafiatha09/berlapan
|
31fc032fbbab6d67b6c20db2eb5626d844e47ae0
|
[
"Unlicense"
] | 1
|
2021-10-22T00:32:17.000Z
|
2021-10-22T00:32:17.000Z
|
#admin.py
# Registers the Profile model with Django's default admin site.
from django.contrib import admin
from .models import Profile
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# NOTE(review): UserAdmin and User are imported but never used here —
# possibly intended for a custom user-admin registration; confirm or remove.
admin.site.register(Profile)
| 21.777778
| 47
| 0.816327
| 29
| 196
| 5.517241
| 0.448276
| 0.1875
| 0.31875
| 0.2625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112245
| 196
| 9
| 48
| 21.777778
| 0.91954
| 0.040816
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d91c857f6c733e4e57d9d482edb5e05c91833f8e
| 118
|
py
|
Python
|
swagexample/__init__.py
|
allenai/swagexample
|
f599d48a15b5de79d141b10b007e18bfc449b84d
|
[
"Apache-2.0"
] | 1
|
2018-10-14T00:52:01.000Z
|
2018-10-14T00:52:01.000Z
|
swagexample/__init__.py
|
allenai/swagexample
|
f599d48a15b5de79d141b10b007e18bfc449b84d
|
[
"Apache-2.0"
] | null | null | null |
swagexample/__init__.py
|
allenai/swagexample
|
f599d48a15b5de79d141b10b007e18bfc449b84d
|
[
"Apache-2.0"
] | null | null | null |
"""An example submission for the SWAG leaderboard."""
from swagexample import models
from swagexample import readers
| 23.6
| 53
| 0.805085
| 15
| 118
| 6.333333
| 0.8
| 0.315789
| 0.442105
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135593
| 118
| 4
| 54
| 29.5
| 0.931373
| 0.398305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d9543cc4cb001d38a070f160eaf5180862be8836
| 10,891
|
py
|
Python
|
run.py
|
FGacheru/password-locker
|
262f16b829159d889a7056f98120535d35c9f6b9
|
[
"MIT"
] | null | null | null |
run.py
|
FGacheru/password-locker
|
262f16b829159d889a7056f98120535d35c9f6b9
|
[
"MIT"
] | null | null | null |
run.py
|
FGacheru/password-locker
|
262f16b829159d889a7056f98120535d35c9f6b9
|
[
"MIT"
] | null | null | null |
from users import User
from credentials import Credentials
def create_users(username, password):
    '''
    Build and return a new User with the given login details.

    Args:
        username: login name for the new user.
        password: password for the new user.
    '''
    return User(username, password)
def save_users(user):
    '''
    Persist the given user.

    Args:
        user: a User instance exposing a ``save_users()`` method.
    '''
    # Bug fix: the original called the undefined global name ``users``;
    # the intent (mirroring del_user below) is to save the passed-in user.
    user.save_users()
def del_user(user):
    '''
    Remove the given user by delegating to its own delete method.

    Args:
        user: a User instance exposing a ``delete_user()`` method.
    '''
    user.delete_user()
def find_user(username):
    '''
    Look up and return the user saved under the given username.

    Args:
        username: the username to search for.
    '''
    return User.find_by_username(username)
def check_existing_users(username):
    '''
    Check whether a user with the given username exists.

    Args:
        username: the username to look up.

    Returns:
        Boolean result of ``User.users_exist``.
    '''
    # Bug fix: the original passed the undefined name ``number``
    # instead of the ``username`` parameter (NameError at runtime).
    return User.users_exist(username)
def display_users():
    '''
    Return every saved user from the User store.
    '''
    return User.display_users()
def create_credentials(account, username1, password1):
    '''
    Create and return a new Credentials entry.

    Args:
        account: name of the account the credential belongs to.
        username1: username for the account.
        password1: password for the account.
    '''
    # Bug fix: the original passed the undefined name ``password``
    # instead of the ``password1`` parameter (NameError at runtime).
    new_credentials = Credentials(account, username1, password1)
    return new_credentials
def save_credentials(credentials):
    '''
    Persist the given credentials entry via its own save method.

    Args:
        credentials: object exposing a ``save_credentials()`` method.
    '''
    credentials.save_credentials()
def del_credentials(credentials):
    '''
    Remove the given credentials entry via its own delete method.

    Args:
        credentials: object exposing a ``delete_credentials()`` method.
    '''
    credentials.delete_credentials()
def find_credentials(username1):
    '''
    Look up and return the credential saved under the given username.

    Args:
        username1: the username the credential was saved under.
    '''
    return Credentials.find_by_username(username1)
def check_existing_credentials(username1):
    '''
    Check whether a credential with the given username exists.

    Args:
        username1: the username to look up.

    Returns:
        Boolean result of ``Credentials.credentials_exist``.
    '''
    # Bug fix: the original passed the undefined name ``username``
    # instead of the ``username1`` parameter (NameError at runtime).
    return Credentials.credentials_exist(username1)
def display_credentials():
    '''
    Return every saved credential from the Credentials store.
    '''
    return Credentials.display_credentials()
from users import User
from credentials import Credentials
def create_user(account,username,password):
'''
creating new user
'''
new_users = User(username,password)
return new_users
def save_users(users):
users.save_users()
def create_credentials (account,username1,password1):
new_credentials = Credentials(account,username1,password1)
return new_credentials
def save_credentials(credentials):
credentials.save_credentials()
def delete_credentials(Credentials):
Credentials.delete_credentials()
def find_credentials(account):
return Credentials.find_by_account(account)
def display_credentials():
'''
Function that returns all the saved credentials
'''
return Credentials.display_credentials()
def check_existing_credentials(account):
'''
Function that check if a contact exists with that number and return a Boolean
'''
return Credentials.credentials_exist(account)
def main():
print("Hello! Welcome to an application that help you manage your credentials")
print('Use the these commands to proceed: CA = create account,' )
short_code = input().lower()
if short_code == 'ca':
print('Enter new account details')
print('*' * 100)
account = input('Enter account: ')
username = input('Enter Username: ')
while True:
print('use : MP = to Enter your password manually')
password_choice = input().lower()
if password_choice == 'mp':
password = input('Enter Password:')
break
else:
print('Invalid short code.Please try again')
save_user(create_user(username, password))
print('*' * 100)
print(f'Welcome {username} to your new account your password is <--- {password} --->')
print('*' * 100)
while True:
print('Use these short codes to manage credentials: \n NC = new credential, \n VC = display credentials,\n SC = find credential \n Dc = delete credential, \n EX = exit application')
short_code = input().lower()
if short_code == 'nc':
print('Enter New Credential Details')
print('*' * 100)
account = input('Account Name : ')
username1 = input('Username : ')
while True:
print('Use: MP = manually enter password?')
password_choice = input().lower()
if password_choice == 'mp':
password = input('Enter password : ')
break
else:
print('Invalid short code. Please try again')
print('*' * 100)
save_credentials(create_credentials(account, username1,password))
print('*' * 100)
print(f'Your {account} account has been saved')
print('*' * 100)
elif short_code == 'vc':
if display_credentials():
print('Your saved credentials are:')
for account in display_credentials():
print('*' * 100)
print(f' Username: {username1} \n Password: {password}')
print('*' * 100)
else:
print('*' * 100)
print('You have No Credentials. Please Create One')
print('*' * 100)
elif short_code == 'dc':
print('Enter Account name to delete...')
# name = input('Acount Name : ')
print('*' * 100)
if find_credentials(name):
name_result = find_credentials(name)
name_result.delete_credentials()
print(f'Account {name} has been successfully deleted ')
print('*' * 100)
else:
print('Incorrect account name')
print('*' * 100)
elif short_code == 'sc':
print('Enter Account Name To Search...')
search = input('Account Name : ')
print('*' * 100)
if find_credentials(search):
search = find_credentials(search)
print(f'Account Name: {search} ')
print('*' * 100)
else:
print('Credentials does not exist')
print('*' * 100)
elif short_code == 'ex':
print('Goodbye')
print('*' * 100)
break
else:
print('Invalid Short code. Please try again!')
print('*' * 100)
if __name__ == '__main__':
main()
from users import User
from credentials import Credentials
def create_user(account, username, password):
    '''
    Create and return a new User.

    Args:
        account: account identifier (passed through to User — note the
            earlier copies of this helper construct User without it;
            confirm the intended constructor signature).
        username: login name for the new user.
        password: password for the new user.
    '''
    new_user = User(account, username, password)
    # Bug fix: the original returned the undefined name ``new_users``.
    return new_user
def save_users(users):
    # Bug fix: the original called the undefined global ``user``;
    # save the user object that was actually passed in.
    users.save_users()
def create_credentials(account, username1, password1):
    '''
    Create and return a new Credentials entry.

    Args:
        account: name of the account the credential belongs to.
        username1: username for the account.
        password1: password for the account.
    '''
    # Bug fix: the original assigned ``new_credential`` but returned the
    # undefined name ``new_credentials`` (NameError at runtime).
    new_credentials = Credentials(account, username1, password1)
    return new_credentials
def save_credentials(credentials):
credentials.save_credentials()
def delete_credentials(Credentials):
Credentials.delete_credentials()
def find_credentials(account):
return Credentials.find_by_account(account)
def display_credentials():
'''
Function that returns all the saved contacts
'''
return Credentials.display_credentials()
def check_existing_credentials(account):
'''
Function that check if a contact exists with that number and return a Boolean
'''
return Credentials.credentials_exist(account)
def main():
print("Hi! welcome to an application that help you manage your credentials")
print('Welcome to Password Locker. Use the these commands to proceed: CA = create account,' )
short_code = input().lower()
if short_code == 'ca':
print('Enter new account details')
print('*' * 100)
username = input('Enter Username: ')
while True:
print('use : MP = to manually enter your own password')
password_choice = input().lower()
if password_choice == 'mp':
password = input('Enter Password: ')
break
else:
print('Invalid short code. Please try again')
save_users(create_user(account,username, password))
print('*' * 100)
print(f'Welcome {username} your password is <--- {password} --->')
print('*' * 100)
while True:
print('Use these short codes to manage credentials: \n NC = new credential, \n VC = display credentials,\n SC = find credential \n Dc = delete credential, \n EX = exit application')
short_code = input().lower()
if short_code == 'nc':
print('Enter New Credentials Details')
print('*' * 100)
account = input('Account Name : ')
username1 = input('Username : ')
while True:
print('Use: MP = manually enter password?')
password_choice = input().lower()
if password_choice == 'mp':
password = input('Enter password : ')
break
else:
print('Invalid short code. Please try again')
print('*' * 100)
save_credentials(create_credentials(account, username1,password1))
print('*' * 100)
print(f'Your {account} account has been saved')
print('*' * 100)
elif short_code == 'vc':
if display_credentials():
print('Your saved credentials are:')
for account in display_credentials():
print('*' * 100)
print(f' Name: {account} \n Username: {username1} \n Password: {password}')
print('*' * 100)
else:
print('*' * 100)
print('You have No Credentials. Please Create One')
print('*' * 100)
elif short_code == 'dc':
print('Enter Account name to delete...')
name = input('Acount Name : ')
print('*' * 100)
if find_credentials(name):
name_result = find_credentials(name)
name_result.delete_credentials()
print(f'Account {name} has been successfully deleted ')
print('*' * 100)
else:
print('Incorrect account name')
print('*' * 100)
elif short_code == 'sc':
print('Enter Account Name To Search...')
search = input('Account Name : ')
print('*' * 100)
if find_credentials(search):
search = find_credentials(search)
print(f'Account Name: {search} ')
print('*' * 100)
else:
print('Credentials does not exist')
print('*' * 100)
elif short_code == 'ex':
print('Goodbye')
print('*' * 100)
break
else:
print('Invalid Short code. Please try again!')
print('*' * 100)
if __name__ == '__main__':
main()
| 34.684713
| 195
| 0.575429
| 1,134
| 10,891
| 5.406526
| 0.094356
| 0.049584
| 0.035231
| 0.022182
| 0.885663
| 0.862828
| 0.862828
| 0.828576
| 0.79726
| 0.737889
| 0
| 0.018336
| 0.318979
| 10,891
| 313
| 196
| 34.795527
| 0.808278
| 0.077862
| 0
| 0.822034
| 0
| 0.008475
| 0.228454
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.127119
| false
| 0.15678
| 0.025424
| 0.008475
| 0.228814
| 0.347458
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
d96b5281bdb1d32fadeca175199157489729daf2
| 19,754
|
py
|
Python
|
tests_v1/hive_mongo_test.py
|
jhoe123/Elastos.Hive.Node
|
96b0c3c4a6ba29db4a4920a03c7efb9e7a991833
|
[
"MIT"
] | 2
|
2022-01-30T05:24:17.000Z
|
2022-03-29T21:31:21.000Z
|
tests_v1/hive_mongo_test.py
|
jhoe123/Elastos.Hive.Node
|
96b0c3c4a6ba29db4a4920a03c7efb9e7a991833
|
[
"MIT"
] | 3
|
2021-11-25T13:38:56.000Z
|
2022-03-16T02:08:39.000Z
|
tests_v1/hive_mongo_test.py
|
jhoe123/Elastos.Hive.Node
|
96b0c3c4a6ba29db4a4920a03c7efb9e7a991833
|
[
"MIT"
] | 2
|
2022-02-17T09:14:52.000Z
|
2022-03-01T07:23:50.000Z
|
import json
import unittest
import flask_unittest
import logging
from tests_v1 import test_common
from hive.util.constants import HIVE_MODE_TEST
from src import create_app
logger = logging.getLogger()
logger.level = logging.DEBUG
class HiveMongoDbTestCase(flask_unittest.ClientTestCase):
app = create_app(mode=HIVE_MODE_TEST)
@classmethod
def setUpClass(cls):
logging.getLogger("HiveMongoDbTestCase").debug("Setting up HiveMongoDbTestCase\n")
@classmethod
def tearDownClass(cls):
logging.getLogger("HiveMongoDbTestCase").debug("\n\nShutting down HiveMongoDbTestCase")
def setUp(self, client):
logging.getLogger("HiveMongoDbTestCase").info("\n")
self.app.config['TESTING'] = True
self.content_type = ("Content-Type", "application/json")
self.json_header = [self.content_type, ]
test_common.setup_test_auth_token()
self.init_auth()
self.did = test_common.get_auth_did()
self.app_id = test_common.get_auth_app_did()
test_common.setup_test_vault(self.did)
self.create_collection(client)
def init_auth(self):
token = test_common.get_auth_token()
self.auth = [
("Authorization", "token " + token),
self.content_type,
]
def tearDown(self, client):
test_common.delete_test_auth_token()
logging.getLogger("HiveMongoDbTestCase").info("\n")
def init_db(self):
pass
def parse_response(self, r):
try:
logging.getLogger("HiveMongoDbTestCase").debug("\nret:" + str(r.get_data()))
v = json.loads(r.get_data())
except json.JSONDecodeError:
v = None
return v, r.status_code
def assert200(self, status):
self.assertEqual(status, 200)
def assert201(self, status):
self.assertEqual(status, 201)
def create_collection(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_1_create_collection")
r, s = self.parse_response(
client.post('/api/v1/db/create_collection', data=json.dumps({"collection": "works"}), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
r, s = self.parse_response(
client.post('/api/v1/db/create_collection', data=json.dumps({"collection": "works"}), headers=self.auth)
)
self.assert200(s)
self.assertTrue(r["existing"])
def test_2_insert_one(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_2_insert_one")
r, s = self.parse_response(
client.post('/api/v1/db/insert_one', data=json.dumps(
{
"collection": "works",
"document": {
"author": "john doe1",
"title": "Eve for Dummies2"
},
"options": {"bypass_document_validation": False}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_3_insert_many(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_3_insert_many")
r, s = self.parse_response(
client.post('/api/v1/db/insert_many', data=json.dumps(
{
"collection": "works",
"document": [
{
"author": "john doe1",
"title": "Eve for Dummies1_2"
},
{
"author": "john doe2",
"title": "Eve for Dummies2"
},
{
"author": "john doe3",
"title": "Eve for Dummies3"
}
],
"options": {"bypass_document_validation": False, "ordered": True}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_4_count_documents(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_8_count_documents")
r, s = self.parse_response(
client.post('/api/v1/db/count_documents', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe1_1",
},
"options": {
"skip": 0,
"limit": 10,
"maxTimeMS": 1000000000
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_5_find_one(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_9_find_one")
r, s = self.parse_response(
client.post('/api/v1/db/find_one', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe2",
},
"options": {
"skip": 0,
"projection": {"_id": False},
"sort": {'_id': -1},
"allow_partial_results": False,
"return_key": False,
"show_record_id": False,
"batch_size": 0
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_6_1_find_one_null_filter(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_9_1_find_one_null_filter")
r, s = self.parse_response(
client.post('/api/v1/db/find_one', data=json.dumps(
{
"collection": "works",
"options": {
"skip": 0,
"projection": {"_id": False},
"sort": {'_id': -1},
"allow_partial_results": False,
"return_key": False,
"show_record_id": False,
"batch_size": 0
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_7_find_many(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_10_find_many")
r, s = self.parse_response(
client.post('/api/v1/db/find_many', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe1"
},
"options": {
"skip": 0,
"limit": 3,
"projection": {"_id": False},
"sort": {"_id": -1},
"allow_partial_results": False,
"return_key": False,
"show_record_id": False,
"batch_size": 0
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_8_find_many_none_filter(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_10_find_many_none_filter")
r, s = self.parse_response(
client.post('/api/v1/db/find_many', data=json.dumps(
{
"collection": "works",
"options": {
"skip": 0,
"limit": 3,
"projection": {"_id": False},
"sort": {"_id": -1},
"allow_partial_results": False,
"return_key": False,
"show_record_id": False,
"batch_size": 0
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_9_update_one(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_4_update_one")
r, s = self.parse_response(
client.post('/api/v1/db/update_one', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe3_1"
},
"update": {"$set": {
"author": "john doe3_1",
"title": "Eve for Dummies3_1"
}},
"options": {
"upsert": True,
"bypass_document_validation": False
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_10_update_many(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_5_update_many")
r, s = self.parse_response(
client.post('/api/v1/db/update_many', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe2",
},
"update": {"$set": {
"author": "john doe1_1",
"title": "Eve for Dummies1_1"
}},
"options": {
"upsert": True,
"bypass_document_validation": False
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_11_delete_one(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_6_delete_one")
r, s = self.parse_response(
client.post('/api/v1/db/delete_one', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe3_1",
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_12_delete_many(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_7_delete_many")
r, s = self.parse_response(
client.post('/api/v1/db/delete_many', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe3_1",
}
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
def test_13_delete_collection(self, client):
logging.getLogger("HiveMongoDbTestCase").debug("\nRunning test_1_2_delete_collection")
r, s = self.parse_response(
client.post('/api/v1/db/delete_collection', data=json.dumps(
{
"collection": "works"
}
), headers=self.auth)
)
self.assert200(s)
self.assertEqual(r["_status"], "OK")
r, s = self.parse_response(
client.post('/api/v1/db/insert_one', data=json.dumps(
{
"collection": "works",
"document": {
"author": "john doe1",
"title": "Eve for Dummies2"
},
"options": {"bypass_document_validation": False}
}
), headers=self.auth)
)
self.assertEqual(s, 404)
r, s = self.parse_response(
client.post('/api/v1/db/insert_many', data=json.dumps(
{
"collection": "works",
"document": [
{
"author": "john doe1",
"title": "Eve for Dummies1_2"
},
{
"author": "john doe2",
"title": "Eve for Dummies2"
},
{
"author": "john doe3",
"title": "Eve for Dummies3"
}
],
"options": {"bypass_document_validation": False, "ordered": True}
}
), headers=self.auth)
)
self.assertEqual(s, 404)
r, s = self.parse_response(
client.post('/api/v1/db/update_one', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe3_1"
},
"update": {"$set": {
"author": "john doe3_1",
"title": "Eve for Dummies3_1"
}},
"options": {
"upsert": True,
"bypass_document_validation": False
}
}
), headers=self.auth)
)
self.assertEqual(s, 404)
r, s = self.parse_response(
client.post('/api/v1/db/update_many', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe1",
},
"update": {"$set": {
"author": "john doe1_1",
"title": "Eve for Dummies1_1"
}},
"options": {
"upsert": True,
"bypass_document_validation": False
}
}
), headers=self.auth)
)
self.assertEqual(s, 404)
r, s = self.parse_response(
client.post('/api/v1/db/delete_one', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe3_1",
}
}
), headers=self.auth)
)
self.assertEqual(s, 404)
r, s = self.parse_response(
client.post('/api/v1/db/delete_many', data=json.dumps(
{
"collection": "works",
"filter": {
"author": "john doe3_1",
}
}
), headers=self.auth)
)
self.assertEqual(s, 404)
if __name__ == '__main__':
unittest.main()
| 46.262295
| 116
| 0.336894
| 1,288
| 19,754
| 4.967391
| 0.114907
| 0.026571
| 0.018756
| 0.034386
| 0.804783
| 0.763832
| 0.763832
| 0.763832
| 0.754142
| 0.6588
| 0
| 0.024261
| 0.570163
| 19,754
| 426
| 117
| 46.370892
| 0.729243
| 0
| 0
| 0.563776
| 0
| 0
| 0.162043
| 0.041055
| 0
| 0
| 0
| 0
| 0.096939
| 1
| 0.056122
| false
| 0.022959
| 0.017857
| 0
| 0.081633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7947d033e8d393f86da4736436a387d28b8a58ad
| 7,799
|
py
|
Python
|
mmdet/core/bbox/bbox_target.py
|
arthur801031/3d-multi-resolution-rcnn
|
8e5454a72f8daa174bf3eabfa5964152f04ab287
|
[
"Apache-2.0"
] | 16
|
2021-03-02T07:41:01.000Z
|
2022-03-14T08:55:45.000Z
|
mmdet/core/bbox/bbox_target.py
|
arthur801031/3d-multi-resolution-rcnn
|
8e5454a72f8daa174bf3eabfa5964152f04ab287
|
[
"Apache-2.0"
] | 2
|
2022-01-06T20:54:13.000Z
|
2022-02-24T03:50:51.000Z
|
mmdet/core/bbox/bbox_target.py
|
arthur801031/3d-multi-resolution-rcnn
|
8e5454a72f8daa174bf3eabfa5964152f04ab287
|
[
"Apache-2.0"
] | 2
|
2021-05-26T19:23:35.000Z
|
2022-01-06T20:30:24.000Z
|
import torch
from .transforms import bbox2delta, bbox2delta3d
from ..utils import multi_apply
def bbox_target(pos_bboxes_list,
                neg_bboxes_list,
                pos_gt_bboxes_list,
                pos_gt_labels_list,
                cfg,
                reg_classes=1,
                target_means=[.0, .0, .0, .0],
                target_stds=[1.0, 1.0, 1.0, 1.0],
                concat=True):
    """Compute 2-D bbox classification/regression targets for a batch.

    Runs ``bbox_target_single`` once per image via ``multi_apply`` and,
    when ``concat`` is True, concatenates the per-image tensors along
    dim 0 into single batch tensors.

    NOTE(review): ``target_means``/``target_stds`` are mutable default
    arguments; they are only forwarded here, but confirm callees never
    mutate them. ``reg_classes`` is forwarded unused by this wrapper.
    """
    labels, label_weights, bbox_targets, bbox_weights = multi_apply(
        bbox_target_single,
        pos_bboxes_list,
        neg_bboxes_list,
        pos_gt_bboxes_list,
        pos_gt_labels_list,
        cfg=cfg,
        reg_classes=reg_classes,
        target_means=target_means,
        target_stds=target_stds)
    if concat:
        # Merge the per-image result lists into single batch tensors.
        labels = torch.cat(labels, 0)
        label_weights = torch.cat(label_weights, 0)
        bbox_targets = torch.cat(bbox_targets, 0)
        bbox_weights = torch.cat(bbox_weights, 0)
    return labels, label_weights, bbox_targets, bbox_weights
def bbox_target_3d(pos_bboxes_list,
                   neg_bboxes_list,
                   pos_gt_bboxes_list,
                   pos_gt_labels_list,
                   cfg,
                   reg_classes=1,
                   target_means=[.0, .0, .0, .0, .0, .0],
                   target_stds=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
                   concat=True):
    """3-D variant of ``bbox_target``: 6-element deltas per box.

    Delegates per-image work to ``bbox_target_single_3d`` and, when
    ``concat`` is True, concatenates per-image results along dim 0.
    """
    labels, label_weights, bbox_targets, bbox_weights = multi_apply(
        bbox_target_single_3d,
        pos_bboxes_list,
        neg_bboxes_list,
        pos_gt_bboxes_list,
        pos_gt_labels_list,
        cfg=cfg,
        reg_classes=reg_classes,
        target_means=target_means,
        target_stds=target_stds)
    if concat:
        # Merge the per-image result lists into single batch tensors.
        labels = torch.cat(labels, 0)
        label_weights = torch.cat(label_weights, 0)
        bbox_targets = torch.cat(bbox_targets, 0)
        bbox_weights = torch.cat(bbox_weights, 0)
    return labels, label_weights, bbox_targets, bbox_weights
def bbox_target_3d_parcel(pos_bboxes_list,
                          neg_bboxes_list,
                          pos_gt_bboxes_list,
                          pos_gt_labels_list,
                          pos_gt_bregions_list,
                          cfg,
                          reg_classes=1,
                          target_means=[.0, .0, .0, .0, .0, .0],
                          target_stds=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
                          concat=True):
    """Like ``bbox_target_3d`` but additionally builds brain-region
    ("bregion") targets/weights via ``bbox_target_single_3d_parcel``.

    Returns six tensors instead of four: the extra ``bregions`` and
    ``bregion_weights`` mirror the label/label_weight layout.
    """
    labels, label_weights, bbox_targets, bbox_weights, bregions, bregion_weights = multi_apply(
        bbox_target_single_3d_parcel,
        pos_bboxes_list,
        neg_bboxes_list,
        pos_gt_bboxes_list,
        pos_gt_labels_list,
        pos_gt_bregions_list,
        cfg=cfg,
        reg_classes=reg_classes,
        target_means=target_means,
        target_stds=target_stds)
    if concat:
        # Merge the per-image result lists into single batch tensors.
        labels = torch.cat(labels, 0)
        label_weights = torch.cat(label_weights, 0)
        bbox_targets = torch.cat(bbox_targets, 0)
        bbox_weights = torch.cat(bbox_weights, 0)
        bregions = torch.cat(bregions, 0)
        bregion_weights = torch.cat(bregion_weights, 0)
    return labels, label_weights, bbox_targets, bbox_weights, bregions, bregion_weights
def bbox_target_single(pos_bboxes,
                       neg_bboxes,
                       pos_gt_bboxes,
                       pos_gt_labels,
                       cfg,
                       reg_classes=1,
                       target_means=[.0, .0, .0, .0],
                       target_stds=[1.0, 1.0, 1.0, 1.0]):
    """Build per-image 2-D targets: positives first, then negatives.

    Rows ``[0:num_pos]`` hold the positive samples (gt labels and 4-dim
    bbox2delta regression targets); rows ``[num_pos:]`` are negatives,
    which keep label 0 and zero bbox weights but get label weight 1.0.
    NOTE(review): ``reg_classes`` is accepted but unused in this body.
    """
    num_pos = pos_bboxes.size(0)
    num_neg = neg_bboxes.size(0)
    num_samples = num_pos + num_neg
    # All outputs are allocated zero-filled on pos_bboxes' device/dtype.
    labels = pos_bboxes.new_zeros(num_samples, dtype=torch.long)
    label_weights = pos_bboxes.new_zeros(num_samples)
    bbox_targets = pos_bboxes.new_zeros(num_samples, 4)
    bbox_weights = pos_bboxes.new_zeros(num_samples, 4)
    if num_pos > 0:
        labels[:num_pos] = pos_gt_labels
        # cfg.pos_weight <= 0 means "use the default weight of 1.0".
        pos_weight = 1.0 if cfg.pos_weight <= 0 else cfg.pos_weight
        label_weights[:num_pos] = pos_weight
        pos_bbox_targets = bbox2delta(pos_bboxes, pos_gt_bboxes, target_means,
                                      target_stds)
        bbox_targets[:num_pos, :] = pos_bbox_targets
        bbox_weights[:num_pos, :] = 1
    if num_neg > 0:
        label_weights[-num_neg:] = 1.0
    return labels, label_weights, bbox_targets, bbox_weights
def bbox_target_single_3d(pos_bboxes,
                          neg_bboxes,
                          pos_gt_bboxes,
                          pos_gt_labels,
                          cfg,
                          reg_classes=1,
                          target_means=[.0, .0, .0, .0, .0, .0],
                          target_stds=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0]):
    """3-D variant of ``bbox_target_single``: 6-dim bbox2delta3d deltas.

    Layout matches the 2-D version: positives in rows ``[0:num_pos]``,
    negatives after, with 6-wide target/weight tensors.
    """
    num_pos = pos_bboxes.size(0)
    num_neg = neg_bboxes.size(0)
    num_samples = num_pos + num_neg
    labels = pos_bboxes.new_zeros(num_samples, dtype=torch.long)
    label_weights = pos_bboxes.new_zeros(num_samples)
    bbox_targets = pos_bboxes.new_zeros(num_samples, 6)
    bbox_weights = pos_bboxes.new_zeros(num_samples, 6)
    if num_pos > 0:
        labels[:num_pos] = pos_gt_labels
        # cfg.pos_weight <= 0 means "use the default weight of 1.0".
        pos_weight = 1.0 if cfg.pos_weight <= 0 else cfg.pos_weight
        label_weights[:num_pos] = pos_weight
        pos_bbox_targets = bbox2delta3d(pos_bboxes, pos_gt_bboxes, target_means,
                                        target_stds)
        bbox_targets[:num_pos, :] = pos_bbox_targets
        bbox_weights[:num_pos, :] = 1
    if num_neg > 0:
        label_weights[-num_neg:] = 1.0
    # Leftover NaN-debugging hook kept for reference:
    # if torch.isnan(bbox_targets).any().item() == 1:
    #     breakpoint()
    return labels, label_weights, bbox_targets, bbox_weights
def bbox_target_single_3d_parcel(pos_bboxes,
                                 neg_bboxes,
                                 pos_gt_bboxes,
                                 pos_gt_labels,
                                 pos_gt_bregions,
                                 cfg,
                                 reg_classes=1,
                                 target_means=[.0, .0, .0, .0, .0, .0],
                                 target_stds=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0]):
    """Like ``bbox_target_single_3d`` plus per-sample "bregion" targets.

    ``bregions``/``bregion_weights`` are filled exactly like
    ``labels``/``label_weights`` (same pos_weight, negatives get 1.0).
    """
    num_pos = pos_bboxes.size(0)
    num_neg = neg_bboxes.size(0)
    num_samples = num_pos + num_neg
    labels = pos_bboxes.new_zeros(num_samples, dtype=torch.long)
    bregions = pos_bboxes.new_zeros(num_samples, dtype=torch.long)
    label_weights = pos_bboxes.new_zeros(num_samples)
    bregion_weights = pos_bboxes.new_zeros(num_samples)
    bbox_targets = pos_bboxes.new_zeros(num_samples, 6)
    bbox_weights = pos_bboxes.new_zeros(num_samples, 6)
    if num_pos > 0:
        labels[:num_pos] = pos_gt_labels
        bregions[:num_pos] = pos_gt_bregions
        # cfg.pos_weight <= 0 means "use the default weight of 1.0".
        pos_weight = 1.0 if cfg.pos_weight <= 0 else cfg.pos_weight
        label_weights[:num_pos] = pos_weight
        bregion_weights[:num_pos] = pos_weight
        pos_bbox_targets = bbox2delta3d(pos_bboxes, pos_gt_bboxes, target_means,
                                        target_stds)
        bbox_targets[:num_pos, :] = pos_bbox_targets
        bbox_weights[:num_pos, :] = 1
    if num_neg > 0:
        label_weights[-num_neg:] = 1.0
        bregion_weights[-num_neg:] = 1.0
    # Leftover NaN-debugging hook kept for reference:
    # if torch.isnan(bbox_targets).any().item() == 1:
    #     breakpoint()
    return labels, label_weights, bbox_targets, bbox_weights, bregions, bregion_weights
def expand_target(bbox_targets, bbox_weights, labels, num_classes):
    """Scatter per-sample 4-dim targets/weights into class-specific slots.

    Args:
        bbox_targets (Tensor): (N, 4) regression targets.
        bbox_weights (Tensor): (N, 4) regression weights.
        labels (Tensor): (N,) class labels; 0 means background.
        num_classes (int): number of classes (including background slot 0).

    Returns:
        tuple(Tensor, Tensor): ``(bbox_targets_expand, bbox_weights_expand)``,
        each of shape (N, 4 * num_classes). For sample ``i`` with positive
        label ``c``, its 4 values occupy columns ``[4*c:4*(c+1)]``; all
        other columns (and all background rows) remain zero.
    """
    # Bug fix: removed a leftover ``breakpoint()`` debugger call that
    # halted execution every time this function was entered.
    bbox_targets_expand = bbox_targets.new_zeros((bbox_targets.size(0),
                                                  4 * num_classes))
    bbox_weights_expand = bbox_weights.new_zeros((bbox_weights.size(0),
                                                  4 * num_classes))
    # Only positive (label > 0) rows are scattered.
    for i in torch.nonzero(labels > 0).squeeze(-1):
        start, end = labels[i] * 4, (labels[i] + 1) * 4
        bbox_targets_expand[i, start:end] = bbox_targets[i, :]
        bbox_weights_expand[i, start:end] = bbox_weights[i, :]
    return bbox_targets_expand, bbox_weights_expand
| 39.790816
| 95
| 0.594307
| 1,038
| 7,799
| 4.092486
| 0.058767
| 0.018362
| 0.018362
| 0.024482
| 0.879002
| 0.858757
| 0.858757
| 0.850753
| 0.841808
| 0.841808
| 0
| 0.033377
| 0.312348
| 7,799
| 195
| 96
| 39.994872
| 0.758717
| 0.016541
| 0
| 0.80226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039548
| false
| 0
| 0.016949
| 0
| 0.096045
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79b39bfc6d2a8dcf7d670019103de9c573685be7
| 7,882
|
py
|
Python
|
src/tools.py
|
HumbertoGlez/CompilerProject
|
9d9469349ded324664c64c2165b812283d3babd0
|
[
"MIT"
] | 1
|
2021-04-09T22:25:40.000Z
|
2021-04-09T22:25:40.000Z
|
src/tools.py
|
HumbertoGlez/CompilerProject
|
9d9469349ded324664c64c2165b812283d3babd0
|
[
"MIT"
] | null | null | null |
src/tools.py
|
HumbertoGlez/CompilerProject
|
9d9469349ded324664c64c2165b812283d3babd0
|
[
"MIT"
] | null | null | null |
# Deliverable 3: semantic cube of operations.
# Type-name constants used as keys and result values in semanticCube below.
INT = "int"
FLOAT = "float"
CHAR = "char"
STRING = "string"
ANY = "any"  # NOTE(review): not referenced by semanticCube in this file — confirm use elsewhere.
# Sentinel marking an invalid operand/operator combination; doubles as the
# error-message template raised by operation_result_type.
TYPE_ERROR = "Undefined operator {} for types {} and {}"
semanticCube = {
INT:{
INT: {
'+':INT,
'-': INT,
'*': FLOAT,
'/': INT,
'%': INT,
'>': INT,
'<': INT,
'=': INT,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': INT,
'-=': INT,
'*=': FLOAT,
'/=': INT
},
FLOAT: {
'+': FLOAT,
'-': FLOAT,
'*': FLOAT,
'/': FLOAT,
'%': FLOAT,
'>': INT,
'<': INT,
'=': INT,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': INT,
'-=': INT,
'*=': INT,
'/=': INT,
},
CHAR: {
'+':INT,
'-': INT,
'*': INT,
'/': INT,
'%': INT,
'>': INT,
'<': INT,
'=': INT,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': INT,
'-=': INT,
'*=': INT,
'/=': INT
},
STRING: {
'+':TYPE_ERROR,
'-': TYPE_ERROR,
'*': TYPE_ERROR,
'/': TYPE_ERROR,
'%': TYPE_ERROR,
'>': TYPE_ERROR,
'<': TYPE_ERROR,
'=': TYPE_ERROR,
'<=': TYPE_ERROR,
'>=': TYPE_ERROR,
'==': TYPE_ERROR,
'!=': TYPE_ERROR,
'+=': TYPE_ERROR,
'-=': TYPE_ERROR,
'*=': TYPE_ERROR,
'/=': TYPE_ERROR
}
},
FLOAT: {
INT: {
'+':FLOAT,
'-': FLOAT,
'*': FLOAT,
'/': FLOAT,
'%': FLOAT,
'>': INT,
'<': INT,
'=': FLOAT,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': FLOAT,
'-=': FLOAT,
'*=': FLOAT,
'/=': FLOAT
},
FLOAT: {
'+':FLOAT,
'-': FLOAT,
'*': FLOAT,
'/': FLOAT,
'%': FLOAT,
'>': INT,
'<': INT,
'=': FLOAT,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': FLOAT,
'-=': FLOAT,
'*=': FLOAT,
'/=': FLOAT
},
CHAR: {
'+':FLOAT,
'-': FLOAT,
'*': FLOAT,
'/': FLOAT,
'%': FLOAT,
'>': INT,
'<': INT,
'=': FLOAT,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': FLOAT,
'-=': FLOAT,
'*=': FLOAT,
'/=': FLOAT
},
STRING: {
'+':TYPE_ERROR,
'-': TYPE_ERROR,
'*': TYPE_ERROR,
'/': TYPE_ERROR,
'%': TYPE_ERROR,
'>': TYPE_ERROR,
'<': TYPE_ERROR,
'=': TYPE_ERROR,
'<=': TYPE_ERROR,
'>=': TYPE_ERROR,
'==': TYPE_ERROR,
'!=': TYPE_ERROR,
'+=': TYPE_ERROR,
'-=': TYPE_ERROR,
'*=': TYPE_ERROR,
'/=': TYPE_ERROR
}
},
CHAR: {
INT: {
'+':INT,
'-': INT,
'*': INT,
'/': INT,
'%': INT,
'>': INT,
'<': INT,
'=': CHAR,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': CHAR,
'-=': CHAR,
'*=': CHAR,
'/=': CHAR
},
FLOAT: {
'+':FLOAT,
'-': FLOAT,
'*': FLOAT,
'/': FLOAT,
'%': FLOAT,
'>': INT,
'<': INT,
'=': CHAR,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': CHAR,
'-=': CHAR,
'*=': CHAR,
'/=': CHAR
},
CHAR: {
'+':INT,
'-': INT,
'*': INT,
'/': INT,
'%': INT,
'>': INT,
'<': INT,
'=': CHAR,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': CHAR,
'-=': CHAR,
'*=': CHAR,
'/=': CHAR
},
STRING: {
'+': STRING,
'-': TYPE_ERROR,
'*': TYPE_ERROR,
'/': TYPE_ERROR,
'%': TYPE_ERROR,
'>': TYPE_ERROR,
'<': TYPE_ERROR,
'=': TYPE_ERROR,
'<=': TYPE_ERROR,
'>=': TYPE_ERROR,
'==': TYPE_ERROR,
'!=': TYPE_ERROR,
'+=': TYPE_ERROR,
'-=': TYPE_ERROR,
'*=': TYPE_ERROR,
'/=': TYPE_ERROR
}
},
STRING: {
INT: {
'+': TYPE_ERROR,
'-': TYPE_ERROR,
'*': TYPE_ERROR,
'/': TYPE_ERROR,
'%': TYPE_ERROR,
'>': TYPE_ERROR,
'<': TYPE_ERROR,
'=': TYPE_ERROR,
'<=': TYPE_ERROR,
'>=': TYPE_ERROR,
'==': TYPE_ERROR,
'!=': TYPE_ERROR,
'+=': TYPE_ERROR,
'-=': TYPE_ERROR,
'*=': TYPE_ERROR,
'/=': TYPE_ERROR
},
FLOAT: {
'+': TYPE_ERROR,
'-': TYPE_ERROR,
'*': TYPE_ERROR,
'/': TYPE_ERROR,
'%': TYPE_ERROR,
'>': TYPE_ERROR,
'<': TYPE_ERROR,
'=': TYPE_ERROR,
'<=': TYPE_ERROR,
'>=': TYPE_ERROR,
'==': TYPE_ERROR,
'!=': TYPE_ERROR,
'+=': TYPE_ERROR,
'-=': TYPE_ERROR,
'*=': TYPE_ERROR,
'/=': TYPE_ERROR
},
CHAR: {
'+': STRING,
'-': TYPE_ERROR,
'*': TYPE_ERROR,
'/': TYPE_ERROR,
'%': TYPE_ERROR,
'>': TYPE_ERROR,
'<': TYPE_ERROR,
'=': STRING,
'<=': TYPE_ERROR,
'>=': TYPE_ERROR,
'==': TYPE_ERROR,
'!=': TYPE_ERROR,
'+=': STRING,
'-=': TYPE_ERROR,
'*=': TYPE_ERROR,
'/=': TYPE_ERROR
},
STRING: {
'+': STRING,
'-': TYPE_ERROR,
'*': TYPE_ERROR,
'/': TYPE_ERROR,
'%': TYPE_ERROR,
'>': INT,
'<': INT,
'=': STRING,
'<=': INT,
'>=': INT,
'==': INT,
'!=': INT,
'+=': STRING,
'-=': TYPE_ERROR,
'*=': TYPE_ERROR,
'/=': TYPE_ERROR
}
}
}
def operation_result_type(left_type, right_type, oper):
    """Return the result type of applying *oper* to *left_type* and *right_type*.

    Looks the combination up in the module-level ``semanticCube`` table.

    Raises
    ------
    ValueError
        If either operand type or the operator is missing from the cube.
    TypeError
        If the cube marks the combination as a type error (``TYPE_ERROR``).
    """
    if left_type not in semanticCube:
        raise ValueError("{} does not exist".format(left_type))
    if right_type not in semanticCube[left_type]:
        raise ValueError("{} does not exist".format(right_type))
    if oper not in semanticCube[left_type][right_type]:
        raise ValueError("{} does not exist for {} and {}".format(oper, left_type, right_type))
    # Hoist the lookup so the cube is indexed once instead of three times.
    result = semanticCube[left_type][right_type][oper]
    if result == TYPE_ERROR:
        # TYPE_ERROR doubles as the error-message format string.
        raise TypeError(TYPE_ERROR.format(oper, left_type, right_type))
    return result
| 24.554517
| 95
| 0.28508
| 486
| 7,882
| 4.374486
| 0.074074
| 0.431797
| 0.544214
| 0.753528
| 0.857949
| 0.836312
| 0.727658
| 0.719661
| 0.69191
| 0.689558
| 0
| 0.000262
| 0.515986
| 7,882
| 320
| 96
| 24.63125
| 0.557012
| 0.005455
| 0
| 0.875796
| 0
| 0
| 0.065204
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003185
| false
| 0
| 0
| 0
| 0.003185
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
8dc793c9dcdf3a9eccd1e24016416a1a8b114f3d
| 12,974
|
py
|
Python
|
teaser/data/output/buildingelement_output.py
|
Ja98/TEASER
|
1bb782a01ce1b38c4abecb9c6ecc4d59f1ba21a3
|
[
"MIT"
] | 1
|
2018-10-22T07:21:15.000Z
|
2018-10-22T07:21:15.000Z
|
teaser/data/output/buildingelement_output.py
|
Ja98/TEASER
|
1bb782a01ce1b38c4abecb9c6ecc4d59f1ba21a3
|
[
"MIT"
] | null | null | null |
teaser/data/output/buildingelement_output.py
|
Ja98/TEASER
|
1bb782a01ce1b38c4abecb9c6ecc4d59f1ba21a3
|
[
"MIT"
] | null | null | null |
# Created April 2016
# TEASER Development Team
"""buildingelement_ouput.py
This module contains function to save building element classes
"""
import teaser.data.bindings.v_0_6.typeelement_bind as tb_bind
import teaser.logic.utilities as utilities
import warnings
import pyxb
def save_type_element(element, data_class):
    """Typical element saver.

    Saves typical building elements according to their construction
    year and their construction type in the XML file for type building
    elements. If the Project parent is set, it automatically saves it to
    the file given in Project.data. Alternatively you can specify a path to
    a file of TypeBuildingElements. If this file does not exist,
    a new file is created.

    Parameters
    ----------
    element : BuildingElement()
        Instance of BuildingElement or inherited Element of TEASER
    data_class : DataClass()
        DataClass containing the bindings for TypeBuildingElement and
        Material (typically this is the data class stored in prj.data,
        but the user can individually change that.
    """
    element_binding = data_class.element_bind
    element_binding.version = "0.6"
    add_to_xml = True
    pyxb.utils.domutils.BindingDOMSupport.DeclareNamespace(
        tb_bind.Namespace, 'elements')
    warning_text = ("Construction Type and building age "
                    "group already exist in this XML, consider revising "
                    "your inputs. The Element is NOT saved into XML")
    # Dispatch table: TEASER element class name -> (XML element list on the
    # binding, pyxb type used to serialise one element of that kind).
    # Replaces eight copy-pasted branches that differed only in these two
    # values (one of which used `if` where its siblings used `elif`).
    element_map = {
        "OuterWall": (element_binding.OuterWall, tb_bind.OuterWallType),
        "Door": (element_binding.Door, tb_bind.DoorType),
        "InnerWall": (element_binding.InnerWall, tb_bind.InnerWallType),
        "Ceiling": (element_binding.Ceiling, tb_bind.CeilingType),
        "Floor": (element_binding.Floor, tb_bind.FloorType),
        "GroundFloor": (element_binding.GroundFloor, tb_bind.GroundFloorType),
        "Rooftop": (element_binding.Rooftop, tb_bind.RooftopType),
        "Window": (element_binding.Window, tb_bind.WindowType),
    }
    entry = element_map.get(type(element).__name__)
    if entry is not None:
        binding_list, pyxb_type = entry
        # Refuse to save a duplicate (same age group + construction type).
        for check in binding_list:
            if check.building_age_group == element.building_age_group and\
                    check.construction_type == element.construction_type:
                warnings.warn(warning_text)
                add_to_xml = False
                break
        if add_to_xml is True:
            pyxb_wall = pyxb_type()
            _set_basic_data_pyxb(element=element,
                                 pyxb_class=pyxb_wall)
            pyxb_wall.Layers = tb_bind.LayersType()
            _set_layer_data_pyxb(element=element,
                                 pyxb_class=pyxb_wall)
            binding_list.append(pyxb_wall)
    if add_to_xml is True:
        # Context manager ensures the file handle is closed even on error
        # (the original leaked the handle).
        with open(utilities.get_full_path(data_class.path_tb), "w") as out_file:
            out_file.write(element_binding.toDOM().toprettyxml())
def delete_type_element(element, data_class):
    """Deletes typical element.

    Deletes typical building elements according to their construction
    year and their construction type in the XML file for type building
    elements. If the Project parent is set, it automatically saves it to
    the file given in Project.data. Alternatively you can specify a path to
    a file of TypeBuildingElements. If this file does not exist,
    a new file is created.

    Parameters
    ----------
    element : BuildingElement()
        Instance of BuildingElement or inherited Element of TEASER
    data_class : DataClass()
        DataClass containing the bindings for TypeBuildingElement and
        Material (typically this is the data class stored in prj.data,
        but the user can individually change that.
    """
    element_binding = data_class.element_bind
    # Dispatch table: TEASER element class name -> XML element list on the
    # binding.  Replaces eight copy-pasted branches that differed only in
    # which list was searched.
    element_map = {
        "OuterWall": element_binding.OuterWall,
        "Door": element_binding.Door,
        "InnerWall": element_binding.InnerWall,
        "Ceiling": element_binding.Ceiling,
        "Floor": element_binding.Floor,
        "GroundFloor": element_binding.GroundFloor,
        "Rooftop": element_binding.Rooftop,
        "Window": element_binding.Window,
    }
    binding_list = element_map.get(type(element).__name__)
    if binding_list is not None:
        for check in binding_list:
            if check.building_age_group == element.building_age_group and \
                    check.construction_type == element.construction_type:
                # Safe despite iterating: we break immediately after removal.
                binding_list.remove(check)
                break
    # Context manager ensures the file handle is closed even on error
    # (the original leaked the handle).
    with open(utilities.get_full_path(data_class.path_tb), "w") as out_file:
        out_file.write(element_binding.toDOM().toprettyxml())
def _set_basic_data_pyxb(element, pyxb_class):
"""Helper function for save_type_element to set the layer data.
Parameters
----------
pyxb_class :
Pyxb class representation of xml
"""
pyxb_class.building_age_group = element.building_age_group
pyxb_class.construction_type = element.construction_type
pyxb_class.inner_radiation = element.inner_radiation
pyxb_class.inner_convection = element.inner_convection
if type(element).__name__ == 'InnerWall' or \
type(element).__name__ == 'Ceiling' or \
type(element).__name__ == 'Floor' or \
type(element).__name__ == 'GroundFloor':
pass
elif type(element).__name__ == 'Window':
pyxb_class.outer_radiation = element.outer_radiation
pyxb_class.outer_convection = element.outer_convection
pyxb_class.g_value = element.g_value
pyxb_class.a_conv = element.a_conv
pyxb_class.shading_g_total = element.shading_g_total
pyxb_class.shading_max_irr = element.shading_max_irr
elif type(element).__name__ == 'OuterWall' or\
type(element).__name__ == 'Rooftop' or\
type(element).__name__ == 'Door':
pyxb_class.outer_radiation = element.outer_radiation
pyxb_class.outer_convection = element.outer_convection
def _set_layer_data_pyxb(element, pyxb_class):
    """Helper function for save_type_element to set the layer data.

    Serialises every layer of *element* into a ``tb_bind.layerType`` and
    appends it to ``pyxb_class.Layers``.

    Parameters
    ----------
    element : BuildingElement()
        TEASER element whose layers are serialised.
    pyxb_class
        pyxb class representation of xml
    """
    for layer in element.layer:
        pyxb_layer = tb_bind.layerType()
        pyxb_layer.id = layer.id
        pyxb_layer.thickness = layer.thickness
        # NOTE(review): the material NAME is assigned first, then material_id
        # is set as an attribute on the resulting binding object — this
        # relies on pyxb wrapping the assigned string in a binding instance;
        # the statement order is significant, confirm before refactoring.
        pyxb_layer.material = layer.material.name
        pyxb_layer.material.material_id = layer.material.material_id
        pyxb_class.Layers.append(pyxb_layer)
| 35.839779
| 75
| 0.632573
| 1,479
| 12,974
| 5.205544
| 0.108857
| 0.075724
| 0.072737
| 0.050786
| 0.81569
| 0.798285
| 0.796207
| 0.767762
| 0.767762
| 0.767762
| 0
| 0.000878
| 0.297672
| 12,974
| 361
| 76
| 35.939058
| 0.844052
| 0.139201
| 0
| 0.715596
| 0
| 0
| 0.029066
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018349
| false
| 0.004587
| 0.018349
| 0
| 0.036697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8df5932db8e335d8b0d3e48fa9e43c40998e3e40
| 370,781
|
py
|
Python
|
archiv/models.py
|
acdh-oeaw/4dpuzzle
|
7856bbd82c7dfa8da1d5f1ad40593219a35b3cfe
|
[
"MIT"
] | null | null | null |
archiv/models.py
|
acdh-oeaw/4dpuzzle
|
7856bbd82c7dfa8da1d5f1ad40593219a35b3cfe
|
[
"MIT"
] | 6
|
2020-06-05T18:32:02.000Z
|
2022-02-10T07:22:24.000Z
|
archiv/models.py
|
acdh-oeaw/4dpuzzle
|
7856bbd82c7dfa8da1d5f1ad40593219a35b3cfe
|
[
"MIT"
] | 1
|
2020-06-30T13:52:41.000Z
|
2020-06-30T13:52:41.000Z
|
# generated by appcreator
from django.db import models
from django.urls import reverse
from django.contrib.postgres.fields import DateRangeField
from vocabs.models import SkosConcept
from browsing.browsing_utils import model_to_dict
def set_extra(self, **kwargs):
    """Attach arbitrary keyword metadata to *self* as ``self.extra``.

    Returns *self* so calls can be chained fluently, e.g.
    ``models.TextField(...).set_extra(is_public=True)``.
    """
    self.extra = dict(kwargs)
    return self
# Monkey-patch Django's base Field class so every model field below can
# chain .set_extra(...) to carry project-specific metadata (is_public,
# data_lookup, arche_prop) consumed elsewhere in the project.
models.Field.set_extra = set_extra
class Actor(models.Model):
    """ Person involved in TD excavations and/or A Puzzle in 4D project """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    canonic_arche_uri = models.TextField(
        blank=True,
        verbose_name="authority file URI"
    ).set_extra(
        is_public=True,
        arche_prop="hasIdentifier"
    )
    name = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Name",
        help_text="helptext for name",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Actor/Actor.csv__first_name",
        arche_prop="hasAlternativeTitle"
    )
    drawer_monogram = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Drawer Monogram",
        help_text="helptext for drawer_monogram",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Actor/Actor.csv__drawer_monogram",
    )
    excavation = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Excavation",
        help_text="helptext for excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Actor/Actor.csv__Excavation",
    )
    xx_4dpuzzle = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="4DPuzzle",
        help_text="helptext for xx_4dpuzzle",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Actor/Actor.csv__4DPuzzle",
    )
    year = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Year",
        help_text="helptext for year",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Actor/Actor.csv__year",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_actor_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="helptext for access",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Actor/Actor.csv__Access",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'name',
        ]
        verbose_name = "Actor"

    def __str__(self):
        # Prefer the human-readable name; fall back to the legacy ID.
        if self.name:
            return "{}".format(self.name)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a dict (for browsing views)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        # Fixed: classmethods previously named their first parameter `self`.
        return reverse('archiv:actor_browse')

    @classmethod
    def get_createview_url(cls):
        return reverse('archiv:actor_create')

    def get_absolute_url(self):
        # Fixed: this method was defined twice with identical bodies; the
        # dead duplicate has been removed.
        return reverse('archiv:actor_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        return reverse('archiv:actor_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        return reverse('archiv:actor_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the next object by id, or False."""
        # Renamed local so it no longer shadows the builtin `next`.
        nxt = self.__class__.objects.filter(id__gt=self.id)
        if nxt:
            return reverse(
                'archiv:actor_detail',
                kwargs={'pk': nxt.first().id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the previous object by id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id')
        if prv:
            return reverse(
                'archiv:actor_detail',
                kwargs={'pk': prv.first().id}
            )
        return False
class ArchaeologicalObject4DPuzzleID(models.Model):
    """ A 4DPuzzleID was created for archaeological objects that did not have an ID """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_archaeologicalobject4dpuzzleid_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Creator_metadata",
    )
    archaeological_object_id = models.ForeignKey(
        "ArchaeologicalObjectID",
        related_name='rvn_archaeologicalobject4dpuzzleid_archaeological_object_id_archaeologicalobjectid',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="helptext for archaeological_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Archaeological_object_ID",
    )
    archaeological_object_4dpuzzle_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Archaeological object 4DPuzzle ID",
        help_text="helptext for archaeological_object_4dpuzzle_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Archaeological_object_4DPuzzle_ID",
    )
    archaeological_object_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Archaeological object comment",
        help_text="helptext for archaeological_object_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Archaeological_object_comment",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_archaeologicalobject4dpuzzleid_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="helptext for excavation_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Excavation_object_ID",
    )
    position = models.TextField(
        blank=True, null=True,
        verbose_name="Position",
        help_text="helptext for position",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Position",
    )
    stratum_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Stratum Comment",
        help_text="helptext for stratum_comment",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Stratum_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="helptext for digitisation_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Digitisation_comment",
    )
    archaeological_object_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archaeologicalobject4dpuzzleid_archaeological_object_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Archaeological object type",
        help_text="helptext for archaeological_object_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Archaeological_object_type",
    )
    stratum_id_relative = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archaeologicalobject4dpuzzleid_stratum_id_relative_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID relative",
        help_text="helptext for stratum_id_relative",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Stratum_ID_relative",
    )
    stratum_id_absolute_prepub = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archaeologicalobject4dpuzzleid_stratum_id_absolute_prepub_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID absolute pre publication",
        help_text="helptext for stratum_id_absolute_prepub",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Stratum_ID_absolute_prepub",
    )
    phase_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archaeologicalobject4dpuzzleid_phase_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Phase ID",
        help_text="helptext for phase_id",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObject4DPuzzleID/Archaeological_object_4DPuzzle.scv__Phase_ID",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'archaeological_object_id',
        ]
        verbose_name = "ArchaeologicalObject4DPuzzleID"

    def __str__(self):
        # Prefer the linked object ID; fall back to the legacy ID.
        if self.archaeological_object_id:
            return "{}".format(self.archaeological_object_id)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a dict (for browsing views)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        # Fixed: classmethods previously named their first parameter `self`.
        return reverse('archiv:archaeologicalobject4dpuzzleid_browse')

    @classmethod
    def get_createview_url(cls):
        return reverse('archiv:archaeologicalobject4dpuzzleid_create')

    def get_absolute_url(self):
        # Fixed: this method was defined twice with identical bodies; the
        # dead duplicate has been removed.
        return reverse('archiv:archaeologicalobject4dpuzzleid_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        return reverse('archiv:archaeologicalobject4dpuzzleid_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        return reverse('archiv:archaeologicalobject4dpuzzleid_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the next object by id, or False."""
        # Renamed local so it no longer shadows the builtin `next`.
        nxt = self.__class__.objects.filter(id__gt=self.id)
        if nxt:
            return reverse(
                'archiv:archaeologicalobject4dpuzzleid_detail',
                kwargs={'pk': nxt.first().id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the previous object by id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id')
        if prv:
            return reverse(
                'archiv:archaeologicalobject4dpuzzleid_detail',
                kwargs={'pk': prv.first().id}
            )
        return False
class ArchaeologicalObjectID(models.Model):
    """ ID of archaeological object """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_archaeologicalobjectid_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Creator_metadata",
    )
    archaeological_object_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="helptext for archaeological_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Archaeological_object_ID",
    )
    archaeological_object_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Archaeological object comment",
        help_text="helptext for archaeological_object_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Archaeological_object_comment",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_archaeologicalobjectid_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="helptext for excavation_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Excavation_object_ID",
    )
    position = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Position",
        help_text="helptext for position",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Position",
    )
    stratum_id_relative = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Stratum ID relative",
        help_text="helptext for stratum_id_relative",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Stratum_ID_relative",
    )
    stratum_id_absolute_prepub = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Stratum ID absolute pre publication",
        help_text="helptext for stratum_id_absolute_prepub",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Stratum_ID_absolute_prepub",
    )
    stratum_comment = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Stratum comment",
        help_text="helptext for stratum_comment",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Stratum_comment",
    )
    phase_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Phase ID",
        help_text="helptext for phase_id",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Phase_ID",
    )
    corresponding_to_archaeological_object_id = models.ManyToManyField(
        "ArchaeologicalObjectID",
        related_name='rvn_archaeologicalobjectid_corresponding_to_archaeological_object_id_archaeologicalobjectid',
        blank=True,
        verbose_name="Corresponding to archaeological object ID",
        help_text="helptext for corresponding_to_archaeological_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Corresponding_to_archaeological_object_ID",
    )
    relatedto = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__RelatedTo",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="helptext for digitisation_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Digitisation_comment",
    )
    archaeological_object_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archaeologicalobjectid_archaeological_object_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Archaeological object type",
        help_text="helptext for archaeological_object_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchaeologicalObjectID/Archaeological_object_ID.csv__Archaeological_object_type",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'archaeological_object_id',
        ]
        verbose_name = "ArchaeologicalObjectID"

    def __str__(self):
        # Prefer the object ID; fall back to the legacy ID.
        if self.archaeological_object_id:
            return "{}".format(self.archaeological_object_id)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a dict (for browsing views)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        # Fixed: classmethods previously named their first parameter `self`.
        return reverse('archiv:archaeologicalobjectid_browse')

    @classmethod
    def get_createview_url(cls):
        return reverse('archiv:archaeologicalobjectid_create')

    def get_absolute_url(self):
        # Fixed: this method was defined twice with identical bodies; the
        # dead duplicate has been removed.
        return reverse('archiv:archaeologicalobjectid_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        return reverse('archiv:archaeologicalobjectid_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        return reverse('archiv:archaeologicalobjectid_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the next object by id, or False."""
        # Renamed local so it no longer shadows the builtin `next`.
        nxt = self.__class__.objects.filter(id__gt=self.id)
        if nxt:
            return reverse(
                'archiv:archaeologicalobjectid_detail',
                kwargs={'pk': nxt.first().id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the previous object by id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id')
        if prv:
            return reverse(
                'archiv:archaeologicalobjectid_detail',
                kwargs={'pk': prv.first().id}
            )
        return False
class ArchiveINF(models.Model):
    """ Document with information about the Tell el-Daba documentation archive """
    # Field definitions are kept byte-identical (they define the DB schema).
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_archiveinf_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_archiveinf_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Creator_original",
        arche_prop="hasCreator",
    )
    creator_archivalobject = models.ForeignKey(
        "Actor",
        related_name='rvn_archiveinf_creator_archivalobject_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of archival object",
        help_text="helptext for creator_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__creator_archivalObject",
        arche_prop="hasContributor"
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Filename",
        arche_prop="hasAlternativeTitle"
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID ",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Document_title",
        arche_prop="hasAlternativeTitle"
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year original",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Creation_year_original",
    )
    creation_date_archivalobject = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date archival object",
        help_text="helptext for creation_date_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Creation_date_archivalObject",
        arche_prop="hasCreatedDate"
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Creation_date_metadata",
    )
    comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment",
        help_text="helptext for comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Comment",
        arche_prop="hasNote"
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_archiveinf_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Document_type",
    )
    relatedto = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_archiveinf_relatedto_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__RelatedTo",
    )
    file_extension_archivalobject = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archiveinf_file_extension_archivalobject_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of archival object",
        help_text="helptext for file_extension_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__File_extension_archivalObject",
        arche_prop="hasTechnicalInfo"
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archiveinf_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archiveinf_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Access",
        arche_prop="hasAccessRestriction"
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_archiveinf_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ArchiveINF/ArchiveINF_metadata.csv__Site_ID",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # FileChecker bookkeeping fields (not public).
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'filename',
        ]
        verbose_name = "Archive information"

    def __str__(self):
        """Human-readable label: the filename, or the legacy ID as fallback."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Serialise this instance into a ``{field_name: value}`` dict."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Flag: records of this model are imported into ARCHE."""
        # Classmethod parameters renamed self -> cls throughout (PEP 8).
        return True

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the list (browse) view for this model."""
        return reverse('archiv:archiveinf_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:archiveinf_create')

    def get_absolute_url(self):
        """Return the canonical detail-view URL of this object.

        The duplicated second definition present in the original has been
        removed.
        """
        return reverse('archiv:archiveinf_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of this object's delete view."""
        return reverse('archiv:archiveinf_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of this object's edit view."""
        return reverse('archiv:archiveinf_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Orders explicitly by ``id`` (the original relied on ``Meta.ordering``,
        so ``.first()`` was not necessarily the next id) and fetches only a
        single row instead of evaluating the whole queryset.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt is not None:
            return reverse(
                'archiv:archiveinf_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv is not None:
            return reverse(
                'archiv:archiveinf_detail',
                kwargs={'pk': prv.id}
            )
        return False
class AutoCAD(models.Model):
    """ AutoCAD Files """
    # Field definitions are kept byte-identical (they define the DB schema).
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_autocad_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Creator_metadata",
        arche_prop="hasMetadataCreator"
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_autocad_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Creator_original",
        arche_prop="hasCreator"
    )
    creator_archivalobject = models.ForeignKey(
        "Actor",
        related_name='rvn_autocad_creator_archivalobject_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of archival object",
        help_text="Person who processed resource for digital long-term archiving.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__creator_archivalObject",
        arche_prop="hasContributor"
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Filename",
        arche_prop="hasAlternativeTitle"
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Document_title",
        arche_prop="hasAlternativeTitle"
    )
    path_filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in old TD archive",
        help_text="helptext for path_filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Path_filename_old",
    )
    path_filename_arche = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in ARCHE",
        help_text="helptext for path_filename_arche",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Path_filename_ARCHE",
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year original",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Creation_year_original",
        arche_prop="hasCreatedDate"
    )
    creation_date_archivalobject = models.DateField(
        blank=True, null=True,
        verbose_name="Creation year archival object",
        help_text="helptext for creation_date_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Creation_date_archivalObject",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Creation_date_metadata",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_autocad_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Excavation_object_ID",
    )
    archaeological_object_id = models.ManyToManyField(
        "ArchaeologicalObjectID",
        related_name='rvn_autocad_archaeological_object_id_archaeologicalobjectid',
        blank=True,
        verbose_name="Archeological object ID",
        help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Archaeological_object_ID",
    )
    relatedto = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__RelatedTo",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Digitisation_comment",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_autocad_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Document_type",
    )
    file_extension_original = models.ForeignKey(
        SkosConcept,
        related_name='rvn_autocad_file_extension_original_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of original",
        help_text="helptext for file_extension_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__File_extension_original",
    )
    file_extension_archivalobject = models.ForeignKey(
        SkosConcept,
        related_name='rvn_autocad_file_extension_archivalobject_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of archival object",
        help_text="helptext for file_extension_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__File_extension_archivalObject",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_autocad_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_autocad_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Access",
        arche_prop="hasAccessRestriction"
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_autocad_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Site_ID",
    )
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_autocad_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or after (post-excavation)",
        help_text="helptext for excavation_post_excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_AutoCAD/AutoCAD_metadata__Excavation__post_excavation",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # FileChecker bookkeeping fields (not public).
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'filename',
        ]
        verbose_name = "AutoCAD"

    def __str__(self):
        """Human-readable label: the filename, or the legacy ID as fallback."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Serialise this instance into a ``{field_name: value}`` dict."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Flag: records of this model are imported into ARCHE."""
        # Classmethod parameters renamed self -> cls throughout (PEP 8).
        return True

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the list (browse) view for this model."""
        return reverse('archiv:autocad_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:autocad_create')

    def get_absolute_url(self):
        """Return the canonical detail-view URL of this object.

        The duplicated second definition present in the original has been
        removed.
        """
        return reverse('archiv:autocad_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of this object's delete view."""
        return reverse('archiv:autocad_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of this object's edit view."""
        return reverse('archiv:autocad_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Orders explicitly by ``id`` (the original relied on ``Meta.ordering``,
        so ``.first()`` was not necessarily the next id) and fetches only a
        single row instead of evaluating the whole queryset.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt is not None:
            return reverse(
                'archiv:autocad_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv is not None:
            return reverse(
                'archiv:autocad_detail',
                kwargs={'pk': prv.id}
            )
        return False
class Convolutecards(models.Model):
""" Digitised convolute cards """
legacy_id = models.CharField(
max_length=300, blank=True,
verbose_name="Legacy ID"
)
creator_metadata = models.ForeignKey(
"Actor",
related_name='rvn_convolutecards_creator_metadata_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of metadata",
help_text="helptext for creator_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Creator_metadata",
arche_prop="hasMetadataCreator"
)
creator_original = models.ForeignKey(
"Actor",
related_name='rvn_convolutecards_creator_original_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of original document",
help_text="helptext for creator_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Creator_original",
arche_prop="hasCreator"
)
creator_scan = models.ForeignKey(
"Actor",
related_name='rvn_convolutecards_creator_scan_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of scan",
help_text="helptext for creator_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Creator_scan",
arche_prop="hasDigitisingAgent"
)
document_type = models.ForeignKey(
"DocumentTypes",
related_name='rvn_convolutecards_document_type_documenttypes',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Document type",
help_text="helptext for document_type",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Document_type",
)
excavation_id = models.ManyToManyField(
"ExcavationSeasons",
related_name='rvn_convolutecards_excavation_id_excavationseasons',
blank=True,
verbose_name="Excavation Season",
help_text="helptext for excavation_id",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Excavation_id",
)
creation_year_original = models.TextField(
blank=True, null=True,
verbose_name="Creation year of original document",
help_text="helptext for creation_year_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Creation_year_original",
)
season = models.TextField(
blank=True, null=True,
verbose_name="Season",
help_text="helptext for season",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Season",
)
filename_document_id = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename ",
help_text="The filename of convolute cards consists of the document_ID (unique identifier). The document ID is a project-specific unique identifier which consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. KK for Konvolutkarte) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Filename_Document_ID",
arche_prop="hasAlternativeTitle",
)
convolute_inventory_number = models.CharField(
max_length=250,
blank=True,
verbose_name="Inventory number of the convolute",
help_text="helptext for convolute_inventory_number",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Convolute_inventory_number",
)
convolute_subnumber = models.CharField(
max_length=250,
blank=True,
verbose_name="Convolute subnumber",
help_text="helptext for convolute_subnumber",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Convolute_subnumber",
arche_prop="hasNonLinkedIdentifier",
arche_prop_str_template="4DP convolute subnumber: <value>",
)
filename_old = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename old",
help_text="helptext for filename_old",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Filename_old",
)
creation_date_original = models.DateField(
blank=True, null=True,
verbose_name="Creation date of original document",
help_text="helptext for creation_date_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Creation_date_original",
arche_prop="hasCreatedDateOriginal",
)
creation_date_scan = models.DateField(
blank=True, null=True,
verbose_name="Creation date of scan",
help_text="helptext for creation_date_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Creation_date_scan",
arche_prop="hasCreatedDate",
)
creation_date_metadata = models.DateField(
blank=True, null=True,
verbose_name="Creation date of metadata",
help_text="helptext for creation_date_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Creation_date_metadata",
)
storage_folder_original = models.CharField(
max_length=250,
blank=True,
verbose_name="Storage folder of original document",
help_text="helptext for storage_folder_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Storage_folder_original",
)
resolution_scan_dpi = models.IntegerField(
blank=True, null=True,
verbose_name="Scan resolution",
help_text="helptext for resolution_scan_dpi",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Resolution_scan_dpi",
arche_prop="hasTechnicalInfo",
arche_prop_str_template="<value> dpi",
)
month = models.CharField(
max_length=250,
blank=True,
verbose_name="Month",
help_text="helptext for month",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Month",
)
position = models.CharField(
max_length=250,
blank=True,
verbose_name="Position",
help_text="helptext for position",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Position",
)
lowest_height_meters_standard_elevation_zero = models.CharField(
max_length=250,
blank=True,
verbose_name="lowest_height_meters_standard_elevation_zero",
help_text="helptext for lowest_height_meters_standard_elevation_zero",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Lowest_height_meters_standard_elevation_zero",
)
maximum_height_meters_standard_elevation_zero = models.CharField(
max_length=250,
blank=True,
verbose_name="maximum_height_meters_standard_elevation_zero",
help_text="helptext for maximum_height_meters_standard_elevation_zero",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Maximum_height_meters_standard_elevation_zero",
)
original_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment on the original document",
help_text="Comments from the creation of the original resource.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Original_comment",
)
digitisation_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment from digitisation",
help_text="Comments from digitisation.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Digitisation_comment",
arche_prop="hasNote",
)
file_extension = models.ForeignKey(
SkosConcept,
related_name='rvn_convolutecards_file_extension_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="File extension",
help_text="helptext for file_extension",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__File_extension",
)
copyright = models.ForeignKey(
SkosConcept,
related_name='rvn_convolutecards_copyright_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Copyright",
help_text="helptext for copyright",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Copyright",
)
access = models.ForeignKey(
SkosConcept,
related_name='rvn_convolutecards_access_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Access",
help_text="Whether access to the resource is restricted or if it is open to the public.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Access",
arche_prop="hasAccessRestriction",
)
site_id = models.ForeignKey(
SkosConcept,
related_name='rvn_convolutecards_site_id_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Site ID",
help_text="Abbreviation of Tell el-Daba is 'TD'.",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Site_ID",
)
equipment_scan = models.ForeignKey(
SkosConcept,
related_name='rvn_convolutecards_equipment_scan_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Equipment used for scanning",
help_text="helptext for equipment_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Equipment_scan",
arche_prop="hasUsedHardware",
)
source_original_copy_edited_copy = models.ForeignKey(
SkosConcept,
related_name='rvn_convolutecards_source_original_copy_edited_copy_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Wheter source is a original or a copy",
help_text="helptext for source_original_copy_edited_copy",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Source__original_copy_edited-copy",
)
original_material = models.ForeignKey(
SkosConcept,
related_name='rvn_convolutecards_original_material_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Material of original document",
help_text="helptext for original_material",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Original_material",
)
excavation_post_excavation = models.ForeignKey(
SkosConcept,
related_name='rvn_convolutecards_excavation_post_excavation_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Whether it was created during excavation or after (post-excavation)",
help_text="helptext for excavation_post_excavation",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Konvolutkarten/Convolute_ID.csv__Excavation__post_excavation",
)
orig_data_csv = models.TextField(
blank=True,
null=True,
verbose_name="The original data"
).set_extra(
is_public=True
)
fc_name = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field name"
).set_extra(
is_public=False
)
fc_directory = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field directory"
).set_extra(
is_public=False,
)
fc_type = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field type"
).set_extra(
is_public=False
)
fc_filename = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field filename"
).set_extra(
is_public=False
)
fc_extension = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field extension"
).set_extra(
is_public=False
)
fc_match = models.BooleanField(
default=False,
verbose_name="Matches FileChecker Entry",
)
class Meta:
ordering = [
'filename_document_id',
]
verbose_name = "Convolute cards"
def __str__(self):
if self.filename_document_id:
return "{}".format(self.filename_document_id)
else:
return "{}".format(self.legacy_id)
def field_dict(self):
return model_to_dict(self)
@classmethod
def import_in_arche(self):
return True
@classmethod
def get_listview_url(self):
return reverse('archiv:convolutecards_browse')
@classmethod
def get_createview_url(self):
return reverse('archiv:convolutecards_create')
def get_absolute_url(self):
return reverse('archiv:convolutecards_detail', kwargs={'pk': self.id})
def get_absolute_url(self):
return reverse('archiv:convolutecards_detail', kwargs={'pk': self.id})
def get_delete_url(self):
return reverse('archiv:convolutecards_delete', kwargs={'pk': self.id})
def get_edit_url(self):
return reverse('archiv:convolutecards_edit', kwargs={'pk': self.id})
def get_next(self):
next = self.__class__.objects.filter(id__gt=self.id)
if next:
return reverse(
'archiv:convolutecards_detail',
kwargs={'pk': next.first().id}
)
return False
def get_prev(self):
    """Return the detail URL of the object with the next-lower id, or False."""
    preceding = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
    if preceding:
        return reverse(
            'archiv:convolutecards_detail',
            kwargs={'pk': preceding.id}
        )
    return False
class Datenbase(models.Model):
    """Database files archived by the 'A Puzzle in 4D' project.

    Field declarations are unchanged; fixes in this revision:
    duplicate get_absolute_url removed, classmethods take `cls`,
    get_next now orders explicitly by id (bug fix), builtin-shadowing
    locals renamed.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_datenbase_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_datenbase_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Creator_original",
        arche_prop="hasCreator",
    )
    creator_archivalobject = models.ForeignKey(
        "Actor",
        related_name='rvn_datenbase_creator_archivalobject_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of archival object",
        help_text="Person who processed resource for digital long-term archiving.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__creator_archivalObject",
        arche_prop="hasContributor",
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Document_title",
        arche_prop="hasAlternativeTitle",
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year original",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Creation_year_original",
        arche_prop="hasCreatedDate",
    )
    creation_date_archivalobject = models.DateField(
        blank=True, null=True,
        verbose_name="Creation year archival object",
        help_text="helptext for creation_date_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Creation_date_archivalObject",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Creation_date_metadata",
    )
    path_filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in old TD archive",
        help_text="helptext for path_filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Path_filename_old",
    )
    path_filename_arche = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in ARCHE",
        help_text="helptext for path_filename_arche",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Path_filename_ARCHE",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_datenbase_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Excavation_object_ID",
    )
    archaeological_object_id = models.ManyToManyField(
        "ArchaeologicalObjectID",
        related_name='rvn_datenbase_archaeological_object_id_archaeologicalobjectid',
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Archaeological_object_ID",
    )
    relatedto = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__RelatedTo",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Digitisation_comment",
        arche_prop="hasNote",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_datenbase_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Document_type",
    )
    file_extension_original = models.ForeignKey(
        SkosConcept,
        related_name='rvn_datenbase_file_extension_original_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of original",
        help_text="helptext for file_extension_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__File_extension_original",
    )
    file_extension_archivalobject = models.ForeignKey(
        SkosConcept,
        related_name='rvn_datenbase_file_extension_archivalobject_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of archival object",
        help_text="helptext for file_extension_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__File_extension_archivalObject",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_datenbase_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_datenbase_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Access",
        arche_prop="hasAccessRestriction",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_datenbase_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Site_ID",
    )
    find_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_datenbase_find_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find material",
        help_text="helptext for find_material",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Find_material",
    )
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_datenbase_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or after (post-excavation)",
        help_text="helptext for excavation_post_excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Datenbanken/Database_metadata__Excavation__post_excavation",
    )
    # Raw imported CSV row plus FileChecker bookkeeping (internal).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    # NOTE(review): CharField with null=True is discouraged by Django docs;
    # kept to avoid a schema migration.
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'filename',
        ]
        verbose_name = "Database"

    def __str__(self):
        """Display the filename, falling back to the legacy id."""
        return "{}".format(self.filename or self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a dict (Django's model_to_dict)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:datenbase_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:datenbase_create')

    def get_absolute_url(self):
        """Return the detail-view URL of this object."""
        # Fixed: this method was defined twice; the duplicate is removed.
        return reverse('archiv:datenbase_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the delete-view URL of this object."""
        return reverse('archiv:datenbase_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the edit-view URL of this object."""
        return reverse('archiv:datenbase_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Bug fix: explicit order_by('id') — the queryset previously
        inherited Meta.ordering (filename), so .first() was not the
        next row by id.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:datenbase_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv:
            return reverse(
                'archiv:datenbase_detail',
                kwargs={'pk': prv.id}
            )
        return False
class Document4DPuzzleID(models.Model):
    """A 4DPuzzleID was created for documents that did not have an ID.

    Fixes in this revision: duplicate get_absolute_url removed,
    classmethods take `cls`, get_next ordering bug fixed.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_document4dpuzzleid_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Document_4DPuzzleID/Document_4DPuzzleID.csv__Creator_metadata",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_document4dpuzzleid_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Document_4DPuzzleID/Document_4DPuzzleID.csv__Document_type",
    )
    # NOTE(review): the verbose_name labels of document_id ("Filename ") and
    # original_4dpuzzle_id ("Document ID ") look swapped and carry trailing
    # spaces — confirm intent before changing the user-facing labels.
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename ",
        help_text="helptext for document_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Document_4DPuzzleID/Document_4DPuzzleID.csv__Document_ID",
    )
    original_4dpuzzle_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID ",
        help_text="helptext for original_4dpuzzle_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Document_4DPuzzleID/Document_4DPuzzleID.csv__Original_4DPuzzle_ID",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="helptext for document_title",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Document_4DPuzzleID/Document_4DPuzzleID.csv__Document_title",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="helptext for digitisation_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Document_4DPuzzleID/Document_4DPuzzleID.csv__Digitisation_comment",
    )
    corresponding_to = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="corresponding_to",
        help_text="helptext for corresponding_to",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Document_4DPuzzleID/Document_4DPuzzleID.csv__Corresponding_to",
    )
    # Raw imported CSV row plus FileChecker bookkeeping (internal).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'document_id',
        ]
        verbose_name = "Document 4DPuzzle ID"

    def __str__(self):
        """Display the document id, falling back to the legacy id."""
        return "{}".format(self.document_id or self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a dict (Django's model_to_dict)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:document4dpuzzleid_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:document4dpuzzleid_create')

    def get_absolute_url(self):
        """Return the detail-view URL of this object."""
        # Fixed: this method was defined twice; the duplicate is removed.
        return reverse('archiv:document4dpuzzleid_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the delete-view URL of this object."""
        return reverse('archiv:document4dpuzzleid_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the edit-view URL of this object."""
        return reverse('archiv:document4dpuzzleid_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Bug fix: explicit order_by('id') — Meta.ordering previously
        leaked into the queryset, so .first() was not the next row by id.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:document4dpuzzleid_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv:
            return reverse(
                'archiv:document4dpuzzleid_detail',
                kwargs={'pk': prv.id}
            )
        return False
class DocumentTypes(models.Model):
    """Types of documents.

    Fixes in this revision: duplicate get_absolute_url removed,
    classmethods take `cls`, get_next ordering bug fixed.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    document_type = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document type",
        help_text="Type of document.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_DocumentTypes/Tabelle1.csv__Document_type",
    )
    # NOTE(review): verbose_name/help_text duplicate document_type's —
    # presumably should read "Document maintype"; confirm before changing
    # the user-facing label.
    document_maintype = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document type",
        help_text="Type of document.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_DocumentTypes/Tabelle1.csv__Document_maintype",
    )
    dt_abbr = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document type abbreviated",
        help_text="Abbreviation of the document.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_DocumentTypes/Tabelle1.csv__DT_abbr",
    )
    document_subtype = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document Subtype",
        help_text="Subtype of a document. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_DocumentTypes/Tabelle1.csv__Document_subtype",
    )
    ds_abbr = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document subtype abbreviated",
        help_text="Abbreviation of the document subtype.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_DocumentTypes/Tabelle1.csv__DS_abbr",
    )
    description = models.TextField(
        blank=True, null=True,
        verbose_name="Description",
        help_text="Description of document type.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_DocumentTypes/Tabelle1.csv__Description",
    )
    analogue_borndigital = models.ForeignKey(
        SkosConcept,
        related_name='rvn_documenttypes_analogue_borndigital_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Analogue or born-digital",
        help_text="Whether the original document was analogue (and digitised during A Puzzle in 4D project) or born-digital (and converted into durable file format during A Puzzle in 4D project).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_DocumentTypes/Tabelle1.csv__Analog_bornDigital",
    )
    # Raw imported CSV row plus FileChecker bookkeeping (internal).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'document_type',
        ]
        verbose_name = "Document types"

    def __str__(self):
        """Display the document type, falling back to the legacy id."""
        return "{}".format(self.document_type or self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a dict (Django's model_to_dict)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:documenttypes_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:documenttypes_create')

    def get_absolute_url(self):
        """Return the detail-view URL of this object."""
        # Fixed: this method was defined twice; the duplicate is removed.
        return reverse('archiv:documenttypes_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the delete-view URL of this object."""
        return reverse('archiv:documenttypes_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the edit-view URL of this object."""
        return reverse('archiv:documenttypes_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Bug fix: explicit order_by('id') — Meta.ordering previously
        leaked into the queryset, so .first() was not the next row by id.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:documenttypes_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv:
            return reverse(
                'archiv:documenttypes_detail',
                kwargs={'pk': prv.id}
            )
        return False
class ExcavationObjectID(models.Model):
    """ID of excavation object (area, square etc.).

    Fixes in this revision: duplicate get_absolute_url removed,
    classmethods take `cls`, get_next ordering bug fixed.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_excavationobjectid_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="creator_metadata",
        help_text="Person who created the metadata or organization where metadata creation was carried out.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Creator_metadata",
    )
    excavation_object_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Identifier of Excavation Object",
        help_text="Identifier of an excavation object (excavation objects are objects that were created during excavation). Consists of Site_area_square_TypeOfObject, for example TD_A-II_l17_Planum1.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Excavation_object_ID",
    )
    profile_orientation = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Orientation of a profile",
        help_text="The orientation of a profile.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Profile_orientation",
    )
    excavation_id = models.ManyToManyField(
        "ExcavationSeasons",
        related_name='rvn_excavationobjectid_excavation_id_excavationseasons',
        blank=True,
        verbose_name="Excavation Season",
        help_text="Years during work at an excavation object has been carried out.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Excavation_id",
    )
    year = models.TextField(
        blank=True, null=True,
        verbose_name="Year",
        help_text="Years during work at an excavation object has been carried out.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Year",
    )
    season = models.TextField(
        blank=True, null=True,
        verbose_name="Season",
        help_text="Season during work at an excavation object has been carried out.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Season",
    )
    # Self-referential: an excavation object can be contained in another.
    part_of_excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_excavationobjectid_part_of_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Part of another Excavation Object.",
        help_text="An excavation object which was part of another excavation object.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Part_of_excavation_object_ID",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments of the metadata creator (e.g. noticing errors, etc.).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Digitisation_comment",
    )
    excavation_object_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_excavationobjectid_excavation_object_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Type of Excavation Object",
        help_text="Types of excavation objects: Areal, Detail, Grube, Oberflaeche, Planquadrat, Planum, Profil, Profilsteg, Schnitt, Situation, Sondage, Zwischenplanum.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Excavation_object_type",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_excavationobjectid_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of the name of the archaeological site, which is documented in the field drawing. ‘TD’ stands for Tell el-Daba.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Site_ID",
    )
    area = models.ForeignKey(
        SkosConcept,
        related_name='rvn_excavationobjectid_area_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Area",
        help_text="Excavations were carried out in 16 areas: A-I, A-II, A-III, A-IV, A-N, A-V, E-I, F-I, F-II, H-I, H-II, H-III, H-IV, H-V, H-VI, R-I.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Area",
    )
    square_trench = models.ForeignKey(
        SkosConcept,
        related_name='rvn_excavationobjectid_square_trench_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Square trench",
        help_text="Each excavation area has been divided into square trenches.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Square_trench",
    )
    planum = models.ForeignKey(
        SkosConcept,
        related_name='rvn_excavationobjectid_planum_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Planum",
        help_text="Excavations were carried out in spits and a ‘planum’ is an excavation surface. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Metadaten/Excavation_object_ID.csv__Planum",
    )
    # Raw imported CSV row plus FileChecker bookkeeping (internal).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'excavation_object_id',
        ]
        verbose_name = "Excavation Objects"

    def __str__(self):
        """Display the excavation object id, falling back to the legacy id."""
        return "{}".format(self.excavation_object_id or self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a dict (Django's model_to_dict)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:excavationobjectid_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:excavationobjectid_create')

    def get_absolute_url(self):
        """Return the detail-view URL of this object."""
        # Fixed: this method was defined twice; the duplicate is removed.
        return reverse('archiv:excavationobjectid_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the delete-view URL of this object."""
        return reverse('archiv:excavationobjectid_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the edit-view URL of this object."""
        return reverse('archiv:excavationobjectid_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Bug fix: explicit order_by('id') — Meta.ordering previously
        leaked into the queryset, so .first() was not the next row by id.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:excavationobjectid_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv:
            return reverse(
                'archiv:excavationobjectid_detail',
                kwargs={'pk': prv.id}
            )
        return False
class ExcavationSeasons(models.Model):
    """Excavation season.

    Fixes in this revision: duplicate get_absolute_url removed,
    classmethods take `cls`, get_next ordering bug fixed, user-facing
    typo "Ecxcavation ID" corrected.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    excavation_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Excavation ID",  # fixed typo: was "Ecxcavation ID"
        help_text="helptext for excavation_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ExcavationSeasons/ExcavationSeasons.csv__Excavation_id",
    )
    grabungskampagnen = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Excavations Seasons",
        help_text="helptext for grabungskampagnen",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ExcavationSeasons/ExcavationSeasons.csv__Grabungskampagnen",
    )
    start_date_end_date = DateRangeField(
        blank=True, null=True,
        verbose_name="Start date - end date",
        help_text="helptext for start_date_end_date",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ExcavationSeasons/ExcavationSeasons.csv__start-date/end-date",
    )
    year = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Year",
        help_text="helptext for year",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ExcavationSeasons/ExcavationSeasons.csv__Year",
    )
    season = models.ForeignKey(
        SkosConcept,
        related_name='rvn_excavationseasons_season_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Season",
        help_text="helptext for season",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ExcavationSeasons/ExcavationSeasons.csv__Season",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_excavationseasons_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="helptext for access",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_ExcavationSeasons/ExcavationSeasons.csv__Access",
    )
    # Raw imported CSV row plus FileChecker bookkeeping (internal).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'grabungskampagnen',
        ]
        verbose_name = "Excavation Seasons"

    def __str__(self):
        """Display the season label, falling back to the legacy id."""
        return "{}".format(self.grabungskampagnen or self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a dict (Django's model_to_dict)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:excavationseasons_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:excavationseasons_create')

    def get_absolute_url(self):
        """Return the detail-view URL of this object."""
        # Fixed: this method was defined twice; the duplicate is removed.
        return reverse('archiv:excavationseasons_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the delete-view URL of this object."""
        return reverse('archiv:excavationseasons_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the edit-view URL of this object."""
        return reverse('archiv:excavationseasons_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Bug fix: explicit order_by('id') — Meta.ordering previously
        leaked into the queryset, so .first() was not the next row by id.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:excavationseasons_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv:
            return reverse(
                'archiv:excavationseasons_detail',
                kwargs={'pk': prv.id}
            )
        return False
class Fielddrawing(models.Model):
    """Digitised field drawing (scan of an analogue excavation drawing).

    Field definitions are unchanged (any edit would imply a schema
    migration). Fixes relative to the generated original: the duplicate
    ``get_absolute_url`` definition was removed, ``get_next`` now orders
    explicitly by id (Meta.ordering previously leaked into the queryset),
    classmethod parameters are named ``cls``, and the builtin-shadowing
    local ``next`` was renamed.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename",
        help_text="Consists of document_ID and document_title, separated by two underscores. For example file name ‘TD_FZ_1234__TD_F-I_j21_Planum1’ consists of the document_ID ‘TD_FZ_1234’ which is separated by two underscores from the document title describing the contents of the document ‘TD(Tell el-Daba)_F/I(area)_j21(square)_ Planum 1’.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID",
        help_text="The project-specific unique identifier of the document which was scanned. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (FZ for Feldzeichnung) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234). For example document ID ‘TD_FZ_1234’ means ‘Tell el-Daba_field drawing_inventory number 1234’).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. For field drawings the document title consists of abbreviation for site_excavation area_square trench_content of field drawing (e.g.: TD_F-I_j21_Planum1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Document_title",
    )
    document_type = models.ManyToManyField(
        "DocumentTypes",
        related_name='rvn_fielddrawing_document_type_documenttypes',
        blank=True,
        verbose_name="Document type",
        help_text="Type of document – for field drawing metadata this is always ‘Feldzeichnung’ (Fielddrawing).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Document_type",
    )
    creation_date_original = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of original document",
        help_text="Date when the field drawing was made.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Creation_date_original",
        arche_prop="hasCreatedDateOriginal",
    )
    creation_date_scan = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date scan",
        help_text="Date when the scan was made.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Creation_date_scan",
        arche_prop="hasCreatedDate",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date metadata",
        help_text="Date when metadata was created.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Creation_date_metadata",
    )
    creator_metadata = models.ManyToManyField(
        "Actor",
        related_name='rvn_fielddrawing_creator_metadata_actor',
        blank=True,
        verbose_name="Creator of metadata",
        help_text="Person who created the metadata.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ManyToManyField(
        "Actor",
        related_name='rvn_fielddrawing_creator_original_actor',
        blank=True,
        verbose_name="Creator of original",
        help_text="Person who created the original field drawing.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Creator_original",
    )
    storage_folder_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Title of the folder where the original fielddrawing is kept",
        help_text="The text on the label of the folder in the analogue TD archive, where the original is held.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Storage_folder_original",
    )
    resolution_scan_ppi = models.IntegerField(
        blank=True, null=True,
        verbose_name="Scan resolution",
        help_text="Scan resolution settings. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Resolution_scan_dpi",
        arche_prop="hasTechnicalInfo",
        arche_prop_str_template="<value> dpi",
    )
    original_material = models.ManyToManyField(
        SkosConcept,
        related_name='rvn_fielddrawing_original_material_skosconcept',
        blank=True,
        verbose_name="Material of original document",
        help_text="Material of original (Millimetrepaper (Millimeterpapier), Transparentpapier (tracing paper), Kopierpapier (photocopy)).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Original_material",
    )
    original_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Inventory number of original",
        help_text="Inventory number of the original fielddrawing. An inventory number was given to each field drawing during the excavations. The inventory number is part of the unique identifier of the field drawing. If a field drawing did not have an inventory number, or there was an error with the inventory number, then a new inventory number consisting of the project name ‘4DPuzzle’ and a running number was created, e.g.: 4DPuzzle1234). The list of the new inventory numbers is kept in the Excel file ‘Metadaten.xlsl’, worksheet ‘Resource_4DPuzzle_number’).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Original_inventory_number",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP inventory number of original: <value>",
    )
    find_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Inventory number of a find drawn on the fielddrawing",
        help_text="Inventory number of a find which is shown on the fielddrawing.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Find_inventory_number",
    )
    amendment_drawn_by = models.ManyToManyField(
        "Actor",
        related_name='rvn_fielddrawing_amendment_drawn_by_actor',
        blank=True,
        verbose_name="Drawer of amendment to the fielddrawing",
        help_text="Person who made amendments to the field drawing.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Amendment_drawn_by",
    )
    amendment_date = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Amendment date",
        help_text="Date when the amendment was made.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Amendment_date",
    )
    drawer_monogram = models.ManyToManyField(
        "Actor",
        related_name='rvn_fielddrawing_drawer_monogram_actor',
        blank=True,
        verbose_name="Monogram of drawer",
        help_text="Monogram of the person who drew the field drawing. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Drawer_monogram",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_fielddrawing_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Excavation_object_ID",
    )
    archaeological_object_id = models.ManyToManyField(
        "ArchaeologicalObjectID",
        related_name='rvn_fielddrawing_archaeological_object_id_archaeologicalobjectid',
        blank=True,
        verbose_name="Archaeological object ID",
        help_text=" ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Archaeological_object_ID",
    )
    stratum_id_relative = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Stratum (relative)",
        help_text="Unique identifier of a relative stratum. Relative stratum is a group of stratigraphic units which are thought to belong to a chronological phase (the ID contains: abbreviation of site_excavation area_relative stratum e.g.: TD_F-I_a is the ID of stratum a in area F-I in Tell el-Daba).",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Stratum_ID_relative",
    )
    stratum_id_absolute_prepub = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Stratum (absolute)",
        help_text="Unique identifier of an absolute stratum. An absolute stratum is a group of stratigraphic units which were confirmed to belong to a chronological phase during post-excavation analysis but before publication (the ID contains: abbreviation of site_excavation area_absolute stratum e.g.: TD_F-I_A is the ID of the absolute stratum A in area F-I in Tell el-Daba). ",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Stratum_ID_absolute_prepub",
    )
    stratum_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Stratum (comment)",
        help_text="Transcript of the handwritten comments and notes on the stratum written on the field drawing. ",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Stratum_comment",
    )
    month = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Fieldwork month",
        help_text="Month when the field drawing was made.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Month",
    )
    scale = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Scale of drawing",
        help_text="Drawing scale of the field drawing.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Scale",
        arche_prop="hasTechnicalInfo",
        arche_prop_str_template="Scale: <value>",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Transcript of additional information found on the field drawing.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from creation of the scan (e.g. noticing of measurement errors, etc.) ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Digitisation_comment",
        arche_prop="hasNote",
    )
    excavation_id = models.ManyToManyField(
        "ExcavationSeasons",
        related_name='rvn_fielddrawing_excavation_id_excavationseasons',
        blank=True,
        verbose_name="Excavation Season",
        help_text="helptext for excavation_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Excavation_id",
    )
    creation_year_original = models.TextField(
        blank=True, null=True,
        verbose_name="Creation year of original document",
        help_text="Year when the field drawing was made.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Creation_year_original",
    )
    season = models.TextField(
        blank=True, null=True,
        verbose_name="Fieldwork season",
        help_text="Fieldwork season when the field drawing was made (H = Herbst = autumn; F = Frühling = spring).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Season",
    )
    file_extension = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fielddrawing_file_extension_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension",
        help_text="File extension of the scan.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__File_extension",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fielddrawing_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="Copyright holder of the document. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fielddrawing_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fielddrawing_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Site_ID",
    )
    equipment_scan = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fielddrawing_equipment_scan_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Scanner",
        help_text="The scanner which was used (brand, product name and number).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Equipment_scan",
        arche_prop="hasUsedHardware",
    )
    source_original_copy_edited_copy = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fielddrawing_source_original_copy_edited_copy_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Scan was either made from an original fielddrawing, a copy of a fielddrawing or copy of a fielddrawing that was edited",
        help_text="The original document was either a original field drawing, a photocopy of a field drawing or an edited photocopy of a field drawing (with handwritten comments).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Source__original_copy_edited_copy",
    )
    creator_scan = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fielddrawing_creator_scan_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of scan",
        help_text="Organisation who carried out the scanning.",
    ).set_extra(
        is_public=True,
        # NOTE(review): this data_lookup points at area "F-II" while every
        # other field on this model uses "F-I" — confirm against the importer.
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-II/Fielddrawings.csv__Creator_scan",
        arche_prop="hasDigitisingAgent",
    )
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fielddrawing_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or after (post-excavation)",
        help_text="When the document was created. Field drawings were always created in the field, so the entry is always ‘excavation’.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Feldzeichnungen_F-I/Fielddrawings.csv__Excavation__post_excavation",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # --- filechecker bookkeeping fields (fc_*) -------------------------------
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'filename',
        ]
        verbose_name = "Fielddrawing"

    def __str__(self):
        """Return the filename, falling back to the legacy ID."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a ``{name: value}`` dict."""
        return model_to_dict(self)

    @classmethod
    def is_binary_class(cls):
        """Flag used elsewhere: this model describes binary (file) resources."""
        return True

    @classmethod
    def import_in_arche(cls):
        """Flag used elsewhere: instances of this model are ingested into ARCHE."""
        return True

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the model's browse/list view."""
        return reverse('archiv:fielddrawing_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the model's create view."""
        return reverse('archiv:fielddrawing_create')

    def get_absolute_url(self):
        """Return the URL of this object's detail view."""
        # Bug fix: the original defined this method twice; the identical
        # duplicate was removed.
        return reverse('archiv:fielddrawing_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of this object's delete view."""
        return reverse('archiv:fielddrawing_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of this object's edit view."""
        return reverse('archiv:fielddrawing_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False."""
        # Bug fix: order_by('id') overrides Meta.ordering ('filename') so
        # .first() really is the successor by id, mirroring get_prev().
        successor = self.__class__.objects.filter(id__gt=self.id).order_by('id')
        if successor:
            return reverse(
                'archiv:fielddrawing_detail',
                kwargs={'pk': successor.first().id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prev = self.__class__.objects.filter(id__lt=self.id).order_by('-id')
        if prev:
            return reverse(
                'archiv:fielddrawing_detail',
                kwargs={'pk': prev.first().id}
            )
        return False
class Film(models.Model):
    """Analogue photographic film negatives.

    Field definitions are unchanged (edits would imply a schema migration).
    Fixes relative to the generated original: duplicate ``get_absolute_url``
    removed, ``get_next`` orders explicitly by id, classmethod parameters
    renamed to ``cls``, builtin-shadowing local ``next`` renamed.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    film_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Film ID",
        help_text="The film ID is a project-specific unique identifier. The film IDs consist of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. SWnegfilm for black &white negative film, FDfilm for colour slide film, FDdig for colour slide film digitised ) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Film_ID",
    )
    film_number = models.IntegerField(
        blank=True, null=True,
        verbose_name="Film number",
        help_text="helptext for film_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Film_number",
    )
    addition_film_identifier = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Addition film identifier",
        help_text="helptext for addition_film_identifier",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Addition_film_identifier",
    )
    foto_numbers_missing = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Foto numbers missing",
        help_text="helptext for foto_numbers_missing",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Foto_numbers_missing",
    )
    decomposition_phenomenon = models.TextField(
        blank=True, null=True,
        verbose_name="Decomposition phenomenon",
        help_text="The films were visually examined if they show signs of damage and decomposition. This field contains a description of the results.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Decomposition_phenomenon",
    )
    acetic_acid_smell = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Acetic acid smell",
        help_text="If acidic smell could be identified it is noted here.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Acetic_acid_smell",
    )
    storage_folder_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Storage folder original",
        help_text="Inscription visible on the label on the folder where the film is kept.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Storage_folder_original",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Digitisation_comment",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_film_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Document_type",
    )
    excavation_id = models.ManyToManyField(
        "ExcavationSeasons",
        related_name='rvn_film_excavation_id_excavationseasons',
        blank=True,
        verbose_name="Excavation Season",
        help_text="helptext for excavation_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Excavation_id",
    )
    creation_year_original = models.TextField(
        blank=True, null=True,
        verbose_name="Creation year original",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Creation_year_original",
    )
    film_format = models.ForeignKey(
        SkosConcept,
        related_name='rvn_film_film_format_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Film format",
        help_text="helptext for film_format",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Film_format",
    )
    film_brand = models.ForeignKey(
        SkosConcept,
        related_name='rvn_film_film_brand_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Film brand",
        help_text="helptext for film_brand",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Film_brand",
    )
    equipment_camera_brand = models.ForeignKey(
        SkosConcept,
        related_name='rvn_film_equipment_camera_brand_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Equipment camera brand",
        help_text="helptext for equipment_camera_brand",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Equipment_camera_brand",
    )
    original_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_film_original_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Material of original document",
        help_text="",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Filme/Films.csv__Original_material",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # --- filechecker bookkeeping fields (fc_*) -------------------------------
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'film_id',
        ]
        verbose_name = "Photographic Film"

    def __str__(self):
        """Return the film ID, falling back to the legacy ID."""
        if self.film_id:
            return "{}".format(self.film_id)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a ``{name: value}`` dict."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the model's browse/list view."""
        return reverse('archiv:film_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the model's create view."""
        return reverse('archiv:film_create')

    def get_absolute_url(self):
        """Return the URL of this object's detail view."""
        # Bug fix: the original defined this method twice; the identical
        # duplicate was removed.
        return reverse('archiv:film_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of this object's delete view."""
        return reverse('archiv:film_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of this object's edit view."""
        return reverse('archiv:film_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False."""
        # Bug fix: order_by('id') overrides Meta.ordering ('film_id') so
        # .first() really is the successor by id, mirroring get_prev().
        successor = self.__class__.objects.filter(id__gt=self.id).order_by('id')
        if successor:
            return reverse(
                'archiv:film_detail',
                kwargs={'pk': successor.first().id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        prev = self.__class__.objects.filter(id__lt=self.id).order_by('-id')
        if prev:
            return reverse(
                'archiv:film_detail',
                kwargs={'pk': prev.first().id}
            )
        return False
class Finddrawing(models.Model):
""" Digitised finddrawing """
legacy_id = models.CharField(
max_length=300, blank=True,
verbose_name="Legacy ID"
)
creator_metadata = models.ForeignKey(
"Actor",
related_name='rvn_finddrawing_creator_metadata_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of metadata",
help_text="helptext for creator_metadata",
).set_extra(
is_public=True,
arche_prop="hasMetadataCreator",
)
creator_original = models.ForeignKey(
"Actor",
related_name='rvn_finddrawing_creator_original_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of original document",
help_text="helptext for creator_original",
).set_extra(
is_public=True,
)
creator_scan = models.ForeignKey(
"Actor",
related_name='rvn_finddrawing_creator_scan_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of scan",
help_text="helptext for creator_scan",
).set_extra(
is_public=True,
arche_prop="hasDigitisingAgent",
)
document_type = models.ForeignKey(
"DocumentTypes",
related_name='rvn_finddrawing_document_type_documenttypes',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Document type",
help_text="helptext for document_type",
).set_extra(
is_public=True,
)
find_inventory_number = models.ForeignKey(
"FundinventarInventarnummern",
related_name='rvn_finddrawing_find_inventory_number_fundinventarinventarnummern',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Find inventory number",
help_text="helptext for find_inventory_number",
).set_extra(
is_public=True,
)
filename = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename ",
help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
).set_extra(
is_public=True,
arche_prop="hasAlternativeTitle",
)
document_id = models.CharField(
max_length=250,
blank=True,
verbose_name="Document ID ",
help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
).set_extra(
is_public=True,
arche_prop="hasNonLinkedIdentifier",
arche_prop_str_template="4DP document ID: <value>",
)
document_title = models.CharField(
max_length=250,
blank=True,
verbose_name="Document title",
help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
).set_extra(
is_public=True,
arche_prop="hasAlternativeTitle",
)
filename_old = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename old",
help_text="helptext for filename_old",
).set_extra(
is_public=False,
)
creation_date_original = models.DateField(
blank=True, null=True,
verbose_name="Creation date of original document",
help_text="helptext for creation_date_original",
).set_extra(
is_public=True,
)
creation_year_original = models.CharField(
max_length=250,
blank=True,
verbose_name="Creation year of original document",
help_text="helptext for creation_year_original",
).set_extra(
is_public=True,
)
creation_date_scan = models.DateField(
blank=True, null=True,
verbose_name="Creation date of scan",
help_text="helptext for creation_date_scan",
).set_extra(
is_public=True,
arche_prop="hasCreatedDateOriginal",
)
convolute_inventory_number = models.ForeignKey(
"FundinventarKonvolutnummern",
related_name='rvn_finddrawing_convolute_inventory_number_fundinventarkonvolutnummern',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Convolute inventory number",
help_text="helptext for convolute_inventory_number",
).set_extra(
is_public=True,
)
creation_date_metadata = models.DateField(
blank=True, null=True,
verbose_name="Creation date of metadata",
help_text="helptext for creation_date_metadata",
).set_extra(
is_public=True,
)
bone_stone_inventory_number = models.ForeignKey(
"FundinventarSteininventar",
related_name='rvn_finddrawing_bone_stone_inventory_number_fundinventarsteininventar',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Bone or stone inventory number",
help_text="helptext for bone_stone_inventory_number",
).set_extra(
is_public=True,
)
storage_folder_original = models.CharField(
max_length=250,
blank=True,
verbose_name="Storage folder of original document",
help_text="helptext for storage_folder_original",
).set_extra(
is_public=True,
)
equipment = models.CharField(
max_length=250,
blank=True,
verbose_name="Equiment",
help_text="helptext for equipment",
).set_extra(
is_public=True,
arche_prop="hasUsedHardware",
)
resolution_scan_dpi = models.IntegerField(
blank=True, null=True,
verbose_name="Scan resolution",
help_text="helptext for resolution_scan_dpi",
).set_extra(
is_public=True,
arche_prop="hasTechnicalInfo",
arche_prop_str_template="<value> dpi",
)
find_date = models.DateField(
blank=True, null=True,
verbose_name="Find datum",
help_text="helptext for find_date",
).set_extra(
is_public=True,
)
rendered_in_ink = models.CharField(
max_length=250,
blank=True,
verbose_name="Rendered in ink",
help_text="helptext for rendered_in_ink",
).set_extra(
is_public=True,
)
original_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment on the original document",
help_text="Comments from the creation of the original resource.",
).set_extra(
is_public=True,
)
digitisation_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment from digitisation",
help_text="Comments from digitisation.",
).set_extra(
is_public=True,
arche_prop="hasNote",
)
file_extension = models.ForeignKey(
SkosConcept,
related_name='rvn_finddrawing_file_extension_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="File extension ",
help_text="helptext for file_extension",
).set_extra(
is_public=True,
)
copyright = models.ForeignKey(
SkosConcept,
related_name='rvn_finddrawing_copyright_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Copyright",
help_text="helptext for copyright",
).set_extra(
is_public=True,
)
access = models.ForeignKey(
SkosConcept,
related_name='rvn_finddrawing_access_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Access",
help_text="Whether access to the resource is restricted or if it is open to the public.",
).set_extra(
is_public=True,
arche_prop="hasAccessRestriction",
)
site_id = models.ForeignKey(
SkosConcept,
related_name='rvn_finddrawing_site_id_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Site ID",
help_text="Abbreviation of Tell el-Daba is 'TD'.",
).set_extra(
is_public=False,
)
source_original_copy_edited_copy = models.ForeignKey(
SkosConcept,
related_name='rvn_finddrawing_source_original_copy_edited_copy_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Wheter source is a original or a copy",
help_text="helptext for source_original_copy_edited_copy",
).set_extra(
is_public=True,
)
original_material = models.ForeignKey(
SkosConcept,
related_name='rvn_finddrawing_original_material_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Material of original document",
help_text="helptext for original_material",
).set_extra(
is_public=True,
)
excavation_post_excavation = models.ForeignKey(
SkosConcept,
related_name='rvn_finddrawing_excavation_post_excavation_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Whether it was created during excavation or after (post-excavation)",
help_text="helptext for excavation_post_excavation",
).set_extra(
is_public=True,
)
orig_data_csv = models.TextField(
blank=True,
null=True,
verbose_name="The original data"
).set_extra(
is_public=True
)
fc_name = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field name"
).set_extra(
is_public=False
)
fc_directory = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field directory"
).set_extra(
is_public=False,
)
fc_type = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field type"
).set_extra(
is_public=False
)
fc_filename = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field filename"
).set_extra(
is_public=False
)
fc_extension = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field extension"
).set_extra(
is_public=False
)
fc_match = models.BooleanField(
default=False,
verbose_name="Matches FileChecker Entry",
)
    class Meta:
        # Default queryset ordering for this model.
        ordering = [
            'filename',
        ]
        verbose_name = "Finddrawing"
def __str__(self):
if self.filename:
return "{}".format(self.filename)
else:
return "{}".format(self.legacy_id)
    def field_dict(self):
        """Return this instance's fields as a plain dict (via model_to_dict)."""
        return model_to_dict(self)
@classmethod
def get_listview_url(self):
return reverse('archiv:finddrawing_browse')
@classmethod
def get_createview_url(self):
return reverse('archiv:finddrawing_create')
def get_absolute_url(self):
return reverse('archiv:finddrawing_detail', kwargs={'pk': self.id})
def get_absolute_url(self):
return reverse('archiv:finddrawing_detail', kwargs={'pk': self.id})
    def get_delete_url(self):
        """Return the URL of the delete view for this object."""
        return reverse('archiv:finddrawing_delete', kwargs={'pk': self.id})
    def get_edit_url(self):
        """Return the URL of the edit view for this object."""
        return reverse('archiv:finddrawing_edit', kwargs={'pk': self.id})
def get_next(self):
next = self.__class__.objects.filter(id__gt=self.id)
if next:
return reverse(
'archiv:finddrawing_detail',
kwargs={'pk': next.first().id}
)
return False
def get_prev(self):
prev = self.__class__.objects.filter(id__lt=self.id).order_by('-id')
if prev:
return reverse(
'archiv:finddrawing_detail',
kwargs={'pk': prev.first().id}
)
return False
class Findsheets(models.Model):
    """Digitised find sheets.

    Fixes in this revision: duplicate ``get_absolute_url`` removed,
    ``get_next`` now orders explicitly by id (Meta orders by 'filename'),
    classmethods use ``cls``. All field definitions, verbose names and
    ``data_lookup`` keys are unchanged.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    # --- Actors involved in creating the sheet, its scan and its metadata ---
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_findsheets_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_findsheets_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original document",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Creator_original",
    )
    creator_scan = models.ForeignKey(
        "Actor",
        related_name='rvn_findsheets_creator_scan_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of scan",
        help_text="helptext for creator_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Creator_scan",
        arche_prop="hasDigitisingAgent",
    )
    # --- Links to archaeological / excavation identifiers and inventories ---
    archaeological_object_id = models.ForeignKey(
        "ArchaeologicalObjectID",
        related_name='rvn_findsheets_archaeological_object_id_archaeologicalobjectid',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Archaeological_object_ID",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_findsheets_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Document_type",
    )
    find_inventory_number = models.ForeignKey(
        "FundinventarInventarnummern",
        related_name='rvn_findsheets_find_inventory_number_fundinventarinventarnummern',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find inventory number",
        help_text="helptext for find_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Find_inventory_number",
    )
    convolute_inventory_number = models.ForeignKey(
        "FundinventarKonvolutnummern",
        related_name='rvn_findsheets_convolute_inventory_number_fundinventarkonvolutnummern',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Convolute inventory number",
        help_text="helptext for convolute_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Convolute_inventory_number",
    )
    bone_stone_inventory_number = models.ForeignKey(
        "FundinventarSteininventar",
        related_name='rvn_findsheets_bone_stone_inventory_number_fundinventarsteininventar',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Bone or stone inventory number",
        help_text="helptext for bone_stone_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Bone_stone_inventory_number",
    )
    # --- File naming and document identification ---
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename ",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID ",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Document_title",
        arche_prop="hasAlternativeTitle",
    )
    filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename old",
        help_text="helptext for filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Filename_old",
    )
    # --- Creation dates (original, scan, metadata) ---
    creation_date_original = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of original document",
        help_text="helptext for creation_date_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Creation_date_original",
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year of original document",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Creation_year_original",
    )
    # NOTE(review): scan date mapped to "hasCreatedDateOriginal" — looks
    # inconsistent with Fotosgescannt (which maps scan -> hasCreatedDate);
    # kept as-is, verify against the ARCHE mapping.
    creation_date_scan = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of scan",
        help_text="helptext for creation_date_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Creation_date_scan",
        arche_prop="hasCreatedDateOriginal",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Creation_date_metadata",
    )
    resolution_scan_dpi = models.IntegerField(
        blank=True, null=True,
        verbose_name="Scan resolution",
        help_text="helptext for resolution_scan_dpi",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Resolution_scan_dpi",
        arche_prop="hasTechnicalInfo",
        arche_prop_str_template="<value> dpi",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_findsheets_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Excavation_object_ID",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Digitisation_comment",
        arche_prop="hasNote",
    )
    # --- Controlled-vocabulary (SkosConcept) classification fields ---
    file_extension = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_file_extension_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension ",
        help_text="helptext for file_extension",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__File_extension",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    storage_original = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_storage_original_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Storage of original document",
        help_text="helptext for storage_original",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Storage_original",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Site_ID",
    )
    equipment_scan = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_equipment_scan_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Equipment for scan",
        help_text="helptext for equipment_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Equipment_scan",
        arche_prop="hasUsedHardware",
    )
    source_original_copy_edited_copy = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_source_original_copy_edited_copy_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Wheter source is a original or a copy",
        help_text="helptext for source_original_copy_edited_copy",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Source__original_copy_edited-copy",
    )
    original_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_original_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Material of original document",
        help_text="helptext for original_material",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Original_material",
    )
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_findsheets_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or after (post-excavation)",
        help_text="helptext for excavation_post_excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundzettel/Find_sheets.csv__Excavation__post_excavation",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # --- FileChecker bookkeeping fields (all non-public) ---
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering for this model.
        ordering = [
            'filename',
        ]
        verbose_name = "Findsheets"

    def __str__(self):
        """Return the filename if set, otherwise fall back to the legacy id."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a plain dict (via model_to_dict)."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Whether records of this model are imported into ARCHE."""
        return True

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse (list) view for this model."""
        return reverse('archiv:findsheets_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:findsheets_create')

    def get_absolute_url(self):
        """Return the canonical detail URL for this object."""
        return reverse('archiv:findsheets_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this object."""
        return reverse('archiv:findsheets_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this object."""
        return reverse('archiv:findsheets_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Explicitly ordered by id: Meta.ordering is by 'filename', so an
        unordered queryset would not yield the next object by id.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt is not None:
            return reverse(
                'archiv:findsheets_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        earlier = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if earlier is not None:
            return reverse(
                'archiv:findsheets_detail',
                kwargs={'pk': earlier.id}
            )
        return False
class Fotoborndigital(models.Model):
    """Folder with born-digital photos.

    Fixes in this revision: duplicate ``get_absolute_url`` removed,
    ``get_next`` now orders explicitly by id (Meta orders by
    'folder_name'), classmethods use ``cls``. All field definitions,
    verbose names and ``data_lookup`` keys are unchanged.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_fotoborndigital_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    # --- Folder naming and identification ---
    folder_name = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Folder name",
        help_text="Folder name is composed like the filenames: it consists of a folder ID and a folder title, separated by two underscores. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Folder_name",
    )
    folder_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Folder ID",
        help_text="The project-specific unique identifier of the folder. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (DF for digital photo) and a 4DPuzzle inventory number.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Folder_ID",
    )
    folder_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Folder title",
        help_text="A description of the content of the folder. It allows information about the contents of the file to be understood by a human being without opening it. It contains information about inventory numbers, excavation objects, find types etc.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Folder_title",
    )
    folder_name_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Old folder name",
        help_text="helptext for folder_name_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Folder_name_old",
    )
    path_filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in old TD archive",
        help_text="helptext for path_filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Path_filename_old",
    )
    path_filename_arche = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in ARCHE",
        help_text="helptext for path_filename_arche",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Path_filename_ARCHE",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Creation_date_metadata",
    )
    find_inventory_number_from_to = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Inventory number of a find ",
        help_text="helptext for find_inventory_number_from_to",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Find_inventory_number|from/to",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_fotoborndigital_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Excavation_object_ID",
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year original",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Creation_year_original",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Digitisation_comment",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_fotoborndigital_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Document_type",
    )
    # --- Controlled-vocabulary (SkosConcept) classification fields ---
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fotoborndigital_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fotoborndigital_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fotoborndigital_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_born_digital/Fotos_born_digital.csv__Site_ID",
    )
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # --- FileChecker bookkeeping fields (all non-public) ---
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering for this model.
        ordering = [
            'folder_name',
        ]
        verbose_name = "Fotos born digital"

    def __str__(self):
        """Return the folder name if set, otherwise fall back to the legacy id."""
        if self.folder_name:
            return "{}".format(self.folder_name)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's fields as a plain dict (via model_to_dict)."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse (list) view for this model."""
        return reverse('archiv:fotoborndigital_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:fotoborndigital_create')

    def get_absolute_url(self):
        """Return the canonical detail URL for this object."""
        return reverse('archiv:fotoborndigital_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this object."""
        return reverse('archiv:fotoborndigital_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this object."""
        return reverse('archiv:fotoborndigital_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the object with the next-higher id, or False.

        Explicitly ordered by id: Meta.ordering is by 'folder_name', so an
        unordered queryset would not yield the next object by id.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt is not None:
            return reverse(
                'archiv:fotoborndigital_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the object with the next-lower id, or False."""
        earlier = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if earlier is not None:
            return reverse(
                'archiv:fotoborndigital_detail',
                kwargs={'pk': earlier.id}
            )
        return False
class Fotosgescannt(models.Model):
""" Digitised photos """
legacy_id = models.CharField(
max_length=300, blank=True,
verbose_name="Legacy ID"
)
creator_metadata = models.ForeignKey(
"Actor",
related_name='rvn_fotosgescannt_creator_metadata_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of metadata",
help_text="helptext for creator_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Creator_metadata",
arche_prop="hasMetadataCreator",
)
creator_original = models.ForeignKey(
"Actor",
related_name='rvn_fotosgescannt_creator_original_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of original photo",
help_text="helptext for creator_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Creator_original",
)
creator_scan = models.ForeignKey(
"Actor",
related_name='rvn_fotosgescannt_creator_scan_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of scan",
help_text="helptext for creator_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Creator_scan",
arche_prop="hasDigitisingAgent",
)
filename = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename",
help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Filename",
arche_prop="hasAlternativeTitle",
)
document_id = models.CharField(
max_length=250,
blank=True,
verbose_name="Document ID",
help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Document_ID",
arche_prop="hasNonLinkedIdentifier",
arche_prop_str_template="4DP document ID: <value>",
)
document_title = models.CharField(
max_length=250,
blank=True,
verbose_name="Document title",
help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Document_title",
arche_prop="hasAlternativeTitle",
)
filename_old = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename old ",
help_text="helptext for filename_old",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Filename_old",
)
film_number = models.IntegerField(
blank=True, null=True,
verbose_name="Film number",
help_text="helptext for film_number",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Film_number",
)
photo_number = models.CharField(
max_length=250,
blank=True,
verbose_name="Photo number",
help_text="helptext for photo_number",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Photo_number",
arche_prop="hasNonLinkedIdentifier",
arche_prop_str_template="4DP photo number: <value>",
)
creation_date_original = models.DateField(
blank=True, null=True,
verbose_name="Creation date of analogue photo",
help_text="helptext for creation_date_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Creation_date_original",
)
excavation_id = models.ManyToManyField(
"ExcavationSeasons",
related_name='rvn_fotosgescannt_excavation_id_excavationseasons',
blank=True,
verbose_name="Excavation Season",
help_text="helptext for excavation_id",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Excavation_id",
)
creation_year_original = models.CharField(
max_length=250,
blank=True,
verbose_name="Creation year of analogue photo",
help_text="helptext for creation_year_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Creation_year_original",
arche_prop="hasCreatedDateOriginal"
)
creation_date_scan = models.DateField(
blank=True, null=True,
verbose_name="Creation date of scan",
help_text="helptext for creation_date_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Creation_date_scan",
arche_prop="hasCreatedDate",
)
creation_date_metadata = models.DateField(
blank=True, null=True,
verbose_name="Creation date of metadata",
help_text="helptext for creation_date_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Creation_date_metadata",
)
document_type = models.ForeignKey(
"DocumentTypes",
related_name='rvn_fotosgescannt_document_type_documenttypes',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Document type",
help_text="Digitised photo",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Document_type",
)
resolution_scan_ppi = models.IntegerField(
blank=True, null=True,
verbose_name="Resolution of scan",
help_text="helptext for resolution_scan_ppi",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Resolution_scan_ppi",
arche_prop="hasTechnicalInfo",
arche_prop_str_template="<value> ppi",
)
pixel_size = models.CharField(
max_length=250,
blank=True,
verbose_name="Pixel size",
help_text="helptext for pixel_size",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Pixel_size",
arche_prop="hasExtent",
)
find_inventory_number = models.CharField(
max_length=250,
blank=True,
verbose_name="Find inventor number",
help_text="helptext for find_inventory_number",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Find_inventory_number",
)
excavation_object_id = models.ManyToManyField(
"ExcavationObjectID",
related_name='rvn_fotosgescannt_excavation_object_id_excavationobjectid',
blank=True,
verbose_name="Excavation object ID",
help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Excavation_object_ID",
)
archaeological_object_id = models.ManyToManyField(
"ArchaeologicalObjectID",
related_name='rvn_fotosgescannt_archaeological_object_id_archaeologicalobjectid',
blank=True,
verbose_name="Archaeological object ID",
help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Archaeological_object_ID",
)
season = models.CharField(
max_length=250,
blank=True,
verbose_name="Season ",
help_text="helptext for season",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Season",
)
original_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment on the original document",
help_text="Comments from the creation of the original resource.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Original_comment",
)
digitisation_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment from digitisation",
help_text="Comments from digitisation.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Digitisation_comment",
arche_prop="hasNote",
)
film_id = models.ForeignKey(
"Film",
related_name='rvn_fotosgescannt_film_id_film',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Film ID",
help_text="helptext for film_id",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Film_ID",
arche_prop="hasNonLinkedIdentifier",
arche_prop_str_template="4DP film ID: <value>",
)
file_extension = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_file_extension_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="File extension of scan",
help_text="helptext for file_extension",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__File_extension",
)
copyright = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_copyright_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Copyright",
help_text="helptext for copyright",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Copyright",
)
access = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_access_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Access",
help_text="Whether access to the resource is restricted or if it is open to the public.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Access",
arche_prop="hasAccessRestriction",
)
site_id = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_site_id_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Site ID",
help_text="Abbreviation of Tell el-Daba is 'TD'.",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Site_ID",
)
equipment_scan = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_equipment_scan_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Equipment used for scanning",
help_text="helptext for equipment_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Equipment_scan",
arche_prop="hasUsedHardware",
)
source_original_copy_edited_copy = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_source_original_copy_edited_copy_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Wheter source is a original or a copy",
help_text="helptext for source_original_copy_edited_copy",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Source__original_copy_edited-copy",
)
archaeological_object_type = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_archaeological_object_type_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Archeological object type",
help_text="helptext for archaeological_object_type",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Archaeological_object_type",
)
find_type = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_find_type_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Find type",
help_text="helptext for find_type",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Find_type",
)
find_material = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_find_material_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Find material",
help_text="helptext for find_material",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__Find_material",
)
excavation_post_excavation = models.ForeignKey(
SkosConcept,
related_name='rvn_fotosgescannt_excavation_post_excavation_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Whether it was created during excavation or after (post-excavation)",
help_text="helptext for excavation_post_excavation",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fotos_gescannt/Photos.csv__excavation__post-excavation",
)
orig_data_csv = models.TextField(
blank=True,
null=True,
verbose_name="The original data"
).set_extra(
is_public=True
)
fc_name = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field name"
).set_extra(
is_public=False
)
fc_directory = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field directory"
).set_extra(
is_public=False,
)
fc_type = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field type"
).set_extra(
is_public=False
)
fc_filename = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field filename"
).set_extra(
is_public=False
)
fc_extension = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field extension"
).set_extra(
is_public=False
)
fc_match = models.BooleanField(
default=False,
verbose_name="Matches FileChecker Entry",
)
    class Meta:
        # Default queryset ordering; note this also applies to otherwise
        # unordered .filter(...) querysets such as those in get_next/get_prev.
        ordering = [
            'filename',
        ]
        verbose_name = "Fotos gescannt"
def __str__(self):
if self.filename:
return "{}".format(self.filename)
else:
return "{}".format(self.legacy_id)
def field_dict(self):
return model_to_dict(self)
@classmethod
def get_listview_url(self):
return reverse('archiv:fotosgescannt_browse')
@classmethod
def get_createview_url(self):
return reverse('archiv:fotosgescannt_create')
def get_absolute_url(self):
return reverse('archiv:fotosgescannt_detail', kwargs={'pk': self.id})
def get_absolute_url(self):
return reverse('archiv:fotosgescannt_detail', kwargs={'pk': self.id})
def get_delete_url(self):
return reverse('archiv:fotosgescannt_delete', kwargs={'pk': self.id})
def get_edit_url(self):
return reverse('archiv:fotosgescannt_edit', kwargs={'pk': self.id})
def get_next(self):
next = self.__class__.objects.filter(id__gt=self.id)
if next:
return reverse(
'archiv:fotosgescannt_detail',
kwargs={'pk': next.first().id}
)
return False
def get_prev(self):
prev = self.__class__.objects.filter(id__lt=self.id).order_by('-id')
if prev:
return reverse(
'archiv:fotosgescannt_detail',
kwargs={'pk': prev.first().id}
)
return False
class Fundinventar4DPuzzleID(models.Model):
    """ A 4DPuzzleID was created for find inventories that did not have an ID """
    # NOTE(review): `.set_extra()` is a project-specific field extension that
    # must return the field object itself; its kwargs carry ingest metadata
    # (source CSV column, public visibility) — confirm against its definition.
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    excavation_object_id = models.ForeignKey(
        "ExcavationObjectID",
        related_name='rvn_fundinventar4dpuzzleid_excavation_object_id_excavationobjectid',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Excavation object ID",
        help_text="helptext for excavation_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Excavation_object_ID",
    )
    find_inventory_4dpuzzle_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Find inventory 4DPuzzle number",
        help_text="helptext for find_inventory_4dpuzzle_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Find_inventory_4DPuzzle_number",
    )
    find_local_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Find local number",
        help_text="helptext for find_local_number",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Find_local_number",
    )
    convolute_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Convolute inventory number",
        help_text="helptext for convolute_inventory_number",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Convolute_inventory_number",
    )
    corresponding_to_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Corresponding to inventory number",
        help_text="helptext for corresponding_to_inventory_number",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Corresponding_to_inventory_number",
    )
    find_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Find comment",
        help_text="helptext for find_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Find_comment",
    )
    stratum_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Stratum Comment",
        help_text="helptext for stratum_comment",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Stratum_comment",
    )
    find_date = models.DateField(
        blank=True, null=True,
        verbose_name="Find date",
        help_text="helptext for find_date",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Find_date",
    )
    storage_find = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Storage of find",
        help_text="helptext for storage_find",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Storage_find",
    )
    relatedto = models.ManyToManyField(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_relatedto_skosconcept',
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__RelatedTo",
    )
    find_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_find_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find material",
        help_text="helptext for find_material",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Find_material",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="helptext for digitisation_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Digitisation_comment",
    )
    find_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_find_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find type",
        help_text="helptext for find_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Find_type",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="helptext for access",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Access",
    )
    uncertainty_excavation_digitisation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_uncertainty_excavation_digitisation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or digital",
        help_text="helptext for uncertainty_excavation_digitisation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Uncertainty__excavation_digitisation",
    )
    creator_metadata = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_creator_metadata_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Creator_metadata",
    )
    archaeological_object_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_archaeological_object_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="helptext for archaeological_object_id",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Archaeological_object_ID",
    )
    stratum_id_relative = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_stratum_id_relative_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID relative",
        help_text="helptext for stratum_id_relative",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Stratum_ID_relative",
    )
    stratum_id_absolute_prepub = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_stratum_id_absolute_prepub_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID absolute pre publication",
        help_text="helptext for stratum_id_absolute_prepub",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Stratum_ID_absolute_prepub",
    )
    phase_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventar4dpuzzleid_phase_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Phase ID",
        help_text="helptext for phase_id",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_4DPuzzleID/Find_inventory_4DPuzzle_number.csv__Phase_ID",
    )
    # Raw CSV row this record was imported from (kept for provenance).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # --- Filechecker bookkeeping fields (filled by a separate
    # file-checking step, not from the metadata CSV — TODO confirm). ---
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'find_inventory_4dpuzzle_number',
        ]
        verbose_name = "Fundinventar 4DPuzzle ID"

    def __str__(self):
        if self.find_inventory_4dpuzzle_number:
            return "{}".format(self.find_inventory_4dpuzzle_number)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Serialise this instance's concrete fields into a plain dict."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the model's list/browse view.

        Fix: classmethod first parameter renamed from ``self`` to ``cls``.
        """
        return reverse('archiv:fundinventar4dpuzzleid_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the model's create view.

        Fix: classmethod first parameter renamed from ``self`` to ``cls``.
        """
        return reverse('archiv:fundinventar4dpuzzleid_create')

    def get_absolute_url(self):
        """Return the canonical detail-view URL of this object.

        Fix: the generator emitted this method twice, byte-identical;
        the shadowed duplicate was removed.
        """
        return reverse('archiv:fundinventar4dpuzzleid_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this object."""
        return reverse('archiv:fundinventar4dpuzzleid_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this object."""
        return reverse('archiv:fundinventar4dpuzzleid_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the record with the next-higher id, or False.

        Fixes: explicit ``order_by('id')`` so ``Meta.ordering`` does not make
        ``.first()`` return the wrong row; single query via ``.first()``;
        local no longer shadows the ``next`` builtin.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:fundinventar4dpuzzleid_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the record with the next-lower id, or False.

        Fix: single query via ``.first()`` instead of evaluating the
        queryset twice.
        """
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv:
            return reverse(
                'archiv:fundinventar4dpuzzleid_detail',
                kwargs={'pk': prv.id}
            )
        return False
class FundinventarInventarnummern(models.Model):
    """ Inventory numbers of find inventories """
    # NOTE(review): `.set_extra()` is a project-specific field extension that
    # must return the field object itself; its kwargs carry ingest metadata
    # (source CSV column, public visibility) — confirm against its definition.
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_fundinventarinventarnummern_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Creator_metadata",
    )
    archaeological_object_id = models.ForeignKey(
        "ArchaeologicalObjectID",
        related_name='rvn_fundinventarinventarnummern_archaeological_object_id_archaeologicalobjectid',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="helptext for archaeological_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Archaeological_object_ID",
    )
    # Self-referential FK: links an inventory number to its counterpart.
    corresponding_to_inventory_number = models.ForeignKey(
        "FundinventarInventarnummern",
        related_name='rvn_fundinventarinventarnummern_corresponding_to_inventory_number_fundinventarinventarnummern',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Corresponding to inventory number",
        help_text="helptext for corresponding_to_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Corresponding_to_inventory_number",
    )
    find_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Find inventory number",
        help_text="helptext for find_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Find_inventory_number",
    )
    find_local_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Find local number",
        help_text="helptext for find_local_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Find_local_number",
    )
    convolute_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Convolute inventory number",
        help_text="helptext for convolute_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Convolute_inventory_number",
    )
    find_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Find comment",
        help_text="helptext for find_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Find_comment",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_fundinventarinventarnummern_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="helptext for excavation_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Excavation_object_ID",
    )
    find_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarinventarnummern_find_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find material",
        help_text="helptext for find_material",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Find_material",
    )
    find_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarinventarnummern_find_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find type",
        help_text="helptext for find_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Find_type",
    )
    stratum_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Stratum Comment",
        help_text="helptext for stratum_comment",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Stratum_comment",
    )
    stratum_id_relative = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarinventarnummern_stratum_id_relative_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID relative",
        help_text="helptext for stratum_id_relative",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Stratum_ID_relative",
    )
    find_date = models.DateField(
        blank=True, null=True,
        verbose_name="Find date",
        help_text="helptext for find_date",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Find_date",
    )
    storage_find = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Storage of find",
        help_text="helptext for storage_find",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Storage_find",
    )
    stratum_id_absolute_prepub = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarinventarnummern_stratum_id_absolute_prepub_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID absolute pre publication",
        help_text="helptext for stratum_id_absolute_prepub",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Stratum_ID_absolute_prepub",
    )
    phase_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarinventarnummern_phase_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Phase ID",
        help_text="helptext for phase_id",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Phase_ID",
    )
    relatedto = models.ManyToManyField(
        SkosConcept,
        related_name='rvn_fundinventarinventarnummern_relatedto_skosconcept',
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__RelatedTo",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarinventarnummern_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="helptext for access",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Access",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="helptext for digitisation_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Digitisation_comment",
    )
    uncertainty_excavation_digitisation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarinventarnummern_uncertainty_excavation_digitisation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or digital",
        help_text="helptext for uncertainty_excavation_digitisation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Inventarnummern/Find_inventory_number.csv__Uncertainty__excavation_digitisation",
    )
    # Raw CSV row this record was imported from (kept for provenance).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # --- Filechecker bookkeeping fields (filled by a separate
    # file-checking step, not from the metadata CSV — TODO confirm). ---
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        ordering = [
            'find_inventory_number',
        ]
        verbose_name = "Fundinventar Inventarnummern"

    def __str__(self):
        if self.find_inventory_number:
            return "{}".format(self.find_inventory_number)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Serialise this instance's concrete fields into a plain dict."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the model's list/browse view.

        Fix: classmethod first parameter renamed from ``self`` to ``cls``.
        """
        return reverse('archiv:fundinventarinventarnummern_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the model's create view.

        Fix: classmethod first parameter renamed from ``self`` to ``cls``.
        """
        return reverse('archiv:fundinventarinventarnummern_create')

    def get_absolute_url(self):
        """Return the canonical detail-view URL of this object.

        Fix: the generator emitted this method twice, byte-identical;
        the shadowed duplicate was removed.
        """
        return reverse('archiv:fundinventarinventarnummern_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this object."""
        return reverse('archiv:fundinventarinventarnummern_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this object."""
        return reverse('archiv:fundinventarinventarnummern_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the record with the next-higher id, or False.

        Fixes: explicit ``order_by('id')`` so ``Meta.ordering`` does not make
        ``.first()`` return the wrong row; single query via ``.first()``;
        local no longer shadows the ``next`` builtin.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:fundinventarinventarnummern_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the record with the next-lower id, or False.

        Fix: single query via ``.first()`` instead of evaluating the
        queryset twice.
        """
        prv = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prv:
            return reverse(
                'archiv:fundinventarinventarnummern_detail',
                kwargs={'pk': prv.id}
            )
        return False
class FundinventarKonvolutnummern(models.Model):
""" Inventory of convolute numbers """
legacy_id = models.CharField(
max_length=300, blank=True,
verbose_name="Legacy ID"
)
convolute_inventory_number = models.CharField(
max_length=250,
blank=True,
verbose_name="Convolute inventory number",
help_text="helptext for convolute_inventory_number",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Convolute_inventory_number",
)
convolute_subnumber = models.CharField(
max_length=250,
blank=True,
verbose_name="Convolute subnumber",
help_text="helptext for convolute_subnumber",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Convolute_subnumber",
)
find_local_number = models.CharField(
max_length=250,
blank=True,
verbose_name="Find local number",
help_text="helptext for find_local_number",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Find_local_number",
)
corresponding_to_inventory_number = models.CharField(
max_length=250,
blank=True,
verbose_name="Corresponding to inventory number",
help_text="helptext for corresponding_to_inventory_number",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Corresponding_to_inventory_number",
)
find_material = models.ManyToManyField(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_find_material_skosconcept',
blank=True,
verbose_name="Find material",
help_text="helptext for find_material",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Find_material",
)
find_comment = models.TextField(
blank=True, null=True,
verbose_name="Find comment",
help_text="helptext for find_comment",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Find_comment",
)
excavation_object_id = models.ManyToManyField(
"ExcavationObjectID",
related_name='rvn_fundinventarkonvolutnummern_excavation_object_id_excavationobjectid',
blank=True,
verbose_name="Excavation object ID",
help_text="helptext for excavation_object_id",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Excavation_object_ID",
)
archaeological_object_id = models.ManyToManyField(
"ArchaeologicalObjectID",
related_name='rvn_fundinventarkonvolutnummern_archaeological_object_id_archaeologicalobjectid',
blank=True,
verbose_name="Archaeological object ID",
help_text="helptext for archaeological_object_id",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Archaeological_object_ID",
)
find_type = models.ForeignKey(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_find_type_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Find type",
help_text="helptext for find_type",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Find_type",
)
stratum_id_relative = models.ForeignKey(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_stratum_id_relative_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Stratum ID relative",
help_text="helptext for stratum_id_relative",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Stratum_ID_relative",
)
stratum_comment = models.TextField(
blank=True, null=True,
verbose_name="Stratum Comment",
help_text="helptext for stratum_comment",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Stratum_comment",
arche_prop="--",
)
stratum_id_absolute_prepub = models.ForeignKey(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_stratum_id_absolute_prepub_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Stratum ID absolute pre publication",
help_text="helptext for stratum_id_absolute_prepub",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Stratum_ID_absolute_prepub",
)
find_date = models.DateField(
blank=True, null=True,
verbose_name="Find date",
help_text="helptext for find_date",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Find_date",
)
phase_id = models.ForeignKey(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_phase_id_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Phase ID",
help_text="helptext for phase_id",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Phase_ID",
)
storage_find = models.ForeignKey(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_storage_find_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Storage of find",
help_text="helptext for storage_find",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Storage_find",
)
access = models.ForeignKey(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_access_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Access",
help_text="helptext for access",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Access",
)
relatedto = models.CharField(
max_length=250,
blank=True,
verbose_name="File is related to other TD resources",
help_text="helptext for relatedto",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__RelatedTo",
)
uncertainty_excavation_digitisation = models.ForeignKey(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_uncertainty_excavation_digitisation_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Whether it was created during excavation or digital",
help_text="helptext for uncertainty_excavation_digitisation",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Uncertainty__excavation_digitisation",
)
digitisation_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment from digitisation",
help_text="helptext for digitisation_comment",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Digitisation_comment",
)
creator_metadata = models.ForeignKey(
SkosConcept,
related_name='rvn_fundinventarkonvolutnummern_creator_metadata_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of metadata",
help_text="helptext for creator_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Konvolutnummern/Convolute_inventory_number.csv__Creator_metadata",
)
orig_data_csv = models.TextField(
blank=True,
null=True,
verbose_name="The original data"
).set_extra(
is_public=True
)
fc_name = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field name"
).set_extra(
is_public=False
)
fc_directory = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field directory"
).set_extra(
is_public=False,
)
fc_type = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field type"
).set_extra(
is_public=False
)
fc_filename = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field filename"
).set_extra(
is_public=False
)
fc_extension = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field extension"
).set_extra(
is_public=False
)
fc_match = models.BooleanField(
default=False,
verbose_name="Matches FileChecker Entry",
)
class Meta:
    # Default queryset ordering and the human-readable model name
    # shown in the Django admin.
    ordering = [
        'convolute_inventory_number',
    ]
    verbose_name = "Fundinventar Konvolutnummern"
def __str__(self):
    """Human-readable label: the convolute inventory number when set,
    otherwise the legacy id."""
    label = self.convolute_inventory_number or self.legacy_id
    return "{}".format(label)
def field_dict(self):
    """Return this record's concrete fields as a plain dict."""
    as_dict = model_to_dict(self)
    return as_dict
@classmethod
def get_listview_url(cls):
    """Return the URL of the browse/list view for this model.

    Fixed: the classmethod's first parameter was named ``self``;
    by convention (PEP 8) it is ``cls``.
    """
    return reverse('archiv:fundinventarkonvolutnummern_browse')
@classmethod
def get_createview_url(cls):
    """Return the URL of the create view for this model.

    Fixed: the classmethod's first parameter was named ``self``;
    by convention (PEP 8) it is ``cls``.
    """
    return reverse('archiv:fundinventarkonvolutnummern_create')
def get_absolute_url(self):
    """Return the canonical detail URL for this record.

    Fixed: this method was defined twice back to back; the second,
    identical definition silently shadowed the first and has been
    removed.
    """
    return reverse('archiv:fundinventarkonvolutnummern_detail', kwargs={'pk': self.id})
def get_delete_url(self):
    """Return the URL of the delete view for this record."""
    return reverse(
        'archiv:fundinventarkonvolutnummern_delete',
        kwargs={'pk': self.id},
    )
def get_edit_url(self):
    """Return the URL of the edit view for this record."""
    return reverse(
        'archiv:fundinventarkonvolutnummern_edit',
        kwargs={'pk': self.id},
    )
def get_next(self):
    """Return the detail URL of the record with the next-higher id,
    or False if this is the last one.

    Fixes: the local was named ``next`` (shadowing the builtin); the
    queryset was truth-tested and then ``.first()``-ed, issuing two
    queries; and the queryset was not ordered by id, so with the
    model's default Meta ordering "next" did not mean next-by-id
    (get_prev explicitly orders by ``-id``).
    """
    nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
    if nxt:
        return reverse(
            'archiv:fundinventarkonvolutnummern_detail',
            kwargs={'pk': nxt.id}
        )
    return False
def get_prev(self):
    """Return the detail URL of the record with the next-lower id,
    or False if this is the first one.

    Fixed: the queryset was truth-tested and then ``.first()``-ed,
    issuing two queries; ``.first()`` alone covers both.
    """
    prev_obj = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
    if prev_obj:
        return reverse(
            'archiv:fundinventarkonvolutnummern_detail',
            kwargs={'pk': prev_obj.id}
        )
    return False
class FundinventarMaterialproben(models.Model):
    """Inventory of material samples.

    Auto-generated model mapping rows of
    ``Material_sample_inventory_no.csv``; each field's ``data_lookup``
    extra records the originating CSV column. Fixes applied to the
    generated code: duplicate ``get_absolute_url`` removed, classmethods
    use ``cls``, ``get_next``/``get_prev`` no longer shadow the builtin
    ``next`` or issue a redundant second query, and the user-facing
    label typo "Arachaeological" was corrected.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    # --- provenance ---------------------------------------------------
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_fundinventarmaterialproben_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Creator_metadata",
    )
    # NOTE(review): FK target is "ExcavationObjectID" although the field
    # is named archaeological_object_id — confirm this is intended.
    archaeological_object_id = models.ForeignKey(
        "ExcavationObjectID",
        related_name='rvn_fundinventarmaterialproben_archaeological_object_id_excavationobjectid',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        # Typo fix in the user-facing label ("Arachaeological").
        verbose_name="Archaeological object ID",
        help_text="helptext for archaeological_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Archaeological_object_ID",
    )
    relatedto = models.ForeignKey(
        "Fundinventar4DPuzzleID",
        related_name='rvn_fundinventarmaterialproben_relatedto_fundinventar4dpuzzleid',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__RelatedTo",
    )
    # --- inventory numbers -------------------------------------------
    material_sample_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Material sample inventory number",
        help_text="helptext for material_sample_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Material_sample_inventory_number",
    )
    find_local_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Find local number",
        help_text="helptext for find_local_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Find_local_number",
    )
    convolute_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Convolute inventory number",
        help_text="helptext for convolute_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Convolute_inventory_number",
    )
    corresponding_to_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Corresponding to inventory number",
        help_text="helptext for corresponding_to_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Corresponding_to_inventory_number",
    )
    # --- find description --------------------------------------------
    find_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarmaterialproben_find_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find material",
        help_text="helptext for find_material",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Find_material",
    )
    find_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Find comment",
        help_text="helptext for find_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Find_comment",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_fundinventarmaterialproben_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="helptext for excavation_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Excavation_object_ID",
    )
    find_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarmaterialproben_find_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find type",
        help_text="helptext for find_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Find_type",
    )
    # --- stratigraphy (internal, not public) -------------------------
    stratum_id_relative = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarmaterialproben_stratum_id_relative_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID relative",
        help_text="helptext for stratum_id_relative",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Stratum_ID_relative",
    )
    stratum_id_absolute_prepub = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarmaterialproben_stratum_id_absolute_prepub_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID absolute pre publication",
        help_text="helptext for stratum_id_absolute_prepub",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Stratum_ID_absolute_prepub",
    )
    stratum_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Stratum Comment",
        help_text="helptext for stratum_comment",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Stratum_comment",
    )
    phase_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarmaterialproben_phase_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Phase ID",
        help_text="helptext for phase_id",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Phase_ID",
    )
    find_year = models.DateField(
        blank=True, null=True,
        verbose_name="Find year",
        help_text="helptext for find_year",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Find_year",
    )
    # --- storage / access --------------------------------------------
    storage_find = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarmaterialproben_storage_find_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Storage find",
        help_text="helptext for storage_find",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Storage_find",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarmaterialproben_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="helptext for access",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Access",
    )
    # --- digitisation provenance -------------------------------------
    uncertainty_excavation_digitisation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarmaterialproben_uncertainty_excavation_digitisation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or digital",
        help_text="helptext for uncertainty_excavation_digitisation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Uncertainty__excavation_digitisation",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="helptext for digitisation_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Materialproben/Material_sample_inventory_no.csv__Digitisation_comment",
    )
    # Raw CSV row this record was imported from (kept for provenance).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # --- FileChecker mirror fields (internal) ------------------------
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering and human-readable model name.
        ordering = [
            'material_sample_inventory_number',
        ]
        verbose_name = "Fundinventar Materialproben"

    def __str__(self):
        """Human-readable label: sample inventory number, else legacy id."""
        if self.material_sample_inventory_number:
            return "{}".format(self.material_sample_inventory_number)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this record's concrete fields as a plain dict."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:fundinventarmaterialproben_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:fundinventarmaterialproben_create')

    def get_absolute_url(self):
        """Return the canonical detail URL for this record.

        (The generated code defined this method twice; the duplicate
        was removed.)
        """
        return reverse('archiv:fundinventarmaterialproben_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this record."""
        return reverse('archiv:fundinventarmaterialproben_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this record."""
        return reverse('archiv:fundinventarmaterialproben_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the next record by id, or False.

        Explicit order_by('id') makes "next" id-based (the default Meta
        ordering would otherwise apply); .first() issues a single query.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:fundinventarmaterialproben_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the previous record by id, or False."""
        prev_obj = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prev_obj:
            return reverse(
                'archiv:fundinventarmaterialproben_detail',
                kwargs={'pk': prev_obj.id}
            )
        return False
class FundinventarSteininventar(models.Model):
    """Inventory of stones.

    Auto-generated model mapping rows of
    ``Bone_Stone_inventory_number.csv``; each field's ``data_lookup``
    extra records the originating CSV column. Fixes applied to the
    generated code: duplicate ``get_absolute_url`` removed, classmethods
    use ``cls``, ``get_next``/``get_prev`` no longer shadow the builtin
    ``next`` or issue a redundant second query, and the user-facing
    label typo "Arachaeological" was corrected.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    # --- provenance ---------------------------------------------------
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_fundinventarsteininventar_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Creator_metadata",
    )
    # NOTE(review): FK target is "ExcavationObjectID" although the field
    # is named archaeological_object_id — confirm this is intended.
    archaeological_object_id = models.ForeignKey(
        "ExcavationObjectID",
        related_name='rvn_fundinventarsteininventar_archaeological_object_id_excavationobjectid',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        # Typo fix in the user-facing label ("Arachaeological").
        verbose_name="Archaeological object ID",
        help_text="helptext for archaeological_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Archaeological_object_ID",
    )
    # --- find description --------------------------------------------
    find_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_find_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find material",
        help_text="helptext for find_material",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Find_material",
    )
    find_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_find_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find type",
        help_text="helptext for find_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Find_type",
    )
    # --- inventory numbers -------------------------------------------
    find_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Find inventory number",
        help_text="helptext for find_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Find_inventory_number",
    )
    find_local_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Find local number",
        help_text="helptext for find_local_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Find_local_number",
    )
    convolute_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Convolute inventory number",
        help_text="helptext for convolute_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Convolute_inventory_number",
    )
    corresponding_to_inventory_number = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Corresponding to inventory number",
        help_text="helptext for corresponding_to_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Corresponding_to_inventory_number",
    )
    # --- stratigraphy (internal, not public) -------------------------
    stratum_id_relative = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_stratum_id_relative_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID relative",
        help_text="helptext for stratum_id_relative",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Stratum_ID_relative",
    )
    stratum_id_absolute_prepub = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_stratum_id_absolute_prepub_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum ID absolute pre publication",
        help_text="helptext for stratum_id_absolute_prepub",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Stratum_ID_absolute_prepub",
    )
    find_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Find comment",
        help_text="helptext for find_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Find_comment",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_fundinventarsteininventar_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="helptext for excavation_object_id",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Excavation_object_ID",
    )
    phase_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_phase_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Phase ID",
        help_text="helptext for phase_id",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Phase_ID",
    )
    # --- storage / access --------------------------------------------
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="helptext for access",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Access",
    )
    storage_find = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_storage_find_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Storage of find",
        help_text="helptext for storage_find",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Storage_find",
    )
    stratum_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Stratum Comment",
        help_text="helptext for stratum_comment",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Stratum_comment",
    )
    # --- digitisation provenance -------------------------------------
    uncertainty_excavation_digitisation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_uncertainty_excavation_digitisation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or digital",
        help_text="helptext for uncertainty_excavation_digitisation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Uncertainty__excavation_digitisation",
    )
    find_date = models.DateField(
        blank=True, null=True,
        verbose_name="Find date",
        help_text="helptext for find_date",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Find_date",
    )
    relatedto = models.ManyToManyField(
        SkosConcept,
        related_name='rvn_fundinventarsteininventar_relatedto_skosconcept',
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__RelatedTo",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="helptext for digitisation_comment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Fundinventar_Steininventar/Bone_Stone_inventory_number.csv__Digitisation_comment",
    )
    # Raw CSV row this record was imported from (kept for provenance).
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # --- FileChecker mirror fields (internal) ------------------------
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering and human-readable model name.
        ordering = [
            'find_inventory_number',
        ]
        verbose_name = "FundinventarSteininventar"

    def __str__(self):
        """Human-readable label: find inventory number, else legacy id."""
        if self.find_inventory_number:
            return "{}".format(self.find_inventory_number)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this record's concrete fields as a plain dict."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:fundinventarsteininventar_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:fundinventarsteininventar_create')

    def get_absolute_url(self):
        """Return the canonical detail URL for this record.

        (The generated code defined this method twice; the duplicate
        was removed.)
        """
        return reverse('archiv:fundinventarsteininventar_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this record."""
        return reverse('archiv:fundinventarsteininventar_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this record."""
        return reverse('archiv:fundinventarsteininventar_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the next record by id, or False.

        Explicit order_by('id') makes "next" id-based (the default Meta
        ordering would otherwise apply); .first() issues a single query.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt:
            return reverse(
                'archiv:fundinventarsteininventar_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the previous record by id, or False."""
        prev_obj = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prev_obj:
            return reverse(
                'archiv:fundinventarsteininventar_detail',
                kwargs={'pk': prev_obj.id}
            )
        return False
class GIS(models.Model):
""" Geographical information system """
legacy_id = models.CharField(
max_length=300, blank=True,
verbose_name="Legacy ID"
)
creator_metadata = models.ForeignKey(
"Actor",
related_name='rvn_gis_creator_metadata_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of metadata",
help_text="helptext for creator_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Creator_metadata",
arche_prop="hasMetadataCreator",
)
creator_original = models.ForeignKey(
"Actor",
related_name='rvn_gis_creator_original_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of original document",
help_text="helptext for creator_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Creator_original",
arche_prop="hasCreator",
)
creator_archivalobject = models.ForeignKey(
"Actor",
related_name='rvn_gis_creator_archivalobject_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="creator of archival object",
help_text="Person who processed resource for digital long-term archiving.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__creator_archivalObject",
arche_prop="hasContributor",
)
document_type = models.ForeignKey(
"DocumentTypes",
related_name='rvn_gis_document_type_documenttypes',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Document type",
help_text="helptext for document_type",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Document_type",
)
filename = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename ",
help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Filename",
arche_prop="hasAlternativeTitle",
)
document_id = models.CharField(
max_length=250,
blank=True,
verbose_name="Document ID ",
help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Document_ID",
arche_prop="hasNonLinkedIdentifier",
arche_prop_str_template="4DP document ID: <value>",
)
document_title = models.CharField(
max_length=250,
blank=True,
verbose_name="Document title",
help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Document_title",
arche_prop="hasAlternativeTitle",
)
path_filename_old = models.CharField(
max_length=250,
blank=True,
verbose_name="Data path in old TD archive",
help_text="helptext for path_filename_old",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Path_filename_old",
)
path_filename_arche = models.CharField(
max_length=250,
blank=True,
verbose_name="Data path in ARCHE",
help_text="helptext for path_filename_arche",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Path_filename_ARCHE",
)
creation_date_original = models.DateField(
blank=True, null=True,
verbose_name="Creation date of original document",
help_text="helptext for creation_date_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Creation_date_original",
arche_prop="hasCreatedDate",
)
software_used = models.CharField(
max_length=250,
blank=True,
verbose_name="Software used",
help_text="helptext for software_used",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Software_used",
arche_prop="hasUsedSoftware",
)
creation_date_archivalobject = models.DateField(
blank=True, null=True,
verbose_name="Creation date of archival object",
help_text="helptext for creation_date_archivalobject",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Creation_date_archivalObject",
)
creation_date_metadata = models.DateField(
blank=True, null=True,
verbose_name="Creation date of metadata",
help_text="helptext for creation_date_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Creation_date_metadata",
)
excavation_object_id = models.ManyToManyField(
"ExcavationObjectID",
related_name='rvn_gis_excavation_object_id_excavationobjectid',
blank=True,
verbose_name="Excavation object ID",
help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Excavation_object_ID",
)
archaeological_object_id = models.ManyToManyField(
"ArchaeologicalObjectID",
related_name='rvn_gis_archaeological_object_id_archaeologicalobjectid',
blank=True,
verbose_name="Archaeological object ID",
help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Archaeological_object_ID",
)
relatedto = models.ManyToManyField(
"DocumentTypes",
related_name='rvn_gis_relatedto_documenttypes',
blank=True,
verbose_name="File is related to other TD resources",
help_text="helptext for relatedto",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__RelatedTo",
)
original_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment on the original document",
help_text="Comments from the creation of the original resource.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Original_comment",
)
digitisation_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment from digitisation",
help_text="Comments from digitisation.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Digitisation_comment",
arche_prop="hasNote",
)
file_extension_original = models.ForeignKey(
SkosConcept,
related_name='rvn_gis_file_extension_original_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="File extension of original document",
help_text="helptext for file_extension_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__File_extension_original",
)
file_extension_archivalobject = models.ForeignKey(
SkosConcept,
related_name='rvn_gis_file_extension_archivalobject_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="File extension of archival object",
help_text="helptext for file_extension_archivalobject",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__File_extension_archivalObject",
)
copyright = models.ForeignKey(
SkosConcept,
related_name='rvn_gis_copyright_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Copyright",
help_text="helptext for copyright",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Copyright",
)
access = models.ForeignKey(
SkosConcept,
related_name='rvn_gis_access_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Access",
help_text="Whether access to the resource is restricted or if it is open to the public.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Access",
arche_prop="hasAccessRestriction",
)
site_id = models.ForeignKey(
SkosConcept,
related_name='rvn_gis_site_id_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Site ID",
help_text="Abbreviation of Tell el-Daba is 'TD'.",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Site_ID",
)
excavation_post_excavation = models.ForeignKey(
SkosConcept,
related_name='rvn_gis_excavation_post_excavation_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Whether it was created during excavation or after (post-excavation)",
help_text="helptext for excavation_post_excavation",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_GIS/GIS_metadata.csv__Excavation__post_excavation",
)
orig_data_csv = models.TextField(
blank=True,
null=True,
verbose_name="The original data"
).set_extra(
is_public=True
)
fc_name = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field name"
).set_extra(
is_public=False
)
fc_directory = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field directory"
).set_extra(
is_public=False,
)
fc_type = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field type"
).set_extra(
is_public=False
)
fc_filename = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field filename"
).set_extra(
is_public=False
)
fc_extension = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field extension"
).set_extra(
is_public=False
)
fc_match = models.BooleanField(
default=False,
verbose_name="Matches FileChecker Entry",
)
class Meta:
ordering = [
'filename',
]
verbose_name = "GIS"
def __str__(self):
if self.filename:
return "{}".format(self.filename)
else:
return "{}".format(self.legacy_id)
def field_dict(self):
return model_to_dict(self)
@classmethod
def get_listview_url(self):
return reverse('archiv:gis_browse')
@classmethod
def import_in_arche(self):
return True
@classmethod
def get_createview_url(self):
return reverse('archiv:gis_create')
def get_absolute_url(self):
return reverse('archiv:gis_detail', kwargs={'pk': self.id})
def get_absolute_url(self):
return reverse('archiv:gis_detail', kwargs={'pk': self.id})
def get_delete_url(self):
return reverse('archiv:gis_delete', kwargs={'pk': self.id})
def get_edit_url(self):
return reverse('archiv:gis_edit', kwargs={'pk': self.id})
def get_next(self):
next = self.__class__.objects.filter(id__gt=self.id)
if next:
return reverse(
'archiv:gis_detail',
kwargs={'pk': next.first().id}
)
return False
def get_prev(self):
    """Return the detail URL of the record with the next-lower id, or False."""
    prev_obj = (
        self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
    )
    if prev_obj is not None:
        return reverse('archiv:gis_detail', kwargs={'pk': prev_obj.id})
    return False
class Geophysics(models.Model):
    """ Files from geophysical surveys """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_geophysics_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_geophysics_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original document",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Creator_original",
        arche_prop="hasCreator",
    )
    creator_archivalobject = models.ForeignKey(
        "Actor",
        related_name='rvn_geophysics_creator_archivalobject_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of archival object",
        help_text="Person who processed resource for digital long-term archiving.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Creator_archivalObject",
        arche_prop="hasContributor",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_geophysics_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Document_type",
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename ",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID ",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Document_title",
        arche_prop="hasAlternativeTitle",
    )
    filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename old",
        help_text="helptext for filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Filename_old",
    )
    creation_date_original = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of original document",
        help_text="helptext for creation_date_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Creation_date_original",
        arche_prop="hasCreatedDate",
    )
    creation_date_archivalobject = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of archival object",
        help_text="helptext for creation_date_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Creation_date_archivalObject",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Creation_date_metadata",
    )
    path_filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in old TD archive",
        help_text="helptext for path_filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Path_filename_old",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_geophysics_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Excavation_object_ID",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Original_comment",
        arche_prop="hasDescription",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Digitisation_comment",
        arche_prop="hasNote",
    )
    file_extension_original = models.ForeignKey(
        SkosConcept,
        related_name='rvn_geophysics_file_extension_original_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of original document",
        help_text="helptext for file_extension_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__File_extension_original",
    )
    file_extension_archivalobject = models.ForeignKey(
        SkosConcept,
        related_name='rvn_geophysics_file_extension_archivalobject_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of archival object",
        help_text="helptext for file_extension_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__File_extension_archivalObject",
    )
    method = models.ForeignKey(
        SkosConcept,
        related_name='rvn_geophysics_method_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Method",
        help_text="helptext for method",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Method",
        arche_prop="hasAppliedMethod",
    )
    equipment = models.ForeignKey(
        SkosConcept,
        related_name='rvn_geophysics_equipment_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Equipment",
        help_text="helptext for equipment",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Equipment",
        arche_prop="hasUsedHardware",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_geophysics_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_geophysics_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_geophysics_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Site_ID",
    )
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_geophysics_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or after (post-excavation)",
        help_text="helptext for excavation_post_excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Geomagnetik/Geophysik_Metadata.csv__Excavation__post_excavation",
    )
    # Raw CSV payload plus internal FileChecker bookkeeping columns.
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering; get_next/get_prev navigate by id instead.
        ordering = [
            'filename',
        ]
        verbose_name = "Geophysics"

    def __str__(self):
        """Return the filename when present, otherwise the legacy ID."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Serialise this instance into a plain dict via model_to_dict."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Flag: records of this model are meant to be imported into ARCHE."""
        # Fixed: classmethod parameter renamed ``self`` -> ``cls``.
        return True

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:geophysics_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:geophysics_create')

    def get_absolute_url(self):
        """Return the URL of the detail view for this record."""
        # Fixed: this method was defined twice, byte-for-byte identical;
        # the redundant duplicate definition has been removed.
        return reverse('archiv:geophysics_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this record."""
        return reverse('archiv:geophysics_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this record."""
        return reverse('archiv:geophysics_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the record with the next-higher id, or False.

        Fixed: added an explicit ``.order_by('id')`` — without it the
        Meta ordering ('filename') made ``.first()`` pick the alphabetically
        first of the following records rather than the numerically next one
        (``get_prev`` already orders by '-id'); also stopped shadowing the
        builtin ``next``.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt is not None:
            return reverse(
                'archiv:geophysics_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the record with the next-lower id, or False."""
        prev_obj = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prev_obj is not None:
            return reverse(
                'archiv:geophysics_detail',
                kwargs={'pk': prev_obj.id}
            )
        return False
class Inventorybooks(models.Model):
    """ Digitised inventory books """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_inventorybooks_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_inventorybooks_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original document",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Creator_original",
    )
    creator_scan = models.ForeignKey(
        "Actor",
        related_name='rvn_inventorybooks_creator_scan_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of scan",
        help_text="helptext for creator_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Creator_scan",
        arche_prop="hasDigitisingAgent",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_inventorybooks_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Document_type",
    )
    convolute_inventory_number = models.ForeignKey(
        "FundinventarKonvolutnummern",
        related_name='rvn_inventorybooks_convolute_inventory_number_fundinventarkonvolutnummern',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Convolute inventory number",
        help_text="helptext for convolute_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Convolute_inventory_number",
    )
    bone_stone_inventory_number = models.ForeignKey(
        "FundinventarSteininventar",
        related_name='rvn_inventorybooks_bone_stone_inventory_number_fundinventarsteininventar',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Bone or stone inventory number",
        help_text="helptext for bone_stone_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Bone_stone_inventory_number",
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename ",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID ",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Document_title",
        arche_prop="hasAlternativeTitle",
    )
    filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename old",
        help_text="helptext for filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Filename_old",
    )
    creation_date_original = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of original document",
        help_text="helptext for creation_date_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Creation_date_original",
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year of original document",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Creation_year_original",
        arche_prop="hasCreatedDateOriginal"
    )
    creation_date_scan = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of scan",
        help_text="helptext for creation_date_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Creation_date_scan",
        arche_prop="hasCreatedDate",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Creation_date_metadata",
    )
    storage_folder_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Storage folder of original document",
        help_text="helptext for storage_folder_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Storage_folder_original",
    )
    resolution_scan_dpi = models.IntegerField(
        blank=True, null=True,
        verbose_name="Scan resolution",
        help_text="helptext for resolution_scan_dpi",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Resolution_scan_dpi",
        arche_prop="hasTechnicalInfo",
        arche_prop_str_template="<value> dpi",
    )
    find_inventory_number = models.ManyToManyField(
        "FundinventarInventarnummern",
        related_name='rvn_inventorybooks_find_inventory_number_fundinventarinventarnummern',
        blank=True,
        verbose_name="Find inventory number",
        help_text="helptext for find_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Find_inventory_number",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Original_comment",
        arche_prop="hasNote",
    )
    file_extension = models.ForeignKey(
        SkosConcept,
        related_name='rvn_inventorybooks_file_extension_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension",
        help_text="helptext for file_extension",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__File_extension",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_inventorybooks_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_inventorybooks_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_inventorybooks_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Site_ID",
    )
    equipment_scan = models.ForeignKey(
        SkosConcept,
        related_name='rvn_inventorybooks_equipment_scan_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Equipment used for scanning",
        help_text="helptext for equipment_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Equipment_scan",
        arche_prop="hasUsedHardware",
    )
    source_original_copy_edited_copy = models.ForeignKey(
        SkosConcept,
        related_name='rvn_inventorybooks_source_original_copy_edited_copy_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        # Fixed typo in user-facing label ("Wheter ... a original").
        verbose_name="Whether source is an original or a copy",
        help_text="helptext for source_original_copy_edited_copy",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Source__original_copy_edited-copy",
    )
    original_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_inventorybooks_original_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Material of original document",
        help_text="helptext for original_material",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Original_material",
    )
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_inventorybooks_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or after (post-excavation)",
        help_text="helptext for excavation_post_excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Inventarbuecher/Find_inventory.csv__Excavation__post_excavation",
    )
    # Raw CSV payload plus internal FileChecker bookkeeping columns.
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering; get_next/get_prev navigate by id instead.
        ordering = [
            'filename',
        ]
        verbose_name = "Inventory books"

    def __str__(self):
        """Return the filename when present, otherwise the legacy ID."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Serialise this instance into a plain dict via model_to_dict."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Flag: records of this model are meant to be imported into ARCHE."""
        # Fixed: classmethod parameter renamed ``self`` -> ``cls``.
        return True

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:inventorybooks_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:inventorybooks_create')

    def get_absolute_url(self):
        """Return the URL of the detail view for this record."""
        # Fixed: this method was defined twice, byte-for-byte identical;
        # the redundant duplicate definition has been removed.
        return reverse('archiv:inventorybooks_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this record."""
        return reverse('archiv:inventorybooks_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this record."""
        return reverse('archiv:inventorybooks_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the record with the next-higher id, or False.

        Fixed: added an explicit ``.order_by('id')`` — without it the
        Meta ordering ('filename') made ``.first()`` pick the alphabetically
        first of the following records rather than the numerically next one
        (``get_prev`` already orders by '-id'); also stopped shadowing the
        builtin ``next``.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt is not None:
            return reverse(
                'archiv:inventorybooks_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the record with the next-lower id, or False."""
        prev_obj = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prev_obj is not None:
            return reverse(
                'archiv:inventorybooks_detail',
                kwargs={'pk': prev_obj.id}
            )
        return False
class PhasenID(models.Model):
    """ Identifier of archaeological phases """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    phase_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_phasenid_phase_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Phase type",
        help_text="helptext for phase_type",
    ).set_extra(
        is_public=False,
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_phasenid_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="helptext for site_id",
    ).set_extra(
        is_public=False,
    )
    phase_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Phase ID",
        help_text="helptext for phase_id",
    ).set_extra(
        is_public=False,
    )
    phase_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Phase title",
        help_text="helptext for phase_title",
    ).set_extra(
        is_public=False,
    )
    area = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_phasenid_area_excavationobjectid',
        blank=True,
        verbose_name="Area",
        help_text="helptext for area",
    ).set_extra(
        is_public=False,
    )
    containing_phase_id = models.ManyToManyField(
        SkosConcept,
        related_name='rvn_phasenid_containing_phase_id_skosconcept',
        blank=True,
        verbose_name="Containing phase ID",
        help_text="helptext for containing_phase_id",
    ).set_extra(
        is_public=False,
    )
    # Raw CSV payload plus internal FileChecker bookkeeping columns.
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering; get_next/get_prev navigate by id instead.
        ordering = [
            'phase_id',
        ]
        verbose_name = "Phasen ID"

    def __str__(self):
        """Return the phase ID when present, otherwise the legacy ID."""
        if self.phase_id:
            return "{}".format(self.phase_id)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Serialise this instance into a plain dict via model_to_dict."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        # Fixed: classmethod parameter renamed ``self`` -> ``cls``.
        return reverse('archiv:phasenid_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:phasenid_create')

    def get_absolute_url(self):
        """Return the URL of the detail view for this record."""
        # Fixed: this method was defined twice, byte-for-byte identical;
        # the redundant duplicate definition has been removed.
        return reverse('archiv:phasenid_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this record."""
        return reverse('archiv:phasenid_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this record."""
        return reverse('archiv:phasenid_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the record with the next-higher id, or False.

        Fixed: added an explicit ``.order_by('id')`` — without it the
        Meta ordering ('phase_id') made ``.first()`` pick the alphabetically
        first of the following records rather than the numerically next one
        (``get_prev`` already orders by '-id'); also stopped shadowing the
        builtin ``next``.
        """
        nxt = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
        if nxt is not None:
            return reverse(
                'archiv:phasenid_detail',
                kwargs={'pk': nxt.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the record with the next-lower id, or False."""
        prev_obj = self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
        if prev_obj is not None:
            return reverse(
                'archiv:phasenid_detail',
                kwargs={'pk': prev_obj.id}
            )
        return False
class Protocols(models.Model):
    """Digitised excavation protocols (Tell el-Daba archive).

    Field declarations are kept token-identical to the generated originals
    (they drive migrations and the CSV import via ``set_extra`` metadata).
    Fixes applied to the methods only: the duplicated ``get_absolute_url``
    is collapsed to one definition, classmethods use ``cls``, ``get_next``
    orders explicitly by pk (the original silently used ``Meta.ordering``),
    and locals no longer shadow the builtins ``next``/``prev``.

    NOTE(review): ``set_extra()`` is not a stock Django Field method;
    presumably a project-level patch attaching export metadata — confirm.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_protocols_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_protocols_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original document",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Creator_original",
    )
    creator_scan = models.ForeignKey(
        "Actor",
        related_name='rvn_protocols_creator_scan_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of scan",
        help_text="helptext for creator_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Creator_scan",
        arche_prop="hasDigitisingAgent",
    )
    excavation_object_id = models.ForeignKey(
        "ExcavationObjectID",
        related_name='rvn_protocols_excavation_object_id_excavationobjectid',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Excavation_object_ID",
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename ",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID ",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. P for Protocol) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Document_title",
        arche_prop="hasAlternativeTitle",
    )
    filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename old",
        help_text="helptext for filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Filename_old",
    )
    document_type = models.ManyToManyField(
        "DocumentTypes",
        related_name='rvn_protocols_document_type_documenttypes',
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Document_type",
    )
    creation_date_original = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of original document",
        help_text="helptext for creation_date_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Creation_date_original",
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year of original document",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Creation_year_original",
        arche_prop="hasCreatedDateOriginal"
    )
    creation_date_scan = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of scan",
        help_text="helptext for creation_date_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Creation_date_scan",
        arche_prop="hasCreatedDate",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Creation_date_metadata",
    )
    storage_folder_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Storage folder of original document",
        help_text="helptext for storage_folder_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Storage_folder_original",
    )
    resolution_scan_dpi = models.IntegerField(
        blank=True, null=True,
        verbose_name="Scan resolution",
        help_text="helptext for resolution_scan_dpi",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Resolution_scan_dpi",
        arche_prop="hasTechnicalInfo",
        arche_prop_str_template="<value> dpi",
    )
    archaeological_object_id = models.ManyToManyField(
        "ArchaeologicalObjectID",
        related_name='rvn_protocols_archaeological_object_id_archaeologicalobjectid',
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Archaeological_object_ID",
    )
    number_of_pages = models.IntegerField(
        blank=True, null=True,
        verbose_name="Number of pages",
        help_text="helptext for number_of_pages",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Number_of_pages",
        arche_prop="hasExtent",
        arche_prop_str_template="<value> pages",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Digitisation_comment",
        arche_prop="hasNote",
    )
    file_extension = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_file_extension_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension",
        help_text="helptext for file_extension",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__File_extension",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    storage = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_storage_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Storage folder of original document",
        help_text="helptext for storage",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Storage",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Site_ID",
    )
    equipment_scan = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_equipment_scan_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Equipment used for scanning",
        help_text="helptext for equipment_scan",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Equipment_scan",
        arche_prop="hasUsedHardware",
    )
    source_original_copy_edited_copy = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_source_original_copy_edited_copy_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Wheter source is a original or a copy",
        help_text="helptext for source_original_copy_edited_copy",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Source__original_copy_edited-copy",
    )
    original_material = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_original_material_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Material of original document",
        help_text="helptext for original_material",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Original_material",
    )
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_protocols_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or after (post-excavation)",
        help_text="helptext for excavation_post_excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Protokolle/Protocol.csv__Excavation__post_excavation",
    )
    # --- boilerplate provenance / file-checker fields (shared pattern) ---
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering: by filename.
        ordering = [
            'filename',
        ]
        verbose_name = "Protocols"

    def __str__(self):
        """Human-readable label: filename, falling back to legacy_id."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's concrete fields as a plain dict."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Whether records of this model are exported to ARCHE."""
        return True

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:protocols_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:protocols_create')

    def get_absolute_url(self):
        """Return the canonical detail URL (duplicate definition removed)."""
        return reverse('archiv:protocols_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this record."""
        return reverse('archiv:protocols_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this record."""
        return reverse('archiv:protocols_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the record with the next-higher pk, or False.

        Fix: explicit ``order_by('id')`` — the original relied on
        ``Meta.ordering`` ('filename'), so ``first()`` was not the next pk.
        """
        successor = self.__class__.objects.filter(
            id__gt=self.id
        ).order_by('id').first()
        if successor:
            return reverse(
                'archiv:protocols_detail',
                kwargs={'pk': successor.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the record with the next-lower pk, or False."""
        predecessor = self.__class__.objects.filter(
            id__lt=self.id
        ).order_by('-id').first()
        if predecessor:
            return reverse(
                'archiv:protocols_detail',
                kwargs={'pk': predecessor.id}
            )
        return False
class StratenID(models.Model):
    """Identifier of archaeological strata.

    Field declarations are kept token-identical to the generated originals
    (they drive migrations and CSV import metadata via ``set_extra``).
    Method fixes: duplicate ``get_absolute_url`` collapsed, classmethods use
    ``cls``, ``get_next`` orders explicitly by pk, no builtin shadowing.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    stratum_type = models.ForeignKey(
        SkosConcept,
        related_name='rvn_stratenid_stratum_type_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Stratum type",
        help_text="helptext for stratum_type",
    ).set_extra(
        is_public=False,
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_stratenid_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="helptext for site_id",
    ).set_extra(
        is_public=False,
    )
    stratum_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Stratum ID",
        help_text="helptext for stratum_id",
    ).set_extra(
        is_public=False,
    )
    stratum_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Stratum title",
        help_text="helptext for stratum_title",
    ).set_extra(
        is_public=False,
    )
    area = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_stratenid_area_excavationobjectid',
        blank=True,
        verbose_name="Area",
        help_text="helptext for area",
    ).set_extra(
        is_public=False,
    )
    containing_stratum_id = models.ManyToManyField(
        SkosConcept,
        related_name='rvn_stratenid_containing_stratum_id_skosconcept',
        blank=True,
        verbose_name="Containing stratum ID",
        help_text="helptext for containing_stratum_id",
    ).set_extra(
        is_public=False,
    )
    # --- boilerplate provenance / file-checker fields (shared pattern) ---
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering: by the stratum identifier.
        ordering = [
            'stratum_id',
        ]
        verbose_name = "Straten ID"

    def __str__(self):
        """Human-readable label: stratum_id, falling back to legacy_id."""
        if self.stratum_id:
            return "{}".format(self.stratum_id)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's concrete fields as a plain dict."""
        return model_to_dict(self)

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:stratenid_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:stratenid_create')

    def get_absolute_url(self):
        """Return the canonical detail URL (duplicate definition removed)."""
        return reverse('archiv:stratenid_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this record."""
        return reverse('archiv:stratenid_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this record."""
        return reverse('archiv:stratenid_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the record with the next-higher pk, or False.

        Fix: explicit ``order_by('id')`` — the original relied on
        ``Meta.ordering`` ('stratum_id'), so ``first()`` was not the next pk.
        """
        successor = self.__class__.objects.filter(
            id__gt=self.id
        ).order_by('id').first()
        if successor:
            return reverse(
                'archiv:stratenid_detail',
                kwargs={'pk': successor.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the record with the next-lower pk, or False."""
        predecessor = self.__class__.objects.filter(
            id__lt=self.id
        ).order_by('-id').first()
        if predecessor:
            return reverse(
                'archiv:stratenid_detail',
                kwargs={'pk': predecessor.id}
            )
        return False
class Tables(models.Model):
    """Digitised tables (Tell el-Daba archive).

    Field declarations are kept token-identical to the generated originals
    (they drive migrations and CSV import metadata via ``set_extra``).
    Method fixes: duplicate ``get_absolute_url`` collapsed, classmethods use
    ``cls``, ``get_next`` orders explicitly by pk, no builtin shadowing.
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_tables_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_tables_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original document",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Creator_original",
        arche_prop="hasCreator",
    )
    creator_archivalobject = models.ForeignKey(
        "Actor",
        related_name='rvn_tables_creator_archivalobject_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="creator of archival object",
        help_text="Person who processed resource for digital long-term archiving.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__creator_archivalObject",
        arche_prop="hasContributor",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_tables_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Document_type",
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename ",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID ",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. P for Protocol) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Document_title",
        arche_prop="hasAlternativeTitle",
    )
    path_filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in old TD archive",
        help_text="helptext for path_filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Path_filename_old",
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year of original document",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Creation_year_original",
        arche_prop="hasCreatedDate",
    )
    creation_date_archivalobject = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of archival object",
        help_text="helptext for creation_date_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Creation_date_archivalObject",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Creation_date_metadata",
    )
    folder_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Folder original",
        help_text="helptext for folder_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__folder_original",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_tables_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Excavation_object_ID",
    )
    archaeological_object_id = models.ManyToManyField(
        "ArchaeologicalObjectID",
        related_name='rvn_tables_archaeological_object_id_archaeologicalobjectid',
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Archaeological_object_ID",
    )
    relatedto = models.ManyToManyField(
        "DocumentTypes",
        related_name='rvn_tables_relatedto_documenttypes',
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__RelatedTo",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Digitisation_comment",
        arche_prop="hasNote",
    )
    file_extension_original = models.ForeignKey(
        SkosConcept,
        related_name='rvn_tables_file_extension_original_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of original document",
        help_text="helptext for file_extension_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__File_extension_original",
    )
    file_extension_archivalobject = models.ForeignKey(
        SkosConcept,
        related_name='rvn_tables_file_extension_archivalobject_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of archival object",
        help_text="helptext for file_extension_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__File_extension_archivalObject",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_tables_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_tables_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_tables_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Site_ID",
    )
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_tables_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Whether it was created during excavation or after (post-excavation)",
        help_text="helptext for excavation_post_excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Tabellen/Tabelle_metadata.csv__Excavation__post_excavation",
    )
    # --- boilerplate provenance / file-checker fields (shared pattern) ---
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering: by filename.
        ordering = [
            'filename',
        ]
        verbose_name = "Tables"

    def __str__(self):
        """Human-readable label: filename, falling back to legacy_id."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return this instance's concrete fields as a plain dict."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Whether records of this model are exported to ARCHE."""
        return True

    @classmethod
    def get_listview_url(cls):
        """Return the URL of the browse/list view for this model."""
        return reverse('archiv:tables_browse')

    @classmethod
    def get_createview_url(cls):
        """Return the URL of the create view for this model."""
        return reverse('archiv:tables_create')

    def get_absolute_url(self):
        """Return the canonical detail URL (duplicate definition removed)."""
        return reverse('archiv:tables_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """Return the URL of the delete view for this record."""
        return reverse('archiv:tables_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """Return the URL of the edit view for this record."""
        return reverse('archiv:tables_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Return the detail URL of the record with the next-higher pk, or False.

        Fix: explicit ``order_by('id')`` — the original relied on
        ``Meta.ordering`` ('filename'), so ``first()`` was not the next pk.
        """
        successor = self.__class__.objects.filter(
            id__gt=self.id
        ).order_by('id').first()
        if successor:
            return reverse(
                'archiv:tables_detail',
                kwargs={'pk': successor.id}
            )
        return False

    def get_prev(self):
        """Return the detail URL of the record with the next-lower pk, or False."""
        predecessor = self.__class__.objects.filter(
            id__lt=self.id
        ).order_by('-id').first()
        if predecessor:
            return reverse(
                'archiv:tables_detail',
                kwargs={'pk': predecessor.id}
            )
        return False
class ThreeDimensionalModel(models.Model):
    """3D models of the Tell el-Daba (TD) excavation archive.

    Each field's ``set_extra`` call attaches import/export metadata:
    whether the field is public, the CSV source column it was imported
    from (``data_lookup``) and, where given, the ARCHE property it maps
    to (``arche_prop``).
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    # NOTE(review): data_lookup below is pasted prose, not a CSV column
    # reference like the other fields — verify against the import script.
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="Thefilenameofconvolutecardsconsistsofthedocument_ID(uniqueidentifier).ThedocumentIDisaproject-specificuniqueidentifierwhichconsistsoftheabbreviationforthesite(TDforTellel-Daba),theabbreviationforthedocumenttype(e.g.KKforKonvolutkarte)andtheconvoluteinventorynumber(or,iftherewasnoinventorynumber,anIDwiththeprefix4DPuzzlewascreated,e.g.4DPuzzle1234).",
        arche_prop="hasAlternativeTitle",
    )
    path_filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in old TD archive",
        help_text="Data path in the old TD archive.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Path_filename_old",
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_threedimensionalmodel_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creation_year_original = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Creation year original",
        help_text="helptext for creation_year_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Creation_year_original",
        arche_prop="hasCreatedDate",
    )
    software_used = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Software which was used to create original",
        help_text="helptext for software_used",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Software_used",
        arche_prop="hasUsedSoftware",
    )
    creation_date_archivalobject = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of archival object",
        help_text="Date when the resource was prepared for long-term archiving.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Creation_date_archivalObject",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_threedimensionalmodel_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original ",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Creator_original",
        arche_prop="hasCreator",
    )
    creator_archivalobject = models.ForeignKey(
        "Actor",
        related_name='rvn_threedimensionalmodel_creator_archivalobject_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of archival object",
        help_text="helptext for creator_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__creator_archivalObject",
        arche_prop="hasContributor",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Creation_date_metadata",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_threedimensionalmodel_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Excavation_object_ID",
    )
    archaeological_object_id = models.ManyToManyField(
        "ArchaeologicalObjectID",
        related_name='rvn_threedimensionalmodel_archaeological_object_id_archaeologicalobjectid',
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Archaeological_object_ID",
    )
    relatedto = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="File is related to other TD resources",
        help_text="helptext for relatedto",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__RelatedTo",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Digitisation_comment",
        arche_prop="hasNote",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_threedimensionalmodel_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Document_type",
    )
    file_extension_original = models.ForeignKey(
        SkosConcept,
        related_name='rvn_threedimensionalmodel_file_extension_original_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of original 3D model",
        help_text="",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__File_extension_original",
    )
    file_extension_archivalobject = models.ForeignKey(
        SkosConcept,
        related_name='rvn_threedimensionalmodel_file_extension_archivalobject_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of archival data",
        help_text="helptext for file_extension_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__File_extension_archivalObject",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_threedimensionalmodel_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_threedimensionalmodel_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_threedimensionalmodel_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Site_ID",
    )
    # NOTE(review): verbose_name below is a pasted help-text paragraph, not
    # a short label — kept as-is (changing it alters the rendered UI).
    excavation_post_excavation = models.ForeignKey(
        SkosConcept,
        related_name='rvn_threedimensionalmodel_excavation_post_excavation_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="The document ID is a project-specific unique identifier which consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. SWnegfilm for black &white negative film, FDfilm for colour slide film, FDdig for colour slide film digitised ) and the inventory numbers (from_to).",
        help_text="helptext for excavation_post_excavation",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_3D/3D_metadata.csv__Excavation__post_excavation",
    )
    # Raw CSV row this record was imported from.
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # fc_* fields hold per-file results of the FileChecker tool.
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering: alphabetical by filename.
        ordering = [
            'filename',
        ]
        verbose_name = "3D models"

    def __str__(self):
        """Filename if set, otherwise the legacy id."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return all model fields of this instance as a plain dict."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Records of this model are imported into ARCHE."""
        # Fix: classmethod first parameter renamed self -> cls.
        return True

    @classmethod
    def get_listview_url(cls):
        """URL of the browse/list view."""
        return reverse('archiv:threedimensionalmodel_browse')

    @classmethod
    def get_createview_url(cls):
        """URL of the create view."""
        return reverse('archiv:threedimensionalmodel_create')

    def get_absolute_url(self):
        """Canonical detail URL (fix: duplicate definition removed)."""
        return reverse('archiv:threedimensionalmodel_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """URL that deletes this record."""
        return reverse('archiv:threedimensionalmodel_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """URL of the edit form."""
        return reverse('archiv:threedimensionalmodel_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Detail URL of the record with the next-higher id, or False.

        Fixes: no shadowing of builtin ``next``; explicit order_by('id')
        (Meta.ordering is 'filename', so the implicit order was
        asymmetric with get_prev's '-id'); single query.
        """
        successor = self.__class__.objects.filter(
            id__gt=self.id
        ).order_by('id').first()
        if successor is not None:
            return reverse(
                'archiv:threedimensionalmodel_detail',
                kwargs={'pk': successor.id}
            )
        return False

    def get_prev(self):
        """Detail URL of the record with the next-lower id, or False."""
        predecessor = self.__class__.objects.filter(
            id__lt=self.id
        ).order_by('-id').first()
        if predecessor is not None:
            return reverse(
                'archiv:threedimensionalmodel_detail',
                kwargs={'pk': predecessor.id}
            )
        return False
class Videos(models.Model):
    """Videos of the Tell el-Daba (TD) excavation archive.

    Each field's ``set_extra`` call attaches import/export metadata:
    whether the field is public, the CSV source column it was imported
    from (``data_lookup``) and, where given, the ARCHE property it maps
    to (``arche_prop``).
    """
    legacy_id = models.CharField(
        max_length=300, blank=True,
        verbose_name="Legacy ID"
    )
    creator_metadata = models.ForeignKey(
        "Actor",
        related_name='rvn_videos_creator_metadata_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of metadata",
        help_text="helptext for creator_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Creator_metadata",
        arche_prop="hasMetadataCreator",
    )
    creator_original = models.ForeignKey(
        "Actor",
        related_name='rvn_videos_creator_original_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Creator of original document",
        help_text="helptext for creator_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Creator_original",
        arche_prop="hasCreator",
    )
    creator_archivalobject = models.ForeignKey(
        "Actor",
        related_name='rvn_videos_creator_archivalobject_actor',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="creator of archival object",
        help_text="Person who processed resource for digital long-term archiving.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__creator_archivalObject",
        arche_prop="hasContributor",
    )
    document_type = models.ForeignKey(
        "DocumentTypes",
        related_name='rvn_videos_document_type_documenttypes',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Document type",
        help_text="helptext for document_type",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Document_type",
    )
    find_inventory_number = models.ForeignKey(
        "FundinventarInventarnummern",
        related_name='rvn_videos_find_inventory_number_fundinventarinventarnummern',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Find inventory number",
        help_text="helptext for find_inventory_number",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Find_inventory_number",
    )
    filename = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Filename ",
        help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Filename",
        arche_prop="hasAlternativeTitle",
    )
    document_id = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document ID ",
        help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. P for Protocol) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Document_ID",
        arche_prop="hasNonLinkedIdentifier",
        arche_prop_str_template="4DP document ID: <value>",
    )
    document_title = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Document title",
        help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Document_title",
        arche_prop="hasAlternativeTitle",
    )
    creation_date_original = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of original document",
        help_text="helptext for creation_date_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Creation_date_original",
        arche_prop="hasCreatedDate",
    )
    creation_date_archivalobject = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of archival object",
        help_text="helptext for creation_date_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Creation_date_archivalObject",
    )
    creation_date_metadata = models.DateField(
        blank=True, null=True,
        verbose_name="Creation date of metadata",
        help_text="helptext for creation_date_metadata",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Creation_date_metadata",
    )
    path_filename_old = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in old TD archive",
        help_text="helptext for path_filename_old",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Path_filename_old",
    )
    path_filename_arche = models.CharField(
        max_length=250,
        blank=True,
        verbose_name="Data path in ARCHE",
        help_text="helptext for path_filename_arche",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Path_filename_ARCHE",
    )
    excavation_object_id = models.ManyToManyField(
        "ExcavationObjectID",
        related_name='rvn_videos_excavation_object_id_excavationobjectid',
        blank=True,
        verbose_name="Excavation object ID",
        help_text="The unique identifier of an excavation object. Excavation objects are created by the archaeologist and include for example squares or sections. The excavation object ID consists of the abbreviation of site_area_square trench_description of excavation object (e.g.: TD_F-I_o19_Planum1 means Tell el-Daba, area F-I, square o19, level 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Excavation_object_ID",
    )
    archaeological_object_id = models.ManyToManyField(
        "ArchaeologicalObjectID",
        related_name='rvn_videos_archaeological_object_id_archaeologicalobjectid',
        blank=True,
        verbose_name="Archaeological object ID",
        help_text="The unique identifier of an archaeological object. Archaeological objects are all objects that were created in the past, e.g. in the Bronze Age. An archaeological object ID contains the abbreviation of site_area_square trench_name of archaeological object (e.g.: TD_F-I_o19_Grab1 means Tell el-Daba, area F-I, square o19, grave 1).",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Archaeological_object_ID",
    )
    original_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment on the original document",
        help_text="Comments from the creation of the original resource.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Original_comment",
    )
    digitisation_comment = models.TextField(
        blank=True, null=True,
        verbose_name="Comment from digitisation",
        help_text="Comments from digitisation.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Digitisation_comment",
        arche_prop="hasNote",
    )
    file_extension_original = models.ForeignKey(
        SkosConcept,
        related_name='rvn_videos_file_extension_original_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of original document",
        help_text="helptext for file_extension_original",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__File_extension_original",
    )
    file_extension_archivalobject = models.ForeignKey(
        SkosConcept,
        related_name='rvn_videos_file_extension_archivalobject_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="File extension of archival object",
        help_text="helptext for file_extension_archivalobject",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__File_extension_archivalObject",
    )
    copyright = models.ForeignKey(
        SkosConcept,
        related_name='rvn_videos_copyright_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Copyright",
        help_text="helptext for copyright",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Copyright",
    )
    access = models.ForeignKey(
        SkosConcept,
        related_name='rvn_videos_access_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Access",
        help_text="Whether access to the resource is restricted or if it is open to the public.",
    ).set_extra(
        is_public=True,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Access",
        arche_prop="hasAccessRestriction",
    )
    site_id = models.ForeignKey(
        SkosConcept,
        related_name='rvn_videos_site_id_skosconcept',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Site ID",
        help_text="Abbreviation of Tell el-Daba is 'TD'.",
    ).set_extra(
        is_public=False,
        data_lookup="excel2csv/archiv/4DP_Metadaten_Video/Video_metadata.csv__Site_ID",
    )
    # Raw CSV row this record was imported from.
    orig_data_csv = models.TextField(
        blank=True,
        null=True,
        verbose_name="The original data"
    ).set_extra(
        is_public=True
    )
    # fc_* fields hold per-file results of the FileChecker tool.
    fc_name = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field name"
    ).set_extra(
        is_public=False
    )
    fc_directory = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field directory"
    ).set_extra(
        is_public=False,
    )
    fc_type = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field type"
    ).set_extra(
        is_public=False
    )
    fc_filename = models.TextField(
        blank=True,
        null=True,
        verbose_name="filechecker field filename"
    ).set_extra(
        is_public=False
    )
    fc_extension = models.CharField(
        blank=True,
        null=True,
        max_length=40,
        verbose_name="filechecker field extension"
    ).set_extra(
        is_public=False
    )
    fc_match = models.BooleanField(
        default=False,
        verbose_name="Matches FileChecker Entry",
    )

    class Meta:
        # Default queryset ordering: alphabetical by filename.
        ordering = [
            'filename',
        ]
        verbose_name = "Videos"

    def __str__(self):
        """Filename if set, otherwise the legacy id."""
        if self.filename:
            return "{}".format(self.filename)
        else:
            return "{}".format(self.legacy_id)

    def field_dict(self):
        """Return all model fields of this instance as a plain dict."""
        return model_to_dict(self)

    @classmethod
    def import_in_arche(cls):
        """Records of this model are imported into ARCHE."""
        # Fix: classmethod first parameter renamed self -> cls.
        return True

    @classmethod
    def get_listview_url(cls):
        """URL of the browse/list view."""
        return reverse('archiv:videos_browse')

    @classmethod
    def get_createview_url(cls):
        """URL of the create view."""
        return reverse('archiv:videos_create')

    def get_absolute_url(self):
        """Canonical detail URL (fix: duplicate definition removed)."""
        return reverse('archiv:videos_detail', kwargs={'pk': self.id})

    def get_delete_url(self):
        """URL that deletes this record."""
        return reverse('archiv:videos_delete', kwargs={'pk': self.id})

    def get_edit_url(self):
        """URL of the edit form."""
        return reverse('archiv:videos_edit', kwargs={'pk': self.id})

    def get_next(self):
        """Detail URL of the record with the next-higher id, or False.

        Fixes: no shadowing of builtin ``next``; explicit order_by('id')
        (Meta.ordering is 'filename', so the implicit order was
        asymmetric with get_prev's '-id'); single query.
        """
        successor = self.__class__.objects.filter(
            id__gt=self.id
        ).order_by('id').first()
        if successor is not None:
            return reverse(
                'archiv:videos_detail',
                kwargs={'pk': successor.id}
            )
        return False

    def get_prev(self):
        """Detail URL of the record with the next-lower id, or False."""
        predecessor = self.__class__.objects.filter(
            id__lt=self.id
        ).order_by('-id').first()
        if predecessor is not None:
            return reverse(
                'archiv:videos_detail',
                kwargs={'pk': predecessor.id}
            )
        return False
class WallpaintingInventory(models.Model):
""" Digitised inventory of wallpaintings """
legacy_id = models.CharField(
max_length=300, blank=True,
verbose_name="Legacy ID"
)
creator_metadata = models.ForeignKey(
"Actor",
related_name='rvn_wallpaintinginventory_creator_metadata_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of metadata",
help_text="helptext for creator_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Creator_Metadata",
arche_prop="hasMetadataCreator",
)
creator_original = models.ForeignKey(
"Actor",
related_name='rvn_wallpaintinginventory_creator_original_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of original ",
help_text="helptext for creator_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Creator_original",
)
creator_scan = models.ForeignKey(
"Actor",
related_name='rvn_wallpaintinginventory_creator_scan_actor',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Creator of scan",
help_text="helptext for creator_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Creator_scan",
arche_prop="hasDigitisingAgent",
)
document_type = models.ForeignKey(
"DocumentTypes",
related_name='rvn_wallpaintinginventory_document_type_documenttypes',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Document type",
help_text="helptext for document_type",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Document_type",
)
filename = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename",
help_text="Consists of the document_ID (unique identifier) and the document_title (description of the content of the document), separated by two underscores.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Filename",
arche_prop="hasAlternativeTitle",
)
document_id = models.CharField(
max_length=250,
blank=True,
verbose_name="Document ID",
help_text="The project-specific unique identifier of the document. It consists of the abbreviation for the site (TD for Tell el-Daba), the abbreviation for the document type (e.g. DR for Digital Resource) and an inventory number (or, if there was no inventory number, an ID with the prefix 4DPuzzle was created, e.g. 4DPuzzle1234).",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Document_ID",
arche_prop="hasNonLinkedIdentifier",
arche_prop_str_template="4DP document ID: <value>",
)
document_title = models.CharField(
max_length=250,
blank=True,
verbose_name="Document title",
help_text="A description of the content of the document. It allows information about the contents of the file to be understood by a human being without opening it. ",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Document_title",
arche_prop="hasAlternativeTitle",
)
filename_old = models.CharField(
max_length=250,
blank=True,
verbose_name="Filename old",
help_text="helptext for filename_old",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Filename_old",
)
creation_date_original = models.DateField(
blank=True, null=True,
verbose_name="Creation date of original document",
help_text="helptext for creation_date_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Creation_date_original",
)
creation_year_original = models.CharField(
max_length=250,
blank=True,
verbose_name="Creation year of original document",
help_text="helptext for creation_year_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Creation_year_original",
arche_prop="hasCreatedDateOriginal"
)
creation_date_scan = models.DateField(
blank=True, null=True,
verbose_name="Creation date of scan",
help_text="helptext for creation_date_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Creation_date_scan",
arche_prop="hasCreatedDate",
)
creation_date_metadata = models.DateField(
blank=True, null=True,
verbose_name="Creation date of metadata",
help_text="helptext for creation_date_metadata",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Creation_date_metadata",
)
storage_folder_original = models.CharField(
max_length=250,
blank=True,
verbose_name="Storage folder of original wallpainting",
help_text="helptext for storage_folder_original",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Storage_folder_original",
)
resolution_scan_dpi = models.IntegerField(
blank=True, null=True,
verbose_name="Scan resolution",
help_text="helptext for resolution_scan_dpi",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Resolution_scan_dpi",
arche_prop="hasTechnicalInfo",
arche_prop_str_template="<value> dpi",
)
fresco_inventory_number = models.CharField(
max_length=250,
blank=True,
verbose_name="Fresco inventory number",
help_text="helptext for fresco_inventory_number",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Fresco_inventory_number",
)
original_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment on the original document",
help_text="Comments from the creation of the original resource.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Original_comment",
)
digitisation_comment = models.TextField(
blank=True, null=True,
verbose_name="Comment from digitisation",
help_text="Comments from digitisation.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Digitisation_comment",
arche_prop="hasNote",
)
file_extension = models.ForeignKey(
SkosConcept,
related_name='rvn_wallpaintinginventory_file_extension_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="File extension ",
help_text="helptext for file_extension",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__File_extension",
)
copyright = models.ForeignKey(
SkosConcept,
related_name='rvn_wallpaintinginventory_copyright_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Copyright",
help_text="helptext for copyright",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Copyright",
)
access = models.ForeignKey(
SkosConcept,
related_name='rvn_wallpaintinginventory_access_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Access",
help_text="Whether access to the resource is restricted or if it is open to the public.",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Access",
arche_prop="hasAccessRestriction",
)
site_id = models.ForeignKey(
SkosConcept,
related_name='rvn_wallpaintinginventory_site_id_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Site ID",
help_text="Abbreviation of Tell el-Daba is 'TD'.",
).set_extra(
is_public=False,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Site_ID",
)
equipment_scan = models.ForeignKey(
SkosConcept,
related_name='rvn_wallpaintinginventory_equipment_scan_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Equipment for scan",
help_text="helptext for equipment_scan",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Equipment_scan",
arche_prop="hasUsedHardware",
)
source_original_copy_edited_copy = models.ForeignKey(
SkosConcept,
related_name='rvn_wallpaintinginventory_source_original_copy_edited_copy_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Wheter source is a original or a copy",
help_text="helptext for source_original_copy_edited_copy",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Source__original_copy_edited-copy",
)
original_material = models.ForeignKey(
SkosConcept,
related_name='rvn_wallpaintinginventory_original_material_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Material of original document",
help_text="helptext for original_material",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Original_material",
)
excavation_post_excavation = models.ForeignKey(
SkosConcept,
related_name='rvn_wallpaintinginventory_excavation_post_excavation_skosconcept',
on_delete=models.SET_NULL,
null=True,
blank=True,
verbose_name="Whether it was created during excavation or after (post-excavation)",
help_text="helptext for excavation_post_excavation",
).set_extra(
is_public=True,
data_lookup="excel2csv/archiv/4DP_Metadaten_Freskeninventar/Fresco_inventory.csv__Excavation__post_excavation",
)
orig_data_csv = models.TextField(
blank=True,
null=True,
verbose_name="The original data"
).set_extra(
is_public=True
)
fc_name = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field name"
).set_extra(
is_public=False
)
fc_directory = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field directory"
).set_extra(
is_public=False,
)
fc_type = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field type"
).set_extra(
is_public=False
)
fc_filename = models.TextField(
blank=True,
null=True,
verbose_name="filechecker field filename"
).set_extra(
is_public=False
)
fc_extension = models.CharField(
blank=True,
null=True,
max_length=40,
verbose_name="filechecker field extension"
).set_extra(
is_public=False
)
fc_match = models.BooleanField(
default=False,
verbose_name="Matches FileChecker Entry",
)
class Meta:
    # Default queryset ordering: by the associated file's name.
    ordering = [
        'filename',
    ]
    # German admin/UI label ("fresco inventory").
    verbose_name = "Freskeninventar"
def __str__(self):
if self.filename:
return "{}".format(self.filename)
else:
return "{}".format(self.legacy_id)
def field_dict(self):
    """Return this instance's concrete fields as a plain dict (Django's model_to_dict)."""
    return model_to_dict(self)
@classmethod
def get_listview_url(cls):
    """Return the URL of this model's browse/list view."""
    # `cls`, not `self`: the implicit first argument of a classmethod is
    # the class object.
    return reverse('archiv:wallpaintinginventory_browse')
@classmethod
def get_createview_url(cls):
    """Return the URL of this model's create view."""
    # `cls`, not `self`: the implicit first argument of a classmethod is
    # the class object.
    return reverse('archiv:wallpaintinginventory_create')
def get_absolute_url(self):
    """Return the canonical detail-view URL for this object."""
    # This method was defined twice verbatim; the redundant second
    # definition (which silently shadowed the first) has been removed.
    return reverse('archiv:wallpaintinginventory_detail', kwargs={'pk': self.id})
def get_delete_url(self):
    """Return the URL of the delete view for this object."""
    return reverse('archiv:wallpaintinginventory_delete', kwargs={'pk': self.id})
def get_edit_url(self):
    """Return the URL of the edit view for this object."""
    return reverse('archiv:wallpaintinginventory_edit', kwargs={'pk': self.id})
def get_next(self):
    """Return the detail URL of the record with the next-higher id, or False.

    Orders explicitly by 'id': the model's Meta ordering is ['filename'],
    so without order_by() the "next" row would be the alphabetically first
    filename among all higher ids — inconsistent with get_prev, which
    already orders by '-id'. Also avoids shadowing the builtin `next`.
    """
    successor = self.__class__.objects.filter(id__gt=self.id).order_by('id').first()
    if successor:
        return reverse(
            'archiv:wallpaintinginventory_detail',
            kwargs={'pk': successor.id}
        )
    return False
def get_prev(self):
    """Return the detail URL of the record with the next-lower id, or False."""
    predecessor = (
        self.__class__.objects.filter(id__lt=self.id).order_by('-id').first()
    )
    if predecessor is None:
        return False
    return reverse(
        'archiv:wallpaintinginventory_detail',
        kwargs={'pk': predecessor.id}
    )
| 36.65655
| 568
| 0.671488
| 41,839
| 370,781
| 5.617964
| 0.013671
| 0.042774
| 0.034801
| 0.055682
| 0.954495
| 0.943965
| 0.929453
| 0.906773
| 0.889662
| 0.866641
| 0
| 0.00857
| 0.241215
| 370,781
| 10,114
| 569
| 36.660174
| 0.826884
| 0.00291
| 0
| 0.748258
| 1
| 0.008946
| 0.37218
| 0.208958
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034745
| false
| 0
| 0.001768
| 0.024654
| 0.179653
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8dfb2f7d9e62aa1f85ec582abb95b1551d6441ad
| 134
|
py
|
Python
|
sem_02/lab_06/src/queueing_system/__init__.py
|
bmstu-ics7/modeling
|
30f089cab44e8c56886acf6433abe8cc252ae722
|
[
"MIT"
] | 2
|
2020-06-08T18:50:29.000Z
|
2021-02-27T11:20:44.000Z
|
sem_02/lab_06/src/queueing_system/__init__.py
|
bmstu-ics7/modeling
|
30f089cab44e8c56886acf6433abe8cc252ae722
|
[
"MIT"
] | null | null | null |
sem_02/lab_06/src/queueing_system/__init__.py
|
bmstu-ics7/modeling
|
30f089cab44e8c56886acf6433abe8cc252ae722
|
[
"MIT"
] | 1
|
2020-10-22T10:49:42.000Z
|
2020-10-22T10:49:42.000Z
|
import queueing_system.distribution
import queueing_system.generator
import queueing_system.processor
import queueing_system.modeller
| 26.8
| 35
| 0.910448
| 16
| 134
| 7.375
| 0.4375
| 0.474576
| 0.677966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059701
| 134
| 4
| 36
| 33.5
| 0.936508
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5c34013f96ffdcc8f84019f432d6bb8aa56d7424
| 392
|
py
|
Python
|
tests/fixtures-example.py
|
jhsu98/zws-py
|
27d29b17a40918e905b7830cf4c4a1ed88dae56b
|
[
"MIT"
] | 4
|
2022-01-03T00:25:45.000Z
|
2022-02-04T21:51:25.000Z
|
tests/fixtures-example.py
|
jhsu98/zws-py
|
27d29b17a40918e905b7830cf4c4a1ed88dae56b
|
[
"MIT"
] | 1
|
2022-02-04T19:12:25.000Z
|
2022-02-04T21:40:18.000Z
|
tests/fixtures-example.py
|
jhsu98/zws-py
|
27d29b17a40918e905b7830cf4c4a1ed88dae56b
|
[
"MIT"
] | null | null | null |
"""
To use pytest, rename this file `fixtures.py` and add in values below
"""
from zerionAPI import IFB, DFA
import pytest
@pytest.fixture
def server():
    # Zerion server identifier used by the API clients — fill in before running.
    return ''
@pytest.fixture
def client_key():
    # API client key — fill in before running.
    return ''
@pytest.fixture
def client_secret():
    # API client secret — fill in before running.
    return ''
@pytest.fixture
def ifb_client():
    # Pre-built IFB API client; supply (server, client_key, client_secret).
    return IFB('', '', '')
@pytest.fixture
def dfa_client():
    # Pre-built DFA API client; supply (server, client_key, client_secret).
    return DFA('', '', '')
| 15.68
| 69
| 0.653061
| 51
| 392
| 4.941176
| 0.490196
| 0.257937
| 0.31746
| 0.261905
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188776
| 392
| 25
| 70
| 15.68
| 0.792453
| 0.17602
| 0
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| true
| 0
| 0.117647
| 0.294118
| 0.705882
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
30a01b24ea8278eef36d9695eaad72793c4b9af8
| 185
|
py
|
Python
|
devon/web/shutdown.py
|
joehewitt/devon
|
5b11265e5eae3db7bfaeb49543a2a6293bd15557
|
[
"BSD-3-Clause"
] | 3
|
2015-12-25T16:26:02.000Z
|
2016-05-08T18:19:25.000Z
|
devon/web/shutdown.py
|
joehewitt/devon
|
5b11265e5eae3db7bfaeb49543a2a6293bd15557
|
[
"BSD-3-Clause"
] | null | null | null |
devon/web/shutdown.py
|
joehewitt/devon
|
5b11265e5eae3db7bfaeb49543a2a6293bd15557
|
[
"BSD-3-Clause"
] | 1
|
2021-07-13T07:17:01.000Z
|
2021-07-13T07:17:01.000Z
|
import devon.server.web
# **************************************************************************************************
def main(request):
    # Web endpoint handler: stops the running Devon web server.
    # `request` is part of the handler signature but is unused here.
    devon.server.web.stopServer()
| 23.125
| 100
| 0.313514
| 11
| 185
| 5.272727
| 0.727273
| 0.37931
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097297
| 185
| 8
| 101
| 23.125
| 0.347305
| 0.52973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
30bba27cc365fcfd2dbdafd50ba9db454abf18e0
| 273
|
py
|
Python
|
UDEMY-Learn Python Programming Masterclass/Section 9-Modules and Functions in Python/import_webbrowser.py
|
Sanjay9921/Python
|
05ac161dd46f9b4731a5c14ff5ef52adb705e8e6
|
[
"MIT"
] | null | null | null |
UDEMY-Learn Python Programming Masterclass/Section 9-Modules and Functions in Python/import_webbrowser.py
|
Sanjay9921/Python
|
05ac161dd46f9b4731a5c14ff5ef52adb705e8e6
|
[
"MIT"
] | null | null | null |
UDEMY-Learn Python Programming Masterclass/Section 9-Modules and Functions in Python/import_webbrowser.py
|
Sanjay9921/Python
|
05ac161dd46f9b4731a5c14ff5ef52adb705e8e6
|
[
"MIT"
] | null | null | null |
import webbrowser
# webbrowser.open("https://www.youtube.co.in/")
# help(webbrowser)
# chrome = webbrowser.get("/usr/bin/google-chrome %s").open_new_tab("https://www.youtube.co.in/")
# safari = webbrowser.get(using = "safari").open_new_tab("https://www.youtube.co.in/")
| 30.333333
| 97
| 0.70696
| 40
| 273
| 4.725
| 0.475
| 0.126984
| 0.238095
| 0.269841
| 0.407407
| 0.306878
| 0.306878
| 0.306878
| 0
| 0
| 0
| 0
| 0.07326
| 273
| 9
| 98
| 30.333333
| 0.747036
| 0.89011
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eb5c4d8db79be4055fc156c80ca33fb6e01d53fc
| 161
|
py
|
Python
|
mediaproxy/http/__init__.py
|
media-proxy/mediaproxy.core
|
1733eb484a821c9a2215af7048f58461a1c114e8
|
[
"MIT"
] | null | null | null |
mediaproxy/http/__init__.py
|
media-proxy/mediaproxy.core
|
1733eb484a821c9a2215af7048f58461a1c114e8
|
[
"MIT"
] | null | null | null |
mediaproxy/http/__init__.py
|
media-proxy/mediaproxy.core
|
1733eb484a821c9a2215af7048f58461a1c114e8
|
[
"MIT"
] | null | null | null |
#~ # coding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import with_statement
from .server import run
| 23
| 39
| 0.832298
| 22
| 161
| 5.409091
| 0.590909
| 0.252101
| 0.403361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007143
| 0.130435
| 161
| 6
| 40
| 26.833333
| 0.842857
| 0.099379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ccdb5bf96d89291cdd51f544b8c80cded839e5ca
| 31,647
|
py
|
Python
|
tests/test_phonons.py
|
rosenbrockc/phonon-enumeration
|
a7878814e58eb6bcd993bec416cae72bb4585d69
|
[
"MIT-0"
] | 1
|
2022-01-13T02:57:55.000Z
|
2022-01-13T02:57:55.000Z
|
tests/test_phonons.py
|
skphy/phonon-enumeration
|
a7878814e58eb6bcd993bec416cae72bb4585d69
|
[
"MIT-0"
] | null | null | null |
tests/test_phonons.py
|
skphy/phonon-enumeration
|
a7878814e58eb6bcd993bec416cae72bb4585d69
|
[
"MIT-0"
] | 1
|
2021-11-30T02:35:26.000Z
|
2021-11-30T02:35:26.000Z
|
"""Methods for testing the subroutines in the phonons module."""
import unittest as ut
def _read_output(test):
values = []
with open("tests/phonons/"+test) as f:
for line in f:
values.append(eval(line))
return values
class TestGetArrowConcs(ut.TestCase):
""" Tests of the get_arrow_concs subroutine."""
def test1(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 4,
"basis_vecs": [],
"is_crestricted": False,
"arrows": False,
"concs": []
}
self.assertEqual(get_arrow_concs(params),[0,0,0,0])
def test2(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 2,
"basis_vecs": [],
"is_crestricted": True,
"arrows": False,
"concs": [[1, 4, 4, 0],[2, 4, 4, 0]]
}
self.assertEqual(get_arrow_concs(params),[0,0])
def test3(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 3,
"basis_vecs": [],
"is_crestricted": True,
"arrows": True,
"concs": [[0, 3, 6, 1],[3, 6, 6, 2],[0, 6, 6, 0]]
}
self.assertEqual(get_arrow_concs(params),[1,2,0])
def test4(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 10,
"basis_vecs": [],
"is_crestricted": False,
"arrows": False,
"concs": []
}
self.assertEqual(get_arrow_concs(params),[0,0,0,0,0,0,0,0,0,0])
def test5(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 1,
"basis_vecs": [],
"is_crestricted": True,
"arrows": False,
"concs": [[0, 4, 4, 10]]
}
self.assertEqual(get_arrow_concs(params),[0])
def test6(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 5,
"basis_vecs": [],
"is_crestricted": True,
"arrows": True,
"concs": [[0, 3, 6, 1],[3, 6, 6, 2],[0, 6, 6, 0],[0, 3, 6, 10],[3, 6, 6, 1]]
}
self.assertEqual(get_arrow_concs(params),[1,2,0,10,1])
def test7(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 3,
"basis_vecs": [],
"is_crestricted": True,
"arrows": True,
"concs": [[0, 3, 6, 0],[3, 6, 6, 0],[0, 6, 6, 0]]
}
self.assertEqual(get_arrow_concs(params),[0,0,0])
def test8(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 4,
"basis_vecs": [],
"is_crestricted": False,
"arrows": False,
"concs": [[0, 3, 6, 3],[3, 6, 6, 2],[0, 6, 6, 1],[0, 6, 6, 1]]
}
self.assertEqual(get_arrow_concs(params),[0,0,0,0])
def test9(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 1,
"basis_vecs": [],
"is_crestricted": True,
"arrows": True,
"concs": [[0, 6, 6, 5]]
}
self.assertEqual(get_arrow_concs(params),[5])
def test10(self):
from phenum.phonons import get_arrow_concs
params = {
"bulk": True,
"sizes": [],
"lat_vecs": [],
"nspecies": 2,
"basis_vecs": [],
"is_crestricted": True,
"arrows": True,
"concs": [[0, 3, 6, 2],[3, 6, 6, 3]]
}
self.assertEqual(get_arrow_concs(params),[2,3])
class TestArrowConcs(ut.TestCase):
    """Tests of the arrow_concs subroutine."""

    def _check(self, cList, aconcs, expected):
        # Shared assertion: arrow_concs(cList, aconcs) must equal `expected`.
        from phenum.phonons import arrow_concs
        self.assertEqual(arrow_concs(cList, aconcs), expected)

    def test1(self):
        self._check([1, 2, 1], [0, 0.4245868629437351, 0],
                    [[-1, 1], [-1, 3], [-1, 2], [-1, 2]])

    def test2(self):
        self._check([3], [0.8205195542173467],
                    [[-1, 1], [1, 1], [1, 1]])

    def test3(self):
        self._check([10, 3, 1], [0, 0.4989661535030203, 0],
                    [[-1, 3], [-1, 2], [-1, 2], [-1, 1], [-1, 1], [-1, 1],
                     [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1],
                     [-1, 1], [1, 2]])

    def test4(self):
        self._check([2, 4, 1, 5, 2, 1, 1],
                    [0.9068065455664464, 0.2858477549741846, 0, 0,
                     0.6957735268097871, 0, 0],
                    [[-1, 1], [-1, 3], [-1, 5], [-1, 6], [-1, 7], [-1, 2],
                     [-1, 2], [-1, 2], [-1, 4], [-1, 4], [-1, 4], [-1, 4],
                     [-1, 4], [1, 1], [1, 2], [1, 5]])

    def test5(self):
        self._check([3], [0.2871674398220775],
                    [[-1, 1], [-1, 1], [-1, 1]])

    def test6(self):
        self._check([3, 1], [0.32514696876724436, 0],
                    [[-1, 2], [-1, 1], [-1, 1], [-1, 1]])

    def test7(self):
        self._check([1], [0], [[-1, 1]])

    def test8(self):
        self._check([2, 8, 3, 1],
                    [0.8244881520042212, 0.33517966472359717,
                     0.677253228566329, 0],
                    [[-1, 1], [-1, 3], [-1, 4], [-1, 2], [-1, 2], [-1, 2],
                     [-1, 2], [-1, 2], [-1, 2], [1, 1], [1, 2], [1, 2],
                     [1, 3], [1, 3]])

    def test9(self):
        self._check([4, 1, 1, 1], [0, 0, 0, 0],
                    [[-1, 2], [-1, 3], [-1, 4], [-1, 1], [-1, 1], [-1, 1],
                     [-1, 1]])

    def test10(self):
        # [-1, 2] once, then eighteen [-1, 1] sites.
        self._check([18, 1], [0, 0],
                    [[-1, 2]] + [[-1, 1] for _ in range(18)])

    def test11(self):
        self._check([2, 0, 1], [0, 0, 0],
                    [[-1, 3], [-1, 1], [-1, 1]])

    def test12(self):
        self._check([3, 0, 2], [0, 0, 0.5],
                    [[-1, 3], [-1, 1], [-1, 1], [-1, 1], [1, 3]])
class TestHowManyArrows(ut.TestCase):
"""Tests of the how_many_arrows subroutine."""
def test1(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 2], [-1, 2], [-1, 1], [-1, 3]]
out = (0,0,[2,1,1])
self.assertEqual(how_many_arrows(tcol),out)
def test2(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 1], [1, 1], [1, 1]]
out = (2,1,[1,2])
self.assertEqual(how_many_arrows(tcol),out)
def test3(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1],
[-1, 1], [-1, 2], [-1, 2], [-1, 3], [1, 2]]
out = (1,1,[10,2,1,1])
self.assertEqual(how_many_arrows(tcol),out)
def test4(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 4], [-1, 4], [-1, 4], [-1, 4], [-1, 4], [-1, 2], [-1, 2], [-1, 2], [-1, 1],
[-1, 3], [-1, 5], [-1, 6], [-1, 7], [1, 1], [1, 2], [1, 5]]
out = (3,3,[5,3,1,1,1,1,1,1,1,1])
self.assertEqual(how_many_arrows(tcol),out)
def test5(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 1], [-1, 1], [-1, 1]]
out = (0,0,[3])
self.assertEqual(how_many_arrows(tcol),out)
def test6(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 1], [-1, 1], [-1, 1], [-1, 2]]
out = (0,0,[3,1])
self.assertEqual(how_many_arrows(tcol),out)
def test7(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 1]]
out = (0,0,[1])
self.assertEqual(how_many_arrows(tcol),out)
def test8(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 2], [-1, 2], [-1, 2], [-1, 2], [-1, 2], [-1, 2], [-1, 1], [-1, 3], [-1, 4],
[1, 2], [1, 2], [1, 3], [1, 3], [1, 1]]
out = (5,3,[6,1,1,1,2,2,1])
self.assertEqual(how_many_arrows(tcol),out)
def test9(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 2], [-1, 3], [-1, 4]]
out = (0,0,[4,1,1,1])
self.assertEqual(how_many_arrows(tcol),out)
def test10(self):
from phenum.phonons import how_many_arrows
tcol = [[-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1],
[-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1],
[-1, 2]]
out = (0,0,[18,1])
self.assertEqual(how_many_arrows(tcol),out)
class TestEnumSys(ut.TestCase):
"""Tests of the enum_sys subroutine."""
def test1(self):
from phenum.phonons import enum_sys
from numpy import array
groupfile = "tests/phonons/test_group.1"
concs = [1,2]
a_cons = [0,0]
num_wanted = 1
HNF = array([1,0,1,0,2,3])
params ={'bulk': True, 'nspecies': 2, 'concs': [], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [1, 11], 'lat_vecs': [[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]], 'arrows': False, 'is_crestricted': False}
out = [[[-1, 1], [-1, 2], [-1, 2]]]
self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
def test2(self):
from phenum.phonons import enum_sys
from numpy import array
groupfile = "tests/phonons/test_group.2"
concs = [3,3]
a_cons = [0,0]
num_wanted = 3
HNF = array([1,0,1,0,0,6])
params = {'bulk': True, 'nspecies': 2, 'concs': [], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [1, 11], 'lat_vecs': [[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]], 'arrows': False, 'is_crestricted': False}
out = [[[-1, 1], [-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 2]], [[-1, 1], [-1, 1], [-1, 2], [-1, 1], [-1, 2], [-1, 2]], [[-1, 1], [-1, 2], [-1, 1], [-1, 2], [-1, 1], [-1, 2]]]
self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
def test3(self):
from numpy import array
from phenum.phonons import enum_sys
groupfile = "tests/phonons/test_group.3"
concs = [4,3]
a_cons = [0,0]
num_wanted = 4
HNF = array([1,0,1,1,2,7])
params = {'bulk': True, 'nspecies': 2, 'concs': [], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [1, 11], 'lat_vecs': [[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]], 'arrows': False, 'is_crestricted': False}
out = _read_output("enum_sys.out.3")
self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
def test4(self):
from phenum.phonons import enum_sys
from numpy import array
groupfile = "tests/phonons/test_group.4"
concs = [3,4]
a_cons = [0,0]
num_wanted = 2
HNF = array([1,0,1,1,3,7])
params = {'bulk': True, 'nspecies': 2, 'concs': [], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [1, 11], 'lat_vecs': [[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]], 'arrows': False, 'is_crestricted': False}
out = [[[-1, 1], [-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 2], [-1, 2]], [[-1, 1], [-1, 1], [-1, 2], [-1, 1], [-1, 2], [-1, 2], [-1, 2]]]
self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
def test5(self):
from phenum.phonons import enum_sys
from numpy import array
groupfile = "tests/phonons/test_group.5"
concs = [4,4]
a_cons = [0,0]
num_wanted = 10
HNF = array([1,0,2,0,0,4])
params = {'bulk': True, 'nspecies': 2, 'concs': [], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [1, 11], 'lat_vecs': [[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]], 'arrows': False, 'is_crestricted': False}
out = [[[-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 2], [-1, 2]], [[-1, 1], [-1, 1], [-1, 1], [-1, 2], [-1, 1], [-1, 2], [-1, 2], [-1, 2]], [[-1, 1], [-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 1], [-1, 2], [-1, 2]], [[-1, 1], [-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 2], [-1, 2], [-1, 1]], [[-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 1], [-1, 1], [-1, 2], [-1, 2]], [[-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 1], [-1, 2], [-1, 1], [-1, 2]], [[-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 1], [-1, 2], [-1, 2], [-1, 1]], [[-1, 1], [-1, 1], [-1, 2], [-1, 2], [-1, 2], [-1, 2], [-1, 1], [-1, 1]], [[-1, 1], [-1, 2], [-1, 1], [-1, 2], [-1, 1], [-1, 2], [-1, 1], [-1, 2]], [[-1, 1], [-1, 2], [-1, 1], [-1, 2], [-1, 2], [-1, 1], [-1, 2], [-1, 1]]]
self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
# def test6(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [3,1,2]
# a_cons = [0.0,0.5,0.25]
# num_wanted = 6
# HNF = array([1,0,1,0,0,6])
# params = {'bulk': True, 'nspecies': 3, 'concs': [[1.0, 6.0, 12.0, 0.0], [1.0, 9.0, 12.0, 0.5], [1.0, 12.0, 12.0, 0.25]], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [6, 6], 'lat_vecs': [[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]], 'arrows': True, 'is_crestricted': True}
# out = _read_output("enum_sys.out.6")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
# def test7(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [1,4,1]
# a_cons = [0.0,0.5,0.25]
# num_wanted = 124
# HNF = array([1,0,1,0,5,6])
# params = {'bulk': True, 'nspecies': 3, 'concs': [[1.0, 6.0, 12.0, 0.0], [1.0, 9.0, 12.0, 0.5], [1.0, 12.0, 12.0, 0.25]], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [3, 3], 'lat_vecs': [[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]], 'arrows': True, 'is_crestricted': True}
# out = _read_output("enum_sys.out.7")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
# def test8(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [1,3]
# a_cons = [0.0,0.75]
# num_wanted = 19
# HNF = array([1,0,1,0,1,2])
# params = {'bulk': True, 'nspecies': 2, 'concs': [[1.0, 6.0, 12.0, 0.0], [1.0, 9.0, 12.0, 0.75]], 'basis_vecs': [[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]], 'sizes': [2, 2], 'lat_vecs': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], 'arrows': True, 'is_crestricted': True}
# out = _read_output("enum_sys.out.8")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
# def test9(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [2,5]
# a_cons = [0.25,0.75]
# num_wanted = 738
# HNF = array([1,0,1,0,0,7])
# params = {'bulk': True, 'nspecies': 2, 'concs': [[1.0, 6.0, 12.0, 0.25], [1.0, 9.0, 12.0, 0.75]], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [7, 7], 'lat_vecs': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], 'arrows': True, 'is_crestricted': True}
# out = _read_output("enum_sys.out.9")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
# def test10(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [1,1,1,1]
# a_cons = [0.0,0.0,1.0,1.0]
# num_wanted = 36
# HNF = array([1,0,2,0,0,2])
# params = {'bulk': True, 'nspecies': 4, 'concs': [[0.0, 4.0, 4.0, 0.0], [0.0, 4.0, 4.0, 0.0], [0.0, 4.0, 4.0, 1.0], [0.0, 4.0, 4.0, 1.0]], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [4, 4], 'lat_vecs': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], 'arrows': True, 'is_crestricted': True}
# out = _read_output("enum_sys.out.10")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
# def test11(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [1,0,1,1,1]
# a_cons = [0.0,0.0,0.0,1.0,1.0]
# num_wanted = 36
# HNF = array([1,0,2,0,0,2])
# params = {'bulk': True, 'nspecies': 4, 'concs': [[0.0, 4.0, 4.0, 0.0], [0.0, 4.0, 4.0, 0.0], [0.0, 4.0, 4.0, 1.0], [0.0, 4.0, 4.0, 1.0]], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [4, 4], 'lat_vecs': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], 'arrows': True, 'is_crestricted': True}
# out = _read_output("enum_sys.out.11")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
# def test12(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [2,0,5]
# a_cons = [0.25,0.0,0.75]
# num_wanted = 738
# HNF = array([1,0,1,0,0,7])
# params = {'bulk': True, 'nspecies': 2, 'concs': [[1.0, 6.0, 12.0, 0.25], [1.0, 9.0, 12.0, 0.75]], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [7, 7], 'lat_vecs': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], 'arrows': True, 'is_crestricted': True}
# out = _read_output("enum_sys.out.12")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,True),out)
# def test13(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [0,0,7]
# a_cons = [0.0,0.0,0.0]
# num_wanted = 738
# HNF = array([1,0,1,0,0,7])
# params = {'bulk': True, 'nspecies': 2, 'concs': [[1.0, 6.0, 12.0, 0.25], [1.0, 9.0, 12.0, 0.75]], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [7, 7], 'lat_vecs': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], 'arrows': True, 'is_crestricted': True}
# out = []
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,False),out)
# def test14(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = None
# concs = [1,0,1,1,1]
# a_cons = [0.0,0.0,0.0,1.0,1.0]
# num_wanted = 36
# HNF = array([1,0,2,0,0,2])
# params = {'bulk': True, 'nspecies': 4, 'concs': [[0.0, 4.0, 4.0, 0.0], [0.0, 4.0, 4.0, 0.0], [0.0, 4.0, 4.0, 1.0], [0.0, 4.0, 4.0, 1.0]], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [4, 4], 'lat_vecs': [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], 'arrows': True, 'is_crestricted': True}
# out = _read_output("enum_sys.out.14")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,False),out)
# def test15(self):
# from phenum.phonons import enum_sys
# from numpy import array
# groupfile = "tests/phonons/test_group.5"
# concs = [4,4]
# a_cons = [0,0]
# num_wanted = 10
# HNF = array([1,0,2,0,0,4])
# params = {'bulk': True, 'nspecies': 2, 'concs': [], 'basis_vecs': [[0.0, 0.0, 0.0]], 'sizes': [1, 11], 'lat_vecs': [[0.5, 0.5, 0.0], [0.5, 0.0, 0.5], [0.0, 0.5, 0.5]], 'arrows': False, 'is_crestricted': False}
# out = _read_output("enum_sys.out.15")
# self.assertEqual(enum_sys(groupfile,concs,a_cons,num_wanted,HNF,params,False),out)
class TestAddArrows(ut.TestCase):
"""Tests of the add_arrows subroutine."""
def test1(self):
from phenum.grouptheory import get_sym_group
from phenum.phonons import add_arrows
col = [[-1, 2], [-1, 1], [1, 2], [-1, 2], [1, 2], [-1, 3]]
agroup = _read_output("add_arrow_group.in.1")
dim = 6
out = [[[-1, 2], [-1, 1], [0, 2], [-1, 2], [0, 2], [-1, 3]], [[-1, 2], [-1, 1], [1, 2], [-1, 2], [0, 2], [-1, 3]], [[-1, 2], [-1, 1], [2, 2], [-1, 2], [0, 2], [-1, 3]], [[-1, 2], [-1, 1], [5, 2], [-1, 2], [0, 2], [-1, 3]], [[-1, 2], [-1, 1], [0, 2], [-1, 2], [2, 2], [-1, 3]], [[-1, 2], [-1, 1], [2, 2], [-1, 2], [2, 2], [-1, 3]], [[-1, 2], [-1, 1], [3, 2], [-1, 2], [2, 2], [-1, 3]], [[-1, 2], [-1, 1], [4, 2], [-1, 2], [2, 2], [-1, 3]]]
self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test2(self):
from phenum.grouptheory import get_sym_group
from phenum.phonons import add_arrows
col = [[-1, 1], [1, 2], [-1, 2], [1, 2]]
agroup = _read_output("add_arrow_group.in.2")
dim = 6
out = [[[-1, 1], [0, 2], [-1, 2], [0, 2]], [[-1, 1], [1, 2], [-1, 2], [0, 2]], [[-1, 1], [5, 2], [-1, 2], [0, 2]], [[-1, 1], [0, 2], [-1, 2], [1, 2]], [[-1, 1], [1, 2], [-1, 2], [1, 2]], [[-1, 1], [2, 2], [-1, 2], [1, 2]], [[-1, 1], [4, 2], [-1, 2], [1, 2]], [[-1, 1], [5, 2], [-1, 2], [1, 2]], [[-1, 1], [0, 2], [-1, 2], [5, 2]], [[-1, 1], [1, 2], [-1, 2], [5, 2]], [[-1, 1], [5, 2], [-1, 2], [5, 2]]]
self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test3(self):
from phenum.grouptheory import get_sym_group
from phenum.phonons import add_arrows
col = [[-1, 1], [1, 2], [1, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]]
agroup = _read_output("add_arrow_group.in.3")
dim = 6
out = [[[-1, 1], [0, 2], [0, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [1, 2], [0, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [5, 2], [0, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [0, 2], [1, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [1, 2], [1, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [2, 2], [1, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [4, 2], [1, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [5, 2], [1, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [0, 2], [5, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [1, 2], [5, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [5, 2], [5, 2], [-1, 1], [-1, 2], [0, 2], [-1, 2]], [[-1, 1], [0, 2], [0, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [1, 2], [0, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [2, 2], [0, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [4, 2], [0, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [5, 2], [0, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [0, 2], [1, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [1, 2], [1, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [2, 2], [1, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [4, 2], [1, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [0, 2], [2, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [2, 2], [2, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [3, 2], [2, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [4, 2], [2, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [0, 2], [4, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [4, 2], [4, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]], [[-1, 1], [0, 2], [5, 2], [-1, 1], [-1, 2], [1, 2], [-1, 2]]]
self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test4(self):
from phenum.grouptheory import get_sym_group
from phenum.phonons import add_arrows
col = [[-1, 1], [1, 3], [1, 4], [-1, 2]]
agroup = _read_output("add_arrow_group.in.4")
dim = 6
out = [[[-1, 1], [0, 3], [0, 4], [-1, 2]], [[-1, 1], [1, 3], [0, 4], [-1, 2]], [[-1, 1], [2, 3], [0, 4], [-1, 2]], [[-1, 1], [5, 3], [0, 4], [-1, 2]], [[-1, 1], [0, 3], [1, 4], [-1, 2]], [[-1, 1], [1, 3], [1, 4], [-1, 2]], [[-1, 1], [2, 3], [1, 4], [-1, 2]], [[-1, 1], [4, 3], [1, 4], [-1, 2]], [[-1, 1], [0, 3], [2, 4], [-1, 2]], [[-1, 1], [1, 3], [2, 4], [-1, 2]], [[-1, 1], [2, 3], [2, 4], [-1, 2]], [[-1, 1], [3, 3], [2, 4], [-1, 2]]]
self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test5(self):
from phenum.grouptheory import get_sym_group
from phenum.phonons import add_arrows
col = [[-1, 1], [-1, 2], [1, 2], [-1, 1], [-1, 1], [-1, 1]]
agroup = _read_output("add_arrow_group.in.5")
dim = 6
out = [[[-1, 1], [-1, 2], [0, 2], [-1, 1], [-1, 1], [-1, 1]], [[-1, 1], [-1, 2], [1, 2], [-1, 1], [-1, 1], [-1, 1]], [[-1, 1], [-1, 2], [3, 2], [-1, 1], [-1, 1], [-1, 1]], [[-1, 1], [-1, 2], [5, 2], [-1, 1], [-1, 1], [-1, 1]]]
self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test6(self):
from phenum.grouptheory import get_sym_group
from phenum.phonons import add_arrows
col = [[-1, 1], [-1, 1], [-1, 2], [1, 2], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1]]
agroup = _read_output("add_arrow_group.in.6")
dim = 6
out = [[[-1, 1], [-1, 1], [-1, 2], [0, 2], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1]], [[-1, 1], [-1, 1], [-1, 2], [1, 2], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1]], [[-1, 1], [-1, 1], [-1, 2], [3, 2], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1]], [[-1, 1], [-1, 1], [-1, 2], [5, 2], [-1, 1], [-1, 1], [-1, 1], [-1, 1], [-1, 1]]]
self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test7(self):
from phenum.grouptheory import get_sym_group
from phenum.phonons import add_arrows
col = [[-1, 3], [-1, 3], [-1, 1], [-1, 3], [-1, 2], [-1, 3], [-1, 3], [-1, 1], [-1, 3], [1, 2]]
agroup = _read_output("add_arrow_group.in.7")
dim = 6
out = [[[-1, 3], [-1, 3], [-1, 1], [-1, 3], [-1, 2], [-1, 3], [-1, 3], [-1, 1], [-1, 3], [0, 2]], [[-1, 3], [-1, 3], [-1, 1], [-1, 3], [-1, 2], [-1, 3], [-1, 3], [-1, 1], [-1, 3], [1, 2]], [[-1, 3], [-1, 3], [-1, 1], [-1, 3], [-1, 2], [-1, 3], [-1, 3], [-1, 1], [-1, 3], [5, 2]]]
self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test8(self):
    # Variant of test7 with the arrowed site moved to index 7; the set of
    # symmetry-distinct arrow values ({0, 1, 5}) is the same.
    from phenum.grouptheory import get_sym_group  # imported for parity with sibling tests (unused here)
    from phenum.phonons import add_arrows
    col = [[-1, 3], [-1, 3], [-1, 1], [-1, 3], [-1, 2], [-1, 3], [-1, 3], [1, 2], [-1, 3], [-1, 1]]
    agroup = _read_output("add_arrow_group.in.8")  # fixture: precomputed arrow symmetry group
    dim = 6  # presumably the number of allowed arrow directions — TODO confirm
    out = [[[-1, 3], [-1, 3], [-1, 1], [-1, 3], [-1, 2], [-1, 3], [-1, 3], [0, 2], [-1, 3], [-1, 1]], [[-1, 3], [-1, 3], [-1, 1], [-1, 3], [-1, 2], [-1, 3], [-1, 3], [1, 2], [-1, 3], [-1, 1]], [[-1, 3], [-1, 3], [-1, 1], [-1, 3], [-1, 2], [-1, 3], [-1, 3], [5, 2], [-1, 3], [-1, 1]]]
    self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test9(self):
    # Two arrowed sites (indices 3 and 4): the expected output is the
    # symmetry-reduced product of arrow assignments over both sites.
    from phenum.grouptheory import get_sym_group  # imported for parity with sibling tests (unused here)
    from phenum.phonons import add_arrows
    col = [[-1, 1], [-1, 3], [-1, 2], [1, 4], [1, 2], [-1, 4], [-1, 3], [-1, 1]]
    agroup = _read_output("add_arrow_group.in.9")  # fixture: precomputed arrow symmetry group
    dim = 6  # presumably the number of allowed arrow directions — TODO confirm
    out = [[[-1, 1], [-1, 3], [-1, 2], [0, 4], [0, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [1, 4], [0, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [2, 4], [0, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [5, 4], [0, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [0, 4], [1, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [1, 4], [1, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [2, 4], [1, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [4, 4], [1, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [0, 4], [2, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [1, 4], [2, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [2, 4], [2, 2], [-1, 4], [-1, 3], [-1, 1]], [[-1, 1], [-1, 3], [-1, 2], [3, 4], [2, 2], [-1, 4], [-1, 3], [-1, 1]]]
    self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
def test10(self):
    # Two arrowed sites (indices 3 and 7); larger expected set (18 entries)
    # than test9 because fewer combinations are symmetry-equivalent here.
    from phenum.grouptheory import get_sym_group  # imported for parity with sibling tests (unused here)
    from phenum.phonons import add_arrows
    col = [[-1, 1], [-1, 2], [-1, 3], [1, 2], [-1, 3], [-1, 4], [-1, 1], [1, 4]]
    agroup = _read_output("add_arrow_group.in.10")  # fixture: precomputed arrow symmetry group
    dim = 6  # presumably the number of allowed arrow directions — TODO confirm
    out = [[[-1, 1], [-1, 2], [-1, 3], [0, 2], [-1, 3], [-1, 4], [-1, 1], [0, 4]], [[-1, 1], [-1, 2], [-1, 3], [1, 2], [-1, 3], [-1, 4], [-1, 1], [0, 4]], [[-1, 1], [-1, 2], [-1, 3], [2, 2], [-1, 3], [-1, 4], [-1, 1], [0, 4]], [[-1, 1], [-1, 2], [-1, 3], [5, 2], [-1, 3], [-1, 4], [-1, 1], [0, 4]], [[-1, 1], [-1, 2], [-1, 3], [0, 2], [-1, 3], [-1, 4], [-1, 1], [1, 4]], [[-1, 1], [-1, 2], [-1, 3], [1, 2], [-1, 3], [-1, 4], [-1, 1], [1, 4]], [[-1, 1], [-1, 2], [-1, 3], [2, 2], [-1, 3], [-1, 4], [-1, 1], [1, 4]], [[-1, 1], [-1, 2], [-1, 3], [4, 2], [-1, 3], [-1, 4], [-1, 1], [1, 4]], [[-1, 1], [-1, 2], [-1, 3], [5, 2], [-1, 3], [-1, 4], [-1, 1], [1, 4]], [[-1, 1], [-1, 2], [-1, 3], [0, 2], [-1, 3], [-1, 4], [-1, 1], [2, 4]], [[-1, 1], [-1, 2], [-1, 3], [1, 2], [-1, 3], [-1, 4], [-1, 1], [2, 4]], [[-1, 1], [-1, 2], [-1, 3], [2, 2], [-1, 3], [-1, 4], [-1, 1], [2, 4]], [[-1, 1], [-1, 2], [-1, 3], [3, 2], [-1, 3], [-1, 4], [-1, 1], [2, 4]], [[-1, 1], [-1, 2], [-1, 3], [5, 2], [-1, 3], [-1, 4], [-1, 1], [2, 4]], [[-1, 1], [-1, 2], [-1, 3], [0, 2], [-1, 3], [-1, 4], [-1, 1], [5, 4]], [[-1, 1], [-1, 2], [-1, 3], [1, 2], [-1, 3], [-1, 4], [-1, 1], [5, 4]], [[-1, 1], [-1, 2], [-1, 3], [2, 2], [-1, 3], [-1, 4], [-1, 1], [5, 4]], [[-1, 1], [-1, 2], [-1, 3], [5, 2], [-1, 3], [-1, 4], [-1, 1], [5, 4]]]
    self.assertEqual(add_arrows(col,agroup,dim,agroup[0:len(col)],supers=True),out)
| 49.294393
| 1,688
| 0.437261
| 5,308
| 31,647
| 2.532781
| 0.023926
| 0.105326
| 0.103317
| 0.086879
| 0.943544
| 0.929708
| 0.906204
| 0.872359
| 0.829441
| 0.779009
| 0
| 0.155711
| 0.277973
| 31,647
| 641
| 1,689
| 49.371295
| 0.432648
| 0.198913
| 0
| 0.615196
| 0
| 0
| 0.049939
| 0.005989
| 0
| 0
| 0
| 0
| 0.115196
| 1
| 0.117647
| false
| 0
| 0.154412
| 0
| 0.286765
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
69051d5144109c17a90d083d2a9e4c2b8a9f9b26
| 2,216
|
py
|
Python
|
core/logos.py
|
xbomber00/x
|
df6bc45dbb0c40eaeecea97bbfbdfaf34f49beb8
|
[
"MIT"
] | 14
|
2021-07-13T06:17:27.000Z
|
2022-03-21T07:04:56.000Z
|
core/logos.py
|
xbomber00/x
|
df6bc45dbb0c40eaeecea97bbfbdfaf34f49beb8
|
[
"MIT"
] | 3
|
2021-07-26T19:49:03.000Z
|
2022-03-22T14:07:20.000Z
|
core/logos.py
|
xbomber00/x
|
df6bc45dbb0c40eaeecea97bbfbdfaf34f49beb8
|
[
"MIT"
] | 3
|
2022-01-29T06:36:16.000Z
|
2022-02-20T15:58:44.000Z
|
#coding=utf-8
# ANSI colour escape sequences used by the banner strings below.
# Fix: the rcl..ncl group was originally assigned twice with identical
# values; the redundant second block has been removed.
acl = '\033[1;30m'  # bright black / grey
rcl = '\033[1;31m'  # red
gcl = '\033[1;32m'  # green
ycl = '\033[1;33m'  # yellow
bcl = '\033[1;34m'  # blue
pcl = '\033[1;35m'  # magenta
ccl = '\033[1;36m'  # cyan
wcl = '\033[1;37m'  # white
mcl = '\033[1;94m'  # bright blue
ncl = '\033[0;00m'  # reset

# Main ASCII-art banner (colour codes interpolated via f-string).
logomao=f"""
\033[1;33m {bcl}V.1.{gcl}5{ycl} _ _ \033[1;36m ____ ____ \033[1;32m ____ ___ __ __ \033[1;30mTHBD\033[0;00m
\033[1;33m|_ _| | | |\033[1;36m| __ )| _ \ \033[1;32m | __ ) / _ \| \/ | __ )
\033[1;33m | | | |_| |\033[1;36m| _ \| | | | \033[1;32m | _ \| | | | |\/| | _ \
\033[1;33m | | | _ |\033[1;36m| |_) | |_| |\033[1;32m | |_) | |_| | | | | |_) |
\033[1;33m |_| |_| |_|\033[1;36m|____/|____/\033[1;32m___|____/ \___/|_| |_|____/
|_\033[1;30mMAO\033[1;32m_|
\033[1;30m[ \033[1;34mAUTHER \033[1;30m] \033[1;32mTERMUX \033[1;32mHACKER\033[1;31m BD
\033[1;30m[\033[1;34m GITHUB\033[1;30m ] \033[1;34mMAO2116
\033[1;30m[ \033[1;34mCODER \033[1;30m] \033[1;30mMAO2116
\033[0;00m"""

# Same banner plus the "ANALYSING DATA" status line.
logomaodata=f"""
\033[1;33m {bcl}V.1.{gcl}5{ycl} _ _ \033[1;36m ____ ____ \033[1;32m ____ ___ __ __ \033[1;30mTHBD\033[0;00m
\033[1;33m|_ _| | | |\033[1;36m| __ )| _ \ \033[1;32m | __ ) / _ \| \/ | __ )
\033[1;33m | | | |_| |\033[1;36m| _ \| | | | \033[1;32m | _ \| | | | |\/| | _ \
\033[1;33m | | | _ |\033[1;36m| |_) | |_| |\033[1;32m | |_) | |_| | | | | |_) |
\033[1;33m |_| |_| |_|\033[1;36m|____/|____/\033[1;32m___|____/ \___/|_| |_|____/
|_\033[1;30mMAO\033[1;32m_|
\033[1;30m[ \033[1;34mAUTHER \033[1;30m] \033[1;32mTERMUX \033[1;32mHACKER\033[1;31m BD
\033[1;30m[\033[1;34m GITHUB\033[1;30m ] \033[1;34mMAO2116
\033[1;30m[ \033[1;34mCODER \033[1;30m] \033[1;30mMAO2116
[ {bcl}ANALYSING DATA {acl}] {gcl}ONLINE
\033[0;00m"""
| 39.571429
| 119
| 0.447653
| 309
| 2,216
| 2.757282
| 0.142395
| 0.380282
| 0.115023
| 0.140845
| 0.906103
| 0.906103
| 0.906103
| 0.906103
| 0.906103
| 0.906103
| 0
| 0.353286
| 0.306408
| 2,216
| 55
| 120
| 40.290909
| 0.201041
| 0.005415
| 0
| 0.904762
| 0
| 0.380952
| 0.913754
| 0.128915
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
69157a719f304332820e615964814fcd4cbb6ddb
| 138
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowRunRoute/cli/equal/golden_output_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowRunRoute/cli/equal/golden_output_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowRunRoute/cli/equal/golden_output_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
# Golden (expected) parsed output for the ShowRunRoute unit test:
# the two static-route lines from the device running-config.
expected_output = {
    'routes': [
        'ip route 10.64.67.187 255.255.255.255 9.30.0.1',
        'ip route 10.64.67.188 255.255.255.255 9.30.0.1',
    ],
}
| 23
| 113
| 0.630435
| 31
| 138
| 2.774194
| 0.483871
| 0.418605
| 0.418605
| 0.255814
| 0.697674
| 0.395349
| 0.395349
| 0.395349
| 0
| 0
| 0
| 0.440678
| 0.144928
| 138
| 5
| 114
| 27.6
| 0.288136
| 0
| 0
| 0
| 0
| 0.666667
| 0.710145
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
69324b433e09ea7604c5fa5e00b5288fab088c71
| 9,034
|
py
|
Python
|
accelbyte_py_sdk/api/ugc/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/ugc/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/ugc/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
"""Auto-generated package that contains models used by the justice-ugc-service."""
__version__ = "2.1.0"
__author__ = "AccelByte"
__email__ = "dev@accelbyte.net"
# pylint: disable=line-too-long
# admin_channel
from .wrappers import admin_create_channel
from .wrappers import admin_create_channel_async
from .wrappers import admin_delete_channel
from .wrappers import admin_delete_channel_async
from .wrappers import admin_get_channel
from .wrappers import admin_get_channel_async
from .wrappers import admin_update_channel
from .wrappers import admin_update_channel_async
from .wrappers import single_admin_delete_channel
from .wrappers import single_admin_delete_channel_async
from .wrappers import single_admin_get_channel
from .wrappers import single_admin_get_channel_async
from .wrappers import single_admin_update_channel
from .wrappers import single_admin_update_channel_async
# admin_content
from .wrappers import admin_delete_content
from .wrappers import admin_delete_content_async
from .wrappers import admin_delete_content_screenshot
from .wrappers import admin_delete_content_screenshot_async
from .wrappers import admin_download_content_preview
from .wrappers import admin_download_content_preview_async
from .wrappers import admin_get_content
from .wrappers import admin_get_content_async
from .wrappers import admin_get_specific_content
from .wrappers import admin_get_specific_content_async
from .wrappers import admin_hide_user_content
from .wrappers import admin_hide_user_content_async
from .wrappers import admin_search_channel_specific_content
from .wrappers import admin_search_channel_specific_content_async
from .wrappers import admin_search_content
from .wrappers import admin_search_content_async
from .wrappers import admin_update_content_direct
from .wrappers import admin_update_content_direct_async
from .wrappers import admin_update_content_s3
from .wrappers import admin_update_content_s3_async
from .wrappers import admin_update_screenshots
from .wrappers import admin_update_screenshots_async
from .wrappers import admin_upload_content_direct
from .wrappers import admin_upload_content_direct_async
from .wrappers import admin_upload_content_s3
from .wrappers import admin_upload_content_s3_async
from .wrappers import admin_upload_content_screenshot
from .wrappers import admin_upload_content_screenshot_async
from .wrappers import single_admin_delete_content
from .wrappers import single_admin_delete_content_async
from .wrappers import single_admin_get_content
from .wrappers import single_admin_get_content_async
from .wrappers import single_admin_update_content_direct
from .wrappers import single_admin_update_content_direct_async
from .wrappers import single_admin_update_content_s3
from .wrappers import single_admin_update_content_s3_async
# admin_group
from .wrappers import admin_create_group
from .wrappers import admin_create_group_async
from .wrappers import admin_delete_group
from .wrappers import admin_delete_group_async
from .wrappers import admin_get_all_groups
from .wrappers import admin_get_all_groups_async
from .wrappers import admin_get_group
from .wrappers import admin_get_group_async
from .wrappers import admin_get_group_contents
from .wrappers import admin_get_group_contents_async
from .wrappers import admin_update_group
from .wrappers import admin_update_group_async
from .wrappers import single_admin_delete_group
from .wrappers import single_admin_delete_group_async
from .wrappers import single_admin_get_all_groups
from .wrappers import single_admin_get_all_groups_async
from .wrappers import single_admin_get_group
from .wrappers import single_admin_get_group_async
from .wrappers import single_admin_get_group_contents
from .wrappers import single_admin_get_group_contents_async
from .wrappers import single_admin_update_group
from .wrappers import single_admin_update_group_async
# admin_tag
from .wrappers import admin_create_tag
from .wrappers import admin_create_tag_async
from .wrappers import admin_delete_tag
from .wrappers import admin_delete_tag_async
from .wrappers import admin_get_tag
from .wrappers import admin_get_tag_async
from .wrappers import admin_update_tag
from .wrappers import admin_update_tag_async
# admin_type
from .wrappers import admin_create_type
from .wrappers import admin_create_type_async
from .wrappers import admin_delete_type
from .wrappers import admin_delete_type_async
from .wrappers import admin_get_type
from .wrappers import admin_get_type_async
from .wrappers import admin_update_type
from .wrappers import admin_update_type_async
# anonymization
from .wrappers import admin_delete_all_user_channels
from .wrappers import admin_delete_all_user_channels_async
from .wrappers import admin_delete_all_user_contents
from .wrappers import admin_delete_all_user_contents_async
from .wrappers import admin_delete_all_user_group
from .wrappers import admin_delete_all_user_group_async
from .wrappers import admin_delete_all_user_states
from .wrappers import admin_delete_all_user_states_async
from .wrappers import delete_all_user_channel
from .wrappers import delete_all_user_channel_async
from .wrappers import delete_all_user_contents
from .wrappers import delete_all_user_contents_async
from .wrappers import delete_all_user_group
from .wrappers import delete_all_user_group_async
from .wrappers import delete_all_user_states
from .wrappers import delete_all_user_states_async
# public_channel
from .wrappers import create_channel
from .wrappers import create_channel_async
from .wrappers import delete_channel
from .wrappers import delete_channel_async
from .wrappers import get_channels
from .wrappers import get_channels_async
from .wrappers import update_channel
from .wrappers import update_channel_async
# public_content
from .wrappers import create_content_direct
from .wrappers import create_content_direct_async
from .wrappers import create_content_s3
from .wrappers import create_content_s3_async
from .wrappers import delete_content
from .wrappers import delete_content_async
from .wrappers import delete_content_screenshot
from .wrappers import delete_content_screenshot_async
from .wrappers import download_content_by_share_code
from .wrappers import download_content_by_share_code_async
from .wrappers import public_download_content_by_content_id
from .wrappers import public_download_content_by_content_id_async
from .wrappers import public_download_content_preview
from .wrappers import public_download_content_preview_async
from .wrappers import public_get_content_bulk
from .wrappers import public_get_content_bulk_async
from .wrappers import public_get_user_content
from .wrappers import public_get_user_content_async
from .wrappers import public_search_content
from .wrappers import public_search_content_async
from .wrappers import search_channel_specific_content
from .wrappers import search_channel_specific_content_async
from .wrappers import update_content_direct
from .wrappers import update_content_direct_async
from .wrappers import update_content_s3
from .wrappers import update_content_s3_async
from .wrappers import update_screenshots
from .wrappers import update_screenshots_async
from .wrappers import upload_content_screenshot
from .wrappers import upload_content_screenshot_async
# public_creator
from .wrappers import get_creator
from .wrappers import get_creator_async
# public_download_count
from .wrappers import add_download_count
from .wrappers import add_download_count_async
# public_follow
from .wrappers import get_followed_content
from .wrappers import get_followed_content_async
from .wrappers import get_followed_users
from .wrappers import get_followed_users_async
from .wrappers import get_public_followers
from .wrappers import get_public_followers_async
from .wrappers import get_public_following
from .wrappers import get_public_following_async
from .wrappers import update_user_follow_status
from .wrappers import update_user_follow_status_async
# public_group
from .wrappers import create_group
from .wrappers import create_group_async
from .wrappers import delete_group
from .wrappers import delete_group_async
from .wrappers import get_group
from .wrappers import get_group_async
from .wrappers import get_group_content
from .wrappers import get_group_content_async
from .wrappers import get_groups
from .wrappers import get_groups_async
from .wrappers import update_group
from .wrappers import update_group_async
# public_like
from .wrappers import get_liked_content
from .wrappers import get_liked_content_async
from .wrappers import update_content_like_status
from .wrappers import update_content_like_status_async
# public_tag
from .wrappers import get_tag
from .wrappers import get_tag_async
# public_type
from .wrappers import get_type
from .wrappers import get_type_async
| 40.693694
| 82
| 0.883883
| 1,314
| 9,034
| 5.658295
| 0.077626
| 0.284062
| 0.426093
| 0.225824
| 0.93154
| 0.886214
| 0.603631
| 0.197579
| 0.01345
| 0
| 0
| 0.002068
| 0.089993
| 9,034
| 221
| 83
| 40.877828
| 0.902323
| 0.062431
| 0
| 0
| 1
| 0
| 0.003672
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.98324
| 0
| 0.98324
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
69725ef7177d7d22c67e31104c33c5758e58147f
| 70
|
py
|
Python
|
vpn-node-docker/sentinel/vpn/__init__.py
|
14avengers/sentinel
|
825768d2242ad28896c41684bc08e8527cdf2f30
|
[
"MIT"
] | null | null | null |
vpn-node-docker/sentinel/vpn/__init__.py
|
14avengers/sentinel
|
825768d2242ad28896c41684bc08e8527cdf2f30
|
[
"MIT"
] | null | null | null |
vpn-node-docker/sentinel/vpn/__init__.py
|
14avengers/sentinel
|
825768d2242ad28896c41684bc08e8527cdf2f30
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from .openvpn import Keys
from .openvpn import OpenVPN
| 17.5
| 28
| 0.785714
| 11
| 70
| 5
| 0.636364
| 0.4
| 0.618182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.142857
| 70
| 3
| 29
| 23.333333
| 0.9
| 0.171429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
15f74a5e83c41f953250099a5c6ed321ad7adf9e
| 1,816
|
py
|
Python
|
CursoEmVideo/Aula14/ex059.py
|
lucashsouza/Desafios-Python
|
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
|
[
"MIT"
] | null | null | null |
CursoEmVideo/Aula14/ex059.py
|
lucashsouza/Desafios-Python
|
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
|
[
"MIT"
] | null | null | null |
CursoEmVideo/Aula14/ex059.py
|
lucashsouza/Desafios-Python
|
abb5b11ebdfd4c232b4f0427ef41fd96013f2802
|
[
"MIT"
] | null | null | null |
# Menu-driven calculator (CursoEmVideo exercise 059): reads two integers,
# then repeatedly offers sum / product / largest / new numbers until the
# user picks option 5 to quit.
# Fix: the original had two identical consecutive `elif opcao == 5`
# branches; the second was unreachable dead code and has been removed.
opcao = 0
r = int()  # result accumulator (kept from the original, starts at 0)
while opcao != 5:
    n1 = int(input('Primeiro valor: '))
    n2 = int(input('Segundo valor: '))
    print('')
    print('''[1] Somar
[2] Multiplicar
[3] Maior
[4] Novos numeros
[5] Sair do programa ''')
    print('')
    opcao = int(input('Opção: '))
    print('')
    if opcao == 1:
        r = n1 + n2
        print('A soma entre {} e {} é igual a {}'.format(n1, n2, r))
        print('')
    elif opcao == 2:
        r = n1 * n2
        print('A multiplicação entre {} e {} é igual a {}'.format(n1, n2, r))
        print('')
    elif opcao == 3:
        # NOTE(review): when n1 == n2 neither branch fires, so r keeps its
        # previous value — preserved from the original flow; confirm intended.
        if n1 > n2:
            r = n1
        elif n2 > n1:
            r = n2
        print('O maior valor entre {} e {} é {}'.format(n1, n2, r))
        print('')
    elif opcao == 4:
        # Option 4 re-reads the operands and re-shows the menu within the
        # same loop iteration (duplicated flow preserved from the original).
        print('Informe os números novamente! ')
        print('')
        n1 = int(input('Primeiro valor: '))
        n2 = int(input('Segundo número: '))
        print('''
[1] Somar
[2] Multiplicar
[3] Maior
[4] Novos numeros
[5] Sair do programa ''')
        print('')
        opcao = int(input('Sua opção: '))
        if opcao == 1:
            r = n1 + n2
            print('A soma entre {} e {} é igual a {}'.format(n1, n2, r))
            print('')
        elif opcao == 2:
            r = n1 * n2
            print('A multiplicação entre {} e {} é igual a {}'.format(n1, n2, r))
            print('')
        elif opcao == 3:
            if n1 > n2:
                r = n1
            elif n2 > n1:
                r = n2
            print('O maior valor entre {} e {} é {}'.format(n1, n2, r))
            print('')
    elif opcao == 5:
        print('Finalizando..')
    else:
        print('Opção inválida! ')
| 24.876712
| 82
| 0.42511
| 217
| 1,816
| 3.557604
| 0.211982
| 0.062176
| 0.051813
| 0.085492
| 0.853627
| 0.797927
| 0.797927
| 0.797927
| 0.797927
| 0.694301
| 0
| 0.053421
| 0.412445
| 1,816
| 72
| 83
| 25.222222
| 0.670103
| 0
| 0
| 0.777778
| 0
| 0
| 0.297018
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.365079
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c631d24fb8063ac7cac791f9153babb633d0b5f1
| 6,897
|
py
|
Python
|
tests/tests_meshanim_parser/test_convert_all_simple.py
|
Conex94/fbx-parser
|
856548e00f4eb5c3d312a8b9ce2054fd78bea812
|
[
"MIT"
] | null | null | null |
tests/tests_meshanim_parser/test_convert_all_simple.py
|
Conex94/fbx-parser
|
856548e00f4eb5c3d312a8b9ce2054fd78bea812
|
[
"MIT"
] | null | null | null |
tests/tests_meshanim_parser/test_convert_all_simple.py
|
Conex94/fbx-parser
|
856548e00f4eb5c3d312a8b9ce2054fd78bea812
|
[
"MIT"
] | null | null | null |
import unittest
import argparse
from fbx_parser.fbx_parser_rework import FbxParser
class fbx_parser_tests(unittest.TestCase):
    """Smoke tests that run FbxParser._convert_auto over the fixture files.

    The four tests originally carried four verbatim copies of the same
    argparse setup and conversion code; that machinery now lives in the
    private helpers _build_args/_run_conversion so each test only names
    its input file and output destination. The bare ``except:`` clauses
    (which also swallowed KeyboardInterrupt/SystemExit) were narrowed to
    ``except Exception``.
    """

    # Shared input directory for all fixture .fbx files.
    _PATH_IN = '..//..//testfiles//tests_meshanim_parser//input_files//fbxfiles'

    def _build_args(self):
        """Build the argparse namespace exactly as the CLI entry point would."""
        parser = argparse.ArgumentParser()
        parser.add_argument('--inpath', action='store', dest='path_in',
                            default=".", help='Select the path of the file to parse')
        parser.add_argument('--infile', action='store', dest='filename_in',
                            default="None.fbx", help='Select the file to parse')
        parser.add_argument('--outpath', action='store', dest='path_out',
                            default=".", help='Choose the output folder')
        parser.add_argument('--outfile', action='store', dest='filename_out',
                            default=".", help='Enter the target filename')
        parser.add_argument('--mode', action='store', dest='mode',
                            default="model", help='Pick between map or model')
        parser.add_argument('--group', action='store', dest='group',
                            default="None", help='If this Animation is part of a specific Group, like \"Human\"')
        # NOTE(review): parse_args() reads sys.argv, so a test runner's own
        # flags leak in here — preserved from the original; confirm intended.
        return parser.parse_args()

    def _run_conversion(self, filename_in, path_out, filename_out, verbose=False):
        """Convert one fixture file; return True on success, False on any error.

        :param filename_in: fixture file name inside _PATH_IN
        :param path_out: output directory for the converted result
        :param filename_out: base name for the converted output
        :param verbose: when True, print the exception (original behaviour of
            test_convert_animation)
        """
        try:
            fbxparser = FbxParser()
            results = self._build_args()
            results.path_in = self._PATH_IN
            results.filename_in = filename_in
            results.path_out = path_out
            results.filename_out = filename_out
            fbxparser._convert_auto(results)
            return True
        except Exception as e:
            if verbose:
                print(e)
            return False

    def test_convert_animation(self):
        """The animated-mesh fixture converts without raising."""
        self.assertTrue(self._run_conversion(
            'test_animation.fbx',
            '..//..//testfiles//tests_meshanim_parser//output_files//animation',
            'animation', verbose=True))

    def test_convert_static(self):
        """The static sphere fixture converts without raising."""
        self.assertTrue(self._run_conversion(
            'test_sphere.fbx',
            '..//..//testfiles//tests_meshanim_parser//output_files//models',
            'sphere'))

    def test_convert_skinned(self):
        """The skinned-mesh fixture converts without raising."""
        self.assertTrue(self._run_conversion(
            'test_skinned.fbx',
            '..//..//testfiles//tests_meshanim_parser//output_files//models',
            'skinned'))

    def test_convert_skinned_single(self):
        """The single-skinned-mesh fixture converts without raising."""
        self.assertTrue(self._run_conversion(
            'test_single_skinned.fbx',
            '..//..//testfiles//tests_meshanim_parser//output_files//models',
            'single_skinned'))
| 39.637931
| 117
| 0.548064
| 694
| 6,897
| 5.285303
| 0.119597
| 0.058888
| 0.111232
| 0.041439
| 0.937841
| 0.937841
| 0.928844
| 0.928844
| 0.91494
| 0.91494
| 0
| 0
| 0.320429
| 6,897
| 174
| 118
| 39.637931
| 0.78259
| 0.021314
| 0
| 0.819048
| 0
| 0
| 0.296861
| 0.079384
| 0
| 0
| 0
| 0
| 0.07619
| 1
| 0.038095
| false
| 0
| 0.028571
| 0
| 0.07619
| 0.009524
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d689aac7532bad5929dd08791a5c64a96bd4f52c
| 19,840
|
py
|
Python
|
library/binance/futures.py
|
danyanyam/ftx
|
32076bc1135e5a1e2bc800f4fff8dff9d7da18f1
|
[
"MIT"
] | 2
|
2021-09-23T22:59:24.000Z
|
2021-09-24T05:49:35.000Z
|
library/binance/futures.py
|
danyanyam/ftx
|
32076bc1135e5a1e2bc800f4fff8dff9d7da18f1
|
[
"MIT"
] | null | null | null |
library/binance/futures.py
|
danyanyam/ftx
|
32076bc1135e5a1e2bc800f4fff8dff9d7da18f1
|
[
"MIT"
] | null | null | null |
from base import BaseApiClass
import datetime as dt
class Futures(BaseApiClass):
"""https://binance-docs.github.io/apidocs/spot/en/#futures"""
def __init__(self, api_key: str, secret_key: str):
    """Create a futures API client; credentials are forwarded unchanged to BaseApiClass."""
    super().__init__(api_key, secret_key)
def new_future_account_transfer(self,
                                asset: str = None,
                                amount: float = None,
                                type: int = None,
                                recvWindow: int = None,
                                time_req: bool = True,
                                sign: bool = True):
    """POST /sapi/v1/futures/transfer — transfer between spot and futures accounts.

    https://binance-docs.github.io/apidocs/spot/en/#new-future-account-transfer-user_data

    ``type`` shadows the builtin but is kept for wire/API compatibility; it
    selects the transfer direction as defined by the endpoint.
    """
    # Fix: the original docstring linked the unrelated "acquiring-algorithm"
    # documentation anchor; corrected to the futures-transfer endpoint doc.
    return self.post('/sapi/v1/futures/transfer',
                     asset=asset,
                     amount=amount,
                     type=type,
                     recvWindow=recvWindow,
                     time_req=time_req,
                     sign=sign)
def get_future_account_transaction_history_list(self,
                                                asset: str = None,
                                                start_time: dt.datetime = None,
                                                end_time: dt.datetime = None,
                                                current: int = None,
                                                size: int = None,
                                                recvWindow: int = None,
                                                time_req: bool = True,
                                                sign: bool = True):
    """GET the futures account transfer history list.

    https://binance-docs.github.io/apidocs/spot/en/#get-future-account-transaction-history-list-user_data
    """
    # Collect the query parameters once, then delegate to the base client.
    query = dict(asset=asset,
                 start_time=start_time,
                 end_time=end_time,
                 current=current,
                 size=size,
                 recvWindow=recvWindow,
                 time_req=time_req,
                 sign=sign)
    return self.get('/sapi/v1/futures/transfer', **query)
def borrow_for_cross_collateral(self,
                                coin: str = None,
                                amount: float = None,
                                collateralCoin: str = None,
                                collateralAmount: float = None,
                                recvWindow: int = None,
                                time_req: bool = True,
                                sign: bool = True):
    """POST a cross-collateral borrow request.

    https://binance-docs.github.io/apidocs/spot/en/#borrow-for-cross-collateral-trade
    """
    payload = dict(coin=coin,
                   amount=amount,
                   collateralCoin=collateralCoin,
                   collateralAmount=collateralAmount,
                   recvWindow=recvWindow,
                   time_req=time_req,
                   sign=sign)
    return self.post('/sapi/v1/futures/loan/borrow', **payload)
def cross_collateral_borrow_history(self,
                                    coin: str = None,
                                    start_time: dt.datetime = None,
                                    end_time: dt.datetime = None,
                                    limit: int = None,
                                    recvWindow: int = None,
                                    time_req: bool = True,
                                    sign: bool = True):
    """GET the cross-collateral borrow history.

    https://binance-docs.github.io/apidocs/spot/en/#cross-collateral-borrow-history-user_data
    """
    query = dict(coin=coin,
                 start_time=start_time,
                 end_time=end_time,
                 limit=limit,
                 recvWindow=recvWindow,
                 time_req=time_req,
                 sign=sign)
    return self.get('/sapi/v1/futures/loan/borrow/history', **query)
def repay_for_cross_collateral(self,
                               coin: str = None,
                               collateralCoin: str = None,
                               amount: float = None,
                               recvWindow: int = None,
                               time_req: bool = True,
                               sign: bool = True):
    """POST a cross-collateral repayment.

    https://binance-docs.github.io/apidocs/spot/en/#repay-for-cross-collateral-trade
    """
    payload = dict(coin=coin,
                   collateralCoin=collateralCoin,
                   amount=amount,
                   recvWindow=recvWindow,
                   time_req=time_req,
                   sign=sign)
    return self.post('/sapi/v1/futures/loan/repay', **payload)
def cross_collateral_repayment_history(self,
                                       coin: str = None,
                                       start_time: dt.datetime = None,
                                       end_time: dt.datetime = None,
                                       limit: int = None,
                                       recvWindow: int = None,
                                       time_req: bool = True,
                                       sign: bool = True):
    """GET the cross-collateral repayment history.

    https://binance-docs.github.io/apidocs/spot/en/#cross-collateral-repayment-history-user_data
    """
    query = dict(coin=coin,
                 start_time=start_time,
                 end_time=end_time,
                 limit=limit,
                 recvWindow=recvWindow,
                 time_req=time_req,
                 sign=sign)
    return self.get('/sapi/v1/futures/loan/repay/history', **query)
def cross_collateral_wallet(self,
                            recvWindow: int = None,
                            time_req: bool = True,
                            sign: bool = True):
    """GET the cross-collateral wallet (v1 endpoint).

    https://binance-docs.github.io/apidocs/spot/en/#cross-collateral-wallet-user_data
    """
    query = dict(recvWindow=recvWindow, time_req=time_req, sign=sign)
    return self.get('/sapi/v1/futures/loan/wallet', **query)
def cross_collateral_wallet_v2(self,
                               recvWindow: int = None,
                               time_req: bool = True,
                               sign: bool = True):
    """GET the cross-collateral wallet (v2 endpoint).

    https://binance-docs.github.io/apidocs/spot/en/#cross-collateral-wallet-v2-user_data
    """
    query = dict(recvWindow=recvWindow, time_req=time_req, sign=sign)
    return self.get('/sapi/v2/futures/loan/wallet', **query)
def cross_collateral_information(self,
                                 collateralCoin: str = None,
                                 recvWindow: int = None,
                                 time_req: bool = True,
                                 sign: bool = True):
    """GET cross-collateral configuration info (v1 endpoint).

    https://binance-docs.github.io/apidocs/spot/en/#cross-collateral-information-user_data
    """
    query = dict(collateralCoin=collateralCoin,
                 recvWindow=recvWindow,
                 time_req=time_req,
                 sign=sign)
    return self.get('/sapi/v1/futures/loan/configs', **query)
def cross_collateral_information_v2(self,
loanCoin: str = None,
collateralCoin: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#cross-collateral-information-v2-user_data"""
return self.get('/sapi/v2/futures/loan/configs',
loanCoin=loanCoin,
collateralCoin=collateralCoin,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def calculate_rate_after_adjust_cross_collateral_LTV(self,
collateralCoin: str = None,
amount: float = None,
direction: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#calculate-rate-after-adjust-cross-collateral-ltv-user_data"""
return self.get('/sapi/v1/futures/loan/calcAdjustLevel',
collateralCoin=collateralCoin,
amount=amount,
direction=direction,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def calculate_rate_after_adjust_cross_collateral_LTV_v2(self,
loanCoin: str = None,
collateralCoin: str = None,
amount: float = None,
direction: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#calculate-rate-after-adjust-cross-collateral-ltv-v2-user_data"""
return self.get('/sapi/v2/futures/loan/calcAdjustLevel',
loanCoin=loanCoin,
collateralCoin=collateralCoin,
amount=amount,
direction=direction,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def get_max_amount_for_adjust_cross_collateral_LTV(self,
collateralCoin: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#get-max-amount-for-adjust-cross-collateral-ltv-user_data"""
return self.get('/sapi/v1/futures/loan/calcMaxAdjustAmount',
collateralCoin=collateralCoin,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def get_max_amount_for_adjust_cross_collateral_LTV_v2(self,
loanCoin: str = None,
collateralCoin: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#get-max-amount-for-adjust-cross-collateral-ltv-v2-user_data"""
return self.get('/sapi/v2/futures/loan/calcMaxAdjustAmount',
loanCoin=loanCoin,
collateralCoin=collateralCoin,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def adjust_cross_collateral_LTV(self,
collateralCoin: str = None,
amount: float = None,
direction: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#adjust-cross-collateral-ltv-trade"""
return self.post('/sapi/v1/futures/loan/adjustCollateral',
collateralCoin=collateralCoin,
amount=amount,
direction=direction,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def adjust_cross_collateral_LTV_v2(self,
loanCoin: str = None,
collateralCoin: str = None,
amount: float = None,
direction: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#adjust-cross-collateral-ltv-v2-trade"""
return self.post('/sapi/v2/futures/loan/adjustCollateral',
loanCoin=loanCoin,
collateralCoin=collateralCoin,
amount=amount,
direction=direction,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def adjust_cross_collateral_LTV_history(self,
loanCoin: str = None,
collateralCoin: str = None,
start_time: dt.datetime = None,
end_time: dt.datetime = None,
limit: int = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#adjust-cross-collateral-ltv-history-user_data"""
return self.get('/sapi/v1/futures/loan/adjustCollateral/history',
loanCoin=loanCoin,
collateralCoin=collateralCoin,
start_time=start_time,
end_time=end_time,
limit=limit,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def cross_collateral_liquidation_history(self,
loanCoin: str = None,
collateralCoin: str = None,
start_time: dt.datetime = None,
end_time: dt.datetime = None,
limit: int = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#cross-collateral-liquidation-history-user_data"""
return self.get('/sapi/v1/futures/loan/liquidationHistory',
loanCoin=loanCoin,
collateralCoin=collateralCoin,
start_time=start_time,
end_time=end_time,
limit=limit,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def check_collateral_repay_limit(self,
coin: str = None,
collateralCoin: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#check-collateral-repay-limit-user_data"""
return self.get('/sapi/v1/futures/loan/collateralRepayLimit',
coin=coin,
collateralCoin=collateralCoin,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def get_collateral_repay_quote(self,
coin: str = None,
collateralCoin: str = None,
amount: float = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#get-collateral-repay-quote-user_data"""
return self.get('/sapi/v1/futures/loan/collateralRepay',
coin=coin,
collateralCoin=collateralCoin,
amount=amount,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def repay_with_collateral(self,
quoteId: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#repay-with-collateral-user_data"""
return self.post('/sapi/v1/futures/loan/collateralRepay',
quoteId=quoteId,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def collateral_repayment_result(self,
quoteId: str = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#collateral-repayment-result-user_data"""
return self.get('/sapi/v1/futures/loan/collateralRepayResult',
quoteId=quoteId,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
def cross_collateral_interest_history(self,
collateralCoin: str = None,
start_time: dt.datetime = None,
end_time: dt.datetime = None,
current: int = None,
limit: int = None,
recvWindow: int = None,
time_req: bool = True,
sign: bool = True):
"""https://binance-docs.github.io/apidocs/spot/en/#cross-collateral-interest-history-user_data"""
return self.get('/sapi/v1/futures/loan/interestHistory',
collateralCoin=collateralCoin,
start_time=start_time,
end_time=end_time,
current=current,
limit=limit,
recvWindow=recvWindow,
time_req=time_req,
sign=sign)
| 53.621622
| 123
| 0.407157
| 1,493
| 19,840
| 5.262559
| 0.0643
| 0.061474
| 0.048874
| 0.067201
| 0.897162
| 0.87858
| 0.865088
| 0.85707
| 0.834542
| 0.806033
| 0
| 0.003441
| 0.516633
| 19,840
| 369
| 124
| 53.766938
| 0.81585
| 0.107913
| 0
| 0.821317
| 0
| 0
| 0.045739
| 0.045739
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075235
| false
| 0
| 0.00627
| 0
| 0.15674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ba74ab766b46a5c6c8ced8bb5e5bae5251b10f9a
| 8,728
|
py
|
Python
|
giskard-ml-worker/test/test_performance.py
|
Giskard-AI/giskard
|
6efdb8ac5f7dbf7c41cee0c9bca49a44028864fe
|
[
"Apache-2.0"
] | 23
|
2022-03-06T21:53:02.000Z
|
2022-03-31T14:53:57.000Z
|
giskard-ml-worker/test/test_performance.py
|
Giskard-AI/giskard
|
6efdb8ac5f7dbf7c41cee0c9bca49a44028864fe
|
[
"Apache-2.0"
] | 21
|
2022-03-07T14:21:58.000Z
|
2022-03-31T11:33:36.000Z
|
giskard-ml-worker/test/test_performance.py
|
Giskard-AI/giskard
|
6efdb8ac5f7dbf7c41cee0c9bca49a44028864fe
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from ml_worker.testing.functions import GiskardTestFunctions
def _test_auc(german_credit_data, german_credit_model, threshold):
    """Run the AUC performance test and return its pass/fail flag."""
    outcome = GiskardTestFunctions().performance.test_auc(
        german_credit_data, german_credit_model,
        threshold=threshold, target='default')
    assert outcome.element_count == 1000
    assert outcome.missing_count == 0
    assert pytest.approx(outcome.metric, 0.001) == 0.709761917591095
    return outcome.passed


def test_auc(german_credit_data, german_credit_model):
    """AUC ~0.7098: passes at threshold 0.5, fails at 0.8."""
    assert _test_auc(german_credit_data, german_credit_model, 0.5)
    assert not _test_auc(german_credit_data, german_credit_model, 0.8)
def _test_f1(german_credit_data, german_credit_model, threshold):
    """Run the F1 performance test and return its pass/fail flag."""
    outcome = GiskardTestFunctions().performance.test_f1(
        german_credit_data, german_credit_model,
        threshold=threshold, target='default')
    assert outcome.element_count == 1000
    assert outcome.missing_count == 0
    assert pytest.approx(outcome.metric, 0.001) == 0.2661668360233307
    return outcome.passed


def test_f1(german_credit_data, german_credit_model):
    """F1 ~0.2662: passes at threshold 0.2, fails at 0.3."""
    assert _test_f1(german_credit_data, german_credit_model, 0.2)
    assert not _test_f1(german_credit_data, german_credit_model, 0.3)
def _test_precision(german_credit_data, german_credit_model, threshold):
    """Run the precision performance test and return its pass/fail flag."""
    outcome = GiskardTestFunctions().performance.test_precision(
        german_credit_data, german_credit_model,
        threshold=threshold, target='default')
    assert outcome.element_count == 1000
    assert outcome.missing_count == 0
    assert pytest.approx(outcome.metric, 0.001) == 0.18513689935207367
    return outcome.passed


def test_precision(german_credit_data, german_credit_model):
    """Precision ~0.1851: passes at threshold 0.18, fails at 0.19."""
    assert _test_precision(german_credit_data, german_credit_model, 0.18)
    assert not _test_precision(german_credit_data, german_credit_model, 0.19)
def _test_recall(german_credit_data, german_credit_model, threshold):
    """Run the recall performance test and return its pass/fail flag."""
    outcome = GiskardTestFunctions().performance.test_recall(
        german_credit_data, german_credit_model,
        threshold=threshold, target='default')
    assert outcome.element_count == 1000
    assert outcome.missing_count == 0
    assert pytest.approx(outcome.metric, 0.001) == 0.47333332896232605
    return outcome.passed


def test_recall(german_credit_data, german_credit_model):
    """Recall ~0.4733: passes at threshold 0.4, fails at 0.5."""
    assert _test_recall(german_credit_data, german_credit_model, 0.4)
    assert not _test_recall(german_credit_data, german_credit_model, 0.5)
def _test_accuracy(german_credit_data, german_credit_model, threshold):
    """Run the accuracy performance test and return its pass/fail flag."""
    outcome = GiskardTestFunctions().performance.test_accuracy(
        german_credit_data, german_credit_model,
        threshold=threshold, target='default')
    assert outcome.element_count == 1000
    assert outcome.missing_count == 0
    assert pytest.approx(outcome.metric, 0.001) == 0.21699999272823334
    return outcome.passed


def test_accuracy(german_credit_data, german_credit_model):
    """Accuracy ~0.217: passes at threshold 0.2, fails at 0.3."""
    assert _test_accuracy(german_credit_data, german_credit_model, 0.2)
    assert not _test_accuracy(german_credit_data, german_credit_model, 0.3)
def _test_neg_rmse(diabetes_dataset_with_target, linear_regression_diabetes, threshold):
    """Run the negative-RMSE test and return its pass/fail flag."""
    outcome = GiskardTestFunctions().performance.test_neg_rmse(
        diabetes_dataset_with_target, linear_regression_diabetes,
        threshold=threshold, target='target')
    assert outcome.element_count == 442
    assert outcome.missing_count == 0
    assert pytest.approx(outcome.metric, 0.001) == -2860.970
    return outcome.passed


def test_neg_rmse(diabetes_dataset_with_target, linear_regression_diabetes):
    """Neg-RMSE ~-2860.97: passes at threshold -2861, fails at -2860."""
    assert _test_neg_rmse(diabetes_dataset_with_target, linear_regression_diabetes, -2861)
    assert not _test_neg_rmse(diabetes_dataset_with_target, linear_regression_diabetes, -2860)
def _test_neg_mae(diabetes_dataset_with_target, linear_regression_diabetes, threshold=-44):
    """Run the negative-MAE test and return its pass/fail flag."""
    outcome = GiskardTestFunctions().performance.test_neg_mae(
        diabetes_dataset_with_target, linear_regression_diabetes,
        threshold=threshold, target='target')
    assert outcome.element_count == 442
    assert outcome.missing_count == 0
    assert pytest.approx(outcome.metric, 0.001) == -43.302
    return outcome.passed


def test_neg_mae(diabetes_dataset_with_target, linear_regression_diabetes):
    """Neg-MAE ~-43.302: passes at threshold -44, fails at -43."""
    assert _test_neg_mae(diabetes_dataset_with_target, linear_regression_diabetes, -44)
    assert not _test_neg_mae(diabetes_dataset_with_target, linear_regression_diabetes, -43)
def _test_r2(diabetes_dataset_with_target, linear_regression_diabetes, threshold):
    """Run the R2 test via the results registry and return its pass/fail flag."""
    functions = GiskardTestFunctions()
    functions.performance.test_r2(
        diabetes_dataset_with_target, linear_regression_diabetes,
        threshold=threshold, target='target')
    assert len(functions.tests_results) == 1
    execution = functions.tests_results[0]
    assert execution.name == 'test_r2'
    assert pytest.approx(execution.result.metric, 0.001) == 0.063
    return execution.result.passed


def test_r2(diabetes_dataset_with_target, linear_regression_diabetes):
    """R2 ~0.063: passes at threshold 0.062, fails at 0.064."""
    assert _test_r2(diabetes_dataset_with_target, linear_regression_diabetes, 0.062)
    assert not _test_r2(diabetes_dataset_with_target, linear_regression_diabetes, 0.064)
def _test_diff_accuracy(german_credit_data, german_credit_model, threshold):
    """Compare accuracy between male and female rows; return pass/fail."""
    functions = GiskardTestFunctions()
    males = german_credit_data[german_credit_data.sex == 'male'].index
    females = german_credit_data[german_credit_data.sex == 'female'].index
    functions.performance.test_diff_accuracy(
        german_credit_data, german_credit_model,
        filter_1=males, filter_2=females,
        threshold=threshold, target='default')
    assert len(functions.tests_results) == 1
    execution = functions.tests_results[0]
    assert execution.name == 'test_diff_accuracy'
    assert pytest.approx(execution.result.metric, 0.001) == 0.12836022675037384
    return execution.result.passed


def test_diff_accuracy(german_credit_data, german_credit_model):
    """Accuracy gap ~0.1284: passes at threshold 0.2, fails at 0.1."""
    assert _test_diff_accuracy(german_credit_data, german_credit_model, 0.2)
    assert not _test_diff_accuracy(german_credit_data, german_credit_model, 0.1)
def _test_diff_f1(german_credit_data, german_credit_model, threshold):
    """Compare F1 between male and female rows; return pass/fail."""
    males = german_credit_data[german_credit_data.sex == 'male'].index
    females = german_credit_data[german_credit_data.sex == 'female'].index
    outcome = GiskardTestFunctions().performance.test_diff_f1(
        german_credit_data, german_credit_model,
        filter_1=males, filter_2=females,
        threshold=threshold, target='default')
    assert pytest.approx(outcome.metric, 0.001) == 0.07218418270349503
    return outcome.passed


def test_diff_f1(german_credit_data, german_credit_model):
    """F1 gap ~0.0722: passes at threshold 0.08, fails at 0.07."""
    assert _test_diff_f1(german_credit_data, german_credit_model, 0.08)
    assert not _test_diff_f1(german_credit_data, german_credit_model, 0.07)
def _test_diff_recall(german_credit_data, german_credit_model, threshold):
    """Compare recall between male and female rows; return pass/fail."""
    males = german_credit_data[german_credit_data.sex == 'male'].index
    females = german_credit_data[german_credit_data.sex == 'female'].index
    outcome = GiskardTestFunctions().performance.test_diff_recall(
        german_credit_data, german_credit_model,
        filter_1=males, filter_2=females,
        threshold=threshold, target='default')
    assert pytest.approx(outcome.metric, 0.001) == 0.312826007604599
    return outcome.passed


def test_diff_recall(german_credit_data, german_credit_model):
    """Recall gap ~0.3128: passes at threshold 0.4, fails at 0.3."""
    assert _test_diff_recall(german_credit_data, german_credit_model, 0.4)
    assert not _test_diff_recall(german_credit_data, german_credit_model, 0.3)
def _test_diff_precision(german_credit_data, german_credit_model, threshold):
    """Compare precision between male and female rows; return pass/fail."""
    males = german_credit_data[german_credit_data.sex == 'male'].index
    females = german_credit_data[german_credit_data.sex == 'female'].index
    outcome = GiskardTestFunctions().performance.test_diff_precision(
        german_credit_data, german_credit_model,
        filter_1=males, filter_2=females,
        threshold=threshold, target='default')
    assert pytest.approx(outcome.metric, 0.001) == 0.053921569138765335
    return outcome.passed


def test_diff_precision(german_credit_data, german_credit_model):
    """Precision gap ~0.0539: passes at threshold 0.06, fails at 0.05."""
    assert _test_diff_precision(german_credit_data, german_credit_model, 0.06)
    assert not _test_diff_precision(german_credit_data, german_credit_model, 0.05)
| 35.479675
| 94
| 0.75527
| 1,104
| 8,728
| 5.564312
| 0.078804
| 0.207065
| 0.15888
| 0.18981
| 0.94449
| 0.926095
| 0.902979
| 0.891421
| 0.878724
| 0.778773
| 0
| 0.047678
| 0.166132
| 8,728
| 245
| 95
| 35.62449
| 0.796373
| 0
| 0
| 0.531915
| 0
| 0
| 0.016728
| 0
| 0
| 0
| 0
| 0
| 0.287234
| 1
| 0.12766
| false
| 0.06383
| 0.010638
| 0
| 0.202128
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
ba96b84ba468ad31bcf585c98d0c3ce1495c2990
| 293,260
|
py
|
Python
|
swagger_client/apis/products_api.py
|
FengyunPan2/python-harborclient
|
69a55fdb92855d5080232a6e77f3ec9899624071
|
[
"Apache-2.0"
] | null | null | null |
swagger_client/apis/products_api.py
|
FengyunPan2/python-harborclient
|
69a55fdb92855d5080232a6e77f3ec9899624071
|
[
"Apache-2.0"
] | null | null | null |
swagger_client/apis/products_api.py
|
FengyunPan2/python-harborclient
|
69a55fdb92855d5080232a6e77f3ec9899624071
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Harbor API
These APIs provide services for manipulating Harbor project.
OpenAPI spec version: 0.3.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ProductsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def configurations_get(self, **kwargs):
"""
Get system configurations.
This endpoint is for retrieving system configurations that only provides for admin user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.configurations_get(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: Configurations
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.configurations_get_with_http_info(**kwargs)
else:
(data) = self.configurations_get_with_http_info(**kwargs)
return data
    def configurations_get_with_http_info(self, **kwargs):
        """
        Get system configurations.
        This endpoint is for retrieving system configurations that only provides for admin user.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.configurations_get_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :return: Configurations
        If the method is called asynchronously,
        returns the request thread.
        """
        # Control keywords accepted in addition to the documented API params;
        # anything else raises TypeError below.
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot: at this point it holds self, kwargs, all_params.
        # Validated kwargs are merged in so they can be read via params.get().
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method configurations_get" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'text/plain'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain', 'application/json'])
        # Authentication setting
        auth_settings = []
        # Delegate the actual HTTP round-trip (or thread creation, when a
        # callback is supplied) to the shared ApiClient.
        return self.api_client.call_api('/configurations', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Configurations',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def configurations_put(self, configurations, **kwargs):
"""
Modify system configurations.
This endpoint is for modifying system configurations that only provides for admin user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.configurations_put(configurations, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param Configurations configurations: The configuration map can contain a subset of the attributes of the schema, which are to be updated. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.configurations_put_with_http_info(configurations, **kwargs)
else:
(data) = self.configurations_put_with_http_info(configurations, **kwargs)
return data
    def configurations_put_with_http_info(self, configurations, **kwargs):
        """
        Modify system configurations.
        This endpoint is for modifying system configurations that only provides for admin user.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.configurations_put_with_http_info(configurations, callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param Configurations configurations: The configuration map can contain a subset of the attributes of the schema, which are to be updated. (required)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Documented API params plus the accepted control keywords; anything
        # else raises TypeError below.
        all_params = ['configurations']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot: self, configurations, kwargs, all_params at this
        # point; validated kwargs are merged in below.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method configurations_put" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'configurations' is set
        if ('configurations' not in params) or (params['configurations'] is None):
            raise ValueError("Missing the required parameter `configurations` when calling `configurations_put`")
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The configuration map is sent as the PUT request body.
        if 'configurations' in params:
            body_params = params['configurations']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'text/plain'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain', 'application/json'])
        # Authentication setting
        auth_settings = []
        # Delegate the HTTP round-trip to the shared ApiClient.
        return self.api_client.call_api('/configurations', 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def configurations_reset_post(self, **kwargs):
"""
Reset system configurations.
Reset system configurations from environment variables. Can only be accessed by admin user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.configurations_reset_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.configurations_reset_post_with_http_info(**kwargs)
else:
(data) = self.configurations_reset_post_with_http_info(**kwargs)
return data
    def configurations_reset_post_with_http_info(self, **kwargs):
        """
        Reset system configurations.
        Reset system configurations from environment variables. Can only be accessed by admin user.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.configurations_reset_post_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Control keywords accepted; anything else raises TypeError below.
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot (self, kwargs, all_params); validated kwargs are
        # merged in below so they can be read via params.get().
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method configurations_reset_post" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'text/plain'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain', 'application/json'])
        # Authentication setting
        auth_settings = []
        # Delegate the HTTP round-trip to the shared ApiClient.
        return self.api_client.call_api('/configurations/reset', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def email_ping_post(self, **kwargs):
"""
Test connection and authentication with email server.
Test connection and authentication with email server.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.email_ping_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param EmailServerSetting settings: Email server settings, if some of the settings are not assigned, they will be read from system configuration.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.email_ping_post_with_http_info(**kwargs)
else:
(data) = self.email_ping_post_with_http_info(**kwargs)
return data
    def email_ping_post_with_http_info(self, **kwargs):
        """
        Test connection and authentication with email server.
        Test connection and authentication with email server.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.email_ping_post_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :param EmailServerSetting settings: Email server settings, if some of the settings are not assigned, they will be read from system configuration.
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Documented API param plus the accepted control keywords; anything
        # else raises TypeError below.
        all_params = ['settings']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot (self, kwargs, all_params); validated kwargs are
        # merged in below.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method email_ping_post" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Optional email-server settings travel as the POST request body.
        if 'settings' in params:
            body_params = params['settings']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'text/plain'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain', 'application/json'])
        # Authentication setting
        auth_settings = []
        # Delegate the HTTP round-trip to the shared ApiClient.
        return self.api_client.call_api('/email/ping', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def internal_syncregistry_post(self, **kwargs):
"""
Sync repositories from registry to DB.
This endpoint is for syncing all repositories of registry with database.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.internal_syncregistry_post(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.internal_syncregistry_post_with_http_info(**kwargs)
else:
(data) = self.internal_syncregistry_post_with_http_info(**kwargs)
return data
    def internal_syncregistry_post_with_http_info(self, **kwargs):
        """
        Sync repositories from registry to DB.
        This endpoint is for syncing all repositories of registry with database.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.internal_syncregistry_post_with_http_info(callback=callback_function)
        :param callback function: The callback function
        for asynchronous request. (optional)
        :return: None
        If the method is called asynchronously,
        returns the request thread.
        """
        # Control keywords accepted; anything else raises TypeError below.
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshot (self, kwargs, all_params); validated kwargs are
        # merged in below so they can be read via params.get().
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method internal_syncregistry_post" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'text/plain'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['text/plain', 'application/json'])
        # Authentication setting
        auth_settings = []
        # Delegate the HTTP round-trip to the shared ApiClient.
        return self.api_client.call_api('/internal/syncregistry', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def jobs_replication_get(self, policy_id, **kwargs):
    """
    List filters jobs according to the policy and repository.

    This endpoint lets the user list jobs filtered by the policy and
    repository (if start_time and end_time are both null, jobs of the
    last 10 days are listed).  Synchronous by default; pass a `callback`
    function to run asynchronously, in which case the request thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.jobs_replication_get(policy_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int policy_id: The ID of the policy that triggered this job. (required)
    :param int num: The return list length number.
    :param int end_time: The end time of jobs done. (Timestamp)
    :param int start_time: The start time of jobs. (Timestamp)
    :param str repository: The respond jobs list filter by repository name.
    :param str status: The respond jobs list filter by status.
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :return: list[JobStatus]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.jobs_replication_get_with_http_info(policy_id, **kwargs)
def jobs_replication_get_with_http_info(self, policy_id, **kwargs):
    """
    List filters jobs according to the policy and repository.

    This endpoint lets the user list jobs filtered by the policy and
    repository (if start_time and end_time are both null, jobs of the
    last 10 days are listed).  Synchronous by default; pass a `callback`
    function to run asynchronously, in which case the request thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.jobs_replication_get_with_http_info(policy_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int policy_id: The ID of the policy that triggered this job. (required)
    :param int num: The return list length number.
    :param int end_time: The end time of jobs done. (Timestamp)
    :param int start_time: The start time of jobs. (Timestamp)
    :param str repository: The respond jobs list filter by repository name.
    :param str status: The respond jobs list filter by status.
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :return: list[JobStatus]
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['policy_id', 'num', 'end_time', 'start_time',
                'repository', 'status', 'page', 'page_size',
                'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method jobs_replication_get" % key
            )
    params = dict(kwargs)
    params['policy_id'] = policy_id
    # verify the required parameter 'policy_id' is set
    if params.get('policy_id') is None:
        raise ValueError("Missing the required parameter `policy_id` when calling `jobs_replication_get`")
    # Only parameters the caller actually supplied become query items.
    query_params = [(name, params[name])
                    for name in ('policy_id', 'num', 'end_time',
                                 'start_time', 'repository', 'status',
                                 'page', 'page_size')
                    if name in params]
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/jobs/replication', 'GET',
        {},             # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[JobStatus]',
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def jobs_replication_id_delete(self, id, **kwargs):
    """
    Delete specific ID job.

    This endpoint removes the job with the given ID from jobservice.
    Synchronous by default; pass a `callback` function to run
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.jobs_replication_id_delete(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: Delete job ID. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.jobs_replication_id_delete_with_http_info(id, **kwargs)
def jobs_replication_id_delete_with_http_info(self, id, **kwargs):
    """
    Delete specific ID job.

    This endpoint removes the job with the given ID from jobservice.
    Synchronous by default; pass a `callback` function to run
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.jobs_replication_id_delete_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: Delete job ID. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['id', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method jobs_replication_id_delete" % key
            )
    params = dict(kwargs)
    params['id'] = id
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `jobs_replication_id_delete`")
    # The job ID is substituted into the URL template below.
    path_params = {'id': params['id']}
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/jobs/replication/{id}', 'DELETE',
        path_params,
        [],             # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def jobs_replication_id_log_get(self, id, **kwargs):
    """
    Get job logs.

    This endpoint lets the user search job logs filtered by a specific
    ID.  Synchronous by default; pass a `callback` function to run
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.jobs_replication_id_log_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: Relevant job ID (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.jobs_replication_id_log_get_with_http_info(id, **kwargs)
def jobs_replication_id_log_get_with_http_info(self, id, **kwargs):
    """
    Get job logs.

    This endpoint lets the user search job logs filtered by a specific
    ID.  Synchronous by default; pass a `callback` function to run
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.jobs_replication_id_log_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: Relevant job ID (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['id', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method jobs_replication_id_log_get" % key
            )
    params = dict(kwargs)
    params['id'] = id
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `jobs_replication_id_log_get`")
    # The job ID is substituted into the URL template below.
    path_params = {'id': params['id']}
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/jobs/replication/{id}/log', 'GET',
        path_params,
        [],             # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def jobs_scan_id_log_get(self, id, **kwargs):
    """
    Get job logs.

    This endpoint lets the user get scan job logs filtered by a specific
    ID.  Synchronous by default; pass a `callback` function to run
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.jobs_scan_id_log_get(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: Relevant job ID (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.jobs_scan_id_log_get_with_http_info(id, **kwargs)
def jobs_scan_id_log_get_with_http_info(self, id, **kwargs):
    """
    Get job logs.

    This endpoint lets the user get scan job logs filtered by a specific
    ID.  Synchronous by default; pass a `callback` function to run
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.jobs_scan_id_log_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: Relevant job ID (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['id', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method jobs_scan_id_log_get" % key
            )
    params = dict(kwargs)
    params['id'] = id
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `jobs_scan_id_log_get`")
    # The job ID is substituted into the URL template below.
    path_params = {'id': params['id']}
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/jobs/scan/{id}/log', 'GET',
        path_params,
        [],             # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def ldap_ping_post(self, **kwargs):
    """
    Ping available ldap service.

    This endpoint pings the available ldap service to test the related
    configuration parameters.  Synchronous by default; pass a `callback`
    function to run asynchronously, in which case the request thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.ldap_ping_post(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param LdapConf ldapconf: ldap configuration. Supports an input ldap
        service configuration; if the request is empty, the current
        configuration is loaded from the system.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.ldap_ping_post_with_http_info(**kwargs)
def ldap_ping_post_with_http_info(self, **kwargs):
    """
    Ping available ldap service.

    This endpoint pings the available ldap service to test the related
    configuration parameters.  Synchronous by default; pass a `callback`
    function to run asynchronously, in which case the request thread is
    returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.ldap_ping_post_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param LdapConf ldapconf: ldap configuration. Supports an input ldap
        service configuration; if the request is empty, the current
        configuration is loaded from the system.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['ldapconf', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method ldap_ping_post" % key
            )
    params = dict(kwargs)
    # The optional ldap configuration travels as the request body.
    body_params = params.get('ldapconf')
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/ldap/ping', 'POST',
        {},             # no path parameters
        [],             # no query parameters
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def ldap_users_import_post(self, uid_list, **kwargs):
    """
    Import selected available ldap users.

    This endpoint adds the selected available ldap users to harbor based
    on the related configuration parameters from the system.  The system
    tries to guess the user email address and realname and adds them to
    the harbor user information.  If errors occur while importing a
    user, the list of failed uids and the failure reasons are returned.
    Synchronous by default; pass a `callback` function to run
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.ldap_users_import_post(uid_list, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param LdapImportUsers uid_list: The uid list for importing; users
        are validated against the ldap service based on the system
        configuration. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.ldap_users_import_post_with_http_info(uid_list, **kwargs)
def ldap_users_import_post_with_http_info(self, uid_list, **kwargs):
    """
    Import selected available ldap users.

    This endpoint adds the selected available ldap users to harbor based
    on the related configuration parameters from the system.  The system
    tries to guess the user email address and realname and adds them to
    the harbor user information.  If errors occur while importing a
    user, the list of failed uids and the failure reasons are returned.
    Synchronous by default; pass a `callback` function to run
    asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.ldap_users_import_post_with_http_info(uid_list, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param LdapImportUsers uid_list: The uid list for importing; users
        are validated against the ldap service based on the system
        configuration. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['uid_list', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method ldap_users_import_post" % key
            )
    params = dict(kwargs)
    params['uid_list'] = uid_list
    # verify the required parameter 'uid_list' is set
    if params.get('uid_list') is None:
        raise ValueError("Missing the required parameter `uid_list` when calling `ldap_users_import_post`")
    # The uid list travels as the request body.
    body_params = params['uid_list']
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/ldap/users/import', 'POST',
        {},             # no path parameters
        [],             # no query parameters
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def ldap_users_search_post(self, **kwargs):
    """
    Search available ldap users.

    This endpoint searches the available ldap users based on the related
    configuration parameters.  The search can use an input ldap
    configuration, the configuration loaded from the system, and a
    specific filter.  Synchronous by default; pass a `callback` function
    to run asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.ldap_users_search_post(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str username: Registered user ID
    :param LdapConf ldap_conf: ldap search configuration; the ldapconf
        field can supply an ldap service configuration.  When blank, the
        current configuration is loaded from the system.
    :return: list[LdapUsers]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.ldap_users_search_post_with_http_info(**kwargs)
def ldap_users_search_post_with_http_info(self, **kwargs):
    """
    Search available ldap users.

    This endpoint searches the available ldap users based on the related
    configuration parameters.  The search can use an input ldap
    configuration, the configuration loaded from the system, and a
    specific filter.  Synchronous by default; pass a `callback` function
    to run asynchronously, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.ldap_users_search_post_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str username: Registered user ID
    :param LdapConf ldap_conf: ldap search configuration; the ldapconf
        field can supply an ldap service configuration.  When blank, the
        current configuration is loaded from the system.
    :return: list[LdapUsers]
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['username', 'ldap_conf', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method ldap_users_search_post" % key
            )
    params = dict(kwargs)
    # The username filter is a query item; the ldap configuration is the body.
    query_params = [('username', params['username'])] if 'username' in params else []
    body_params = params.get('ldap_conf')
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/ldap/users/search', 'POST',
        {},             # no path parameters
        query_params,
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='list[LdapUsers]',
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def logs_get(self, **kwargs):
    """
    Get recent logs of the projects which the user is a member of.

    This endpoint lets the user see the recent operation logs of the
    projects of which they are a member.  Synchronous by default; pass a
    `callback` function to run asynchronously, in which case the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.logs_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str username: Username of the operator.
    :param str repository: The name of repository
    :param str tag: The name of tag
    :param str operation: The operation
    :param str begin_timestamp: The begin timestamp
    :param str end_timestamp: The end timestamp
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :return: list[AccessLog]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.logs_get_with_http_info(**kwargs)
def logs_get_with_http_info(self, **kwargs):
    """
    Get recent logs of the projects which the user is a member of.

    This endpoint lets the user see the recent operation logs of the
    projects of which they are a member.  Synchronous by default; pass a
    `callback` function to run asynchronously, in which case the request
    thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.logs_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str username: Username of the operator.
    :param str repository: The name of repository
    :param str tag: The name of tag
    :param str operation: The operation
    :param str begin_timestamp: The begin timestamp
    :param str end_timestamp: The end timestamp
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :return: list[AccessLog]
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['username', 'repository', 'tag', 'operation',
                'begin_timestamp', 'end_timestamp', 'page', 'page_size',
                'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method logs_get" % key
            )
    params = dict(kwargs)
    # Only parameters the caller actually supplied become query items.
    query_params = [(name, params[name])
                    for name in ('username', 'repository', 'tag',
                                 'operation', 'begin_timestamp',
                                 'end_timestamp', 'page', 'page_size')
                    if name in params]
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/logs', 'GET',
        {},             # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[AccessLog]',
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def policies_replication_get(self, **kwargs):
    """
    List filters policies by name and project_id.

    This endpoint lets the user list policies filtered by name and
    project_id; if both are nil, all policies are returned.  Synchronous
    by default; pass a `callback` function to run asynchronously, in
    which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.policies_replication_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: The replication's policy name.
    :param int project_id: Relevant project ID.
    :return: list[RepPolicy]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Unwrap the (data, status, headers) tuple down to the data only;
    # with a callback the delegate returns the request thread instead.
    kwargs['_return_http_data_only'] = True
    return self.policies_replication_get_with_http_info(**kwargs)
def policies_replication_get_with_http_info(self, **kwargs):
    """
    List filters policies by name and project_id.

    This endpoint lets the user list policies filtered by name and
    project_id; if both are nil, all policies are returned.  Synchronous
    by default; pass a `callback` function to run asynchronously, in
    which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.policies_replication_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: The replication's policy name.
    :param int project_id: Relevant project ID.
    :return: list[RepPolicy]
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ['name', 'project_id', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout']
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method policies_replication_get" % key
            )
    params = dict(kwargs)
    # Only parameters the caller actually supplied become query items.
    query_params = [(name, params[name])
                    for name in ('name', 'project_id')
                    if name in params]
    # Negotiate `Accept` and `Content-Type` HTTP headers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/policies/replication', 'GET',
        {},             # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[RepPolicy]',
        auth_settings=[],  # no authentication setting for this endpoint
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def policies_replication_id_enablement_put(self, id, enabledflag, **kwargs):
    """
    Enable or disable a replication policy.

    Issues ``PUT /policies/replication/{id}/enablement``. The call is
    synchronous by default; supply a ``callback`` keyword argument to run
    the request on a worker thread and receive the response through the
    callback, in which case the request thread is returned instead.

    :param int id: policy ID (required)
    :param RepPolicyEnablementReq enabledflag: The policy enablement flag. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: None, or the request thread when called asynchronously.
    """
    # Callers of this convenience wrapper only want the payload, never
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.policies_replication_id_enablement_put_with_http_info
    if kwargs.get('callback'):
        return invoke(id, enabledflag, **kwargs)
    data = invoke(id, enabledflag, **kwargs)
    return data
def policies_replication_id_enablement_put_with_http_info(self, id, enabledflag, **kwargs):
    """
    Update the enablement flag of a replication policy.

    Builds and sends ``PUT /policies/replication/{id}/enablement``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.policies_replication_id_enablement_put_with_http_info(id, enabledflag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: policy ID (required)
    :param RepPolicyEnablementReq enabledflag: The policy enablement flag. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['id', 'enabledflag']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method policies_replication_id_enablement_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `policies_replication_id_enablement_put`")
    # verify the required parameter 'enabledflag' is set
    if ('enabledflag' not in params) or (params['enabledflag'] is None):
        raise ValueError("Missing the required parameter `enabledflag` when calling `policies_replication_id_enablement_put`")
    collection_formats = {}
    # `id` is substituted into the URL path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The enablement flag travels as the request body.
    body_params = None
    if 'enabledflag' in params:
        body_params = params['enabledflag']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/policies/replication/{id}/enablement', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def policies_replication_id_get(self, id, **kwargs):
    """
    Fetch a replication policy by its ID.

    Issues ``GET /policies/replication/{id}``. Synchronous by default;
    pass a ``callback`` keyword argument to perform the request
    asynchronously, in which case the request thread is returned and the
    response is delivered to the callback.

    :param int id: policy ID (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: RepPolicy, or the request thread when called asynchronously.
    """
    # The wrapper always unwraps the raw (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.policies_replication_id_get_with_http_info
    if kwargs.get('callback'):
        return invoke(id, **kwargs)
    data = invoke(id, **kwargs)
    return data
def policies_replication_id_get_with_http_info(self, id, **kwargs):
    """
    Get a replication policy by its ID.

    Builds and sends ``GET /policies/replication/{id}``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.policies_replication_id_get_with_http_info(id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: policy ID (required)
    :return: RepPolicy
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method policies_replication_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `policies_replication_id_get`")
    collection_formats = {}
    # `id` is substituted into the URL path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/policies/replication/{id}', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='RepPolicy',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def policies_replication_id_put(self, id, policyupdate, **kwargs):
    """
    Modify a replication policy's name, description, target and enablement.

    Issues ``PUT /policies/replication/{id}``. Synchronous by default;
    supply a ``callback`` keyword argument to run the request on a worker
    thread and receive the response through the callback.

    :param int id: policy ID (required)
    :param RepPolicyUpdate policyupdate: Update policy name, description, target and enablement. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: None, or the request thread when called asynchronously.
    """
    # Callers of this wrapper want the payload only, not the raw tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.policies_replication_id_put_with_http_info
    if kwargs.get('callback'):
        return invoke(id, policyupdate, **kwargs)
    data = invoke(id, policyupdate, **kwargs)
    return data
def policies_replication_id_put_with_http_info(self, id, policyupdate, **kwargs):
    """
    Modify a replication policy's name, description, target and enablement.

    Builds and sends ``PUT /policies/replication/{id}``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.policies_replication_id_put_with_http_info(id, policyupdate, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: policy ID (required)
    :param RepPolicyUpdate policyupdate: Update policy name, description, target and enablement. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['id', 'policyupdate']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method policies_replication_id_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `policies_replication_id_put`")
    # verify the required parameter 'policyupdate' is set
    if ('policyupdate' not in params) or (params['policyupdate'] is None):
        raise ValueError("Missing the required parameter `policyupdate` when calling `policies_replication_id_put`")
    collection_formats = {}
    # `id` is substituted into the URL path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The policy update document travels as the request body.
    body_params = None
    if 'policyupdate' in params:
        body_params = params['policyupdate']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/policies/replication/{id}', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def policies_replication_post(self, policyinfo, **kwargs):
    """
    Create a replication policy.

    Issues ``POST /policies/replication``; per the service contract, an
    enabled policy triggers replication immediately. Synchronous by
    default; supply a ``callback`` keyword argument to run the request
    asynchronously and receive the response through the callback.

    :param RepPolicyPost policyinfo: Create new policy. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: None, or the request thread when called asynchronously.
    """
    # Callers of this wrapper want the payload only, not the raw tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.policies_replication_post_with_http_info
    if kwargs.get('callback'):
        return invoke(policyinfo, **kwargs)
    data = invoke(policyinfo, **kwargs)
    return data
def policies_replication_post_with_http_info(self, policyinfo, **kwargs):
    """
    Create a replication policy.

    Builds and sends ``POST /policies/replication``. If the created
    policy is enabled, the replication is triggered right away.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.policies_replication_post_with_http_info(policyinfo, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param RepPolicyPost policyinfo: Create new policy. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['policyinfo']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method policies_replication_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'policyinfo' is set
    if ('policyinfo' not in params) or (params['policyinfo'] is None):
        raise ValueError("Missing the required parameter `policyinfo` when calling `policies_replication_post`")
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The new policy document travels as the request body.
    body_params = None
    if 'policyinfo' in params:
        body_params = params['policyinfo']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/policies/replication', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def projects_get(self, **kwargs):
    """
    List projects.

    Issues ``GET /projects``, returning all projects created by Harbor,
    optionally filtered. Synchronous by default; supply a ``callback``
    keyword argument to run the request asynchronously and receive the
    response through the callback.

    :param str name: The name of project.
    :param bool public: The project is public or private.
    :param str owner: The name of project owner.
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[Project], or the request thread when called asynchronously.
    """
    # Callers of this wrapper want the payload only, not the raw tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.projects_get_with_http_info
    if kwargs.get('callback'):
        return invoke(**kwargs)
    data = invoke(**kwargs)
    return data
def projects_get_with_http_info(self, **kwargs):
    """
    List projects.

    Builds and sends ``GET /projects``; the result can be filtered by
    project name, visibility, owner and paging parameters.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str name: The name of project.
    :param bool public: The project is public or private.
    :param str owner: The name of project owner.
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :return: list[Project]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['name', 'public', 'owner', 'page', 'page_size']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_get" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    path_params = {}
    # Every filter supplied by the caller becomes a query-string pair.
    query_params = []
    if 'name' in params:
        query_params.append(('name', params['name']))
    if 'public' in params:
        query_params.append(('public', params['public']))
    if 'owner' in params:
        query_params.append(('owner', params['owner']))
    if 'page' in params:
        query_params.append(('page', params['page']))
    if 'page_size' in params:
        query_params.append(('page_size', params['page_size']))
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/projects', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[Project]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def projects_head(self, project_name, **kwargs):
    """
    Check whether a project name already exists.

    Issues ``HEAD /projects``. Synchronous by default; supply a
    ``callback`` keyword argument to run the request asynchronously and
    receive the response through the callback.

    :param str project_name: Project name for checking exists. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: None, or the request thread when called asynchronously.
    """
    # Callers of this wrapper want the payload only, not the raw tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.projects_head_with_http_info
    if kwargs.get('callback'):
        return invoke(project_name, **kwargs)
    data = invoke(project_name, **kwargs)
    return data
def projects_head_with_http_info(self, project_name, **kwargs):
    """
    Check whether a project name already exists.

    Builds and sends ``HEAD /projects`` with the candidate name as a
    query parameter.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_head_with_http_info(project_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str project_name: Project name for checking exists. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_name']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_head" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_name' is set
    if ('project_name' not in params) or (params['project_name'] is None):
        raise ValueError("Missing the required parameter `project_name` when calling `projects_head`")
    collection_formats = {}
    path_params = {}
    # The candidate name is sent as a query-string pair, not in the path.
    query_params = []
    if 'project_name' in params:
        query_params.append(('project_name', params['project_name']))
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/projects', 'HEAD',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def projects_post(self, project, **kwargs):
    """
    Create a new project.

    Issues ``POST /projects``. Synchronous by default; supply a
    ``callback`` keyword argument to run the request asynchronously and
    receive the response through the callback.

    :param ProjectReq project: New created project. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: None, or the request thread when called asynchronously.
    """
    # Callers of this wrapper want the payload only, not the raw tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.projects_post_with_http_info
    if kwargs.get('callback'):
        return invoke(project, **kwargs)
    data = invoke(project, **kwargs)
    return data
def projects_post_with_http_info(self, project, **kwargs):
    """
    Create a new project.

    Builds and sends ``POST /projects`` with the project definition as
    the request body.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_post_with_http_info(project, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ProjectReq project: New created project. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project' is set
    if ('project' not in params) or (params['project'] is None):
        raise ValueError("Missing the required parameter `project` when calling `projects_post`")
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The project definition travels as the request body.
    body_params = None
    if 'project' in params:
        body_params = params['project']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/projects', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def projects_project_id_delete(self, project_id, **kwargs):
    """
    Delete a project by its project ID.

    Issues ``DELETE /projects/{project_id}``. Synchronous by default;
    supply a ``callback`` keyword argument to run the request
    asynchronously and receive the response through the callback.

    :param int project_id: Project ID of project which will be deleted. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: None, or the request thread when called asynchronously.
    """
    # Callers of this wrapper want the payload only, not the raw tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.projects_project_id_delete_with_http_info
    if kwargs.get('callback'):
        return invoke(project_id, **kwargs)
    data = invoke(project_id, **kwargs)
    return data
def projects_project_id_delete_with_http_info(self, project_id, **kwargs):
    """
    Delete a project by its project ID.

    Builds and sends ``DELETE /projects/{project_id}``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_delete_with_http_info(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Project ID of project which will be deleted. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_id' is set
    if ('project_id' not in params) or (params['project_id'] is None):
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_delete`")
    collection_formats = {}
    # `project_id` is substituted into the URL path template below.
    path_params = {}
    if 'project_id' in params:
        path_params['project_id'] = params['project_id']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/projects/{project_id}', 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def projects_project_id_get(self, project_id, **kwargs):
    """
    Return detailed information for a specific project.

    Issues ``GET /projects/{project_id}``. Synchronous by default;
    supply a ``callback`` keyword argument to run the request
    asynchronously and receive the response through the callback.

    :param int project_id: Project ID for filtering results. (required)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: Project, or the request thread when called asynchronously.
    """
    # Callers of this wrapper want the payload only, not the raw tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.projects_project_id_get_with_http_info
    if kwargs.get('callback'):
        return invoke(project_id, **kwargs)
    data = invoke(project_id, **kwargs)
    return data
def projects_project_id_get_with_http_info(self, project_id, **kwargs):
    """
    Return detailed information for a specific project.

    Builds and sends ``GET /projects/{project_id}``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_get_with_http_info(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Project ID for filtering results. (required)
    :return: Project
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the
    # whitelisted keys above are ever read back out of `params`.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_id' is set
    if ('project_id' not in params) or (params['project_id'] is None):
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_get`")
    collection_formats = {}
    # `project_id` is substituted into the URL path template below.
    path_params = {}
    if 'project_id' in params:
        path_params['project_id'] = params['project_id']
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api('/projects/{project_id}', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Project',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def projects_project_id_logs_get(self, project_id, **kwargs):
    """
    Get access logs for a project.

    Issues ``GET /projects/{project_id}/logs`` filtered by the optional
    operator, repository, tag, operation and timestamp arguments.
    Synchronous by default; supply a ``callback`` keyword argument to run
    the request asynchronously and receive the response through the
    callback.

    :param int project_id: Relevant project ID (required)
    :param str username: Username of the operator.
    :param str repository: The name of repository
    :param str tag: The name of tag
    :param str operation: The operation
    :param str begin_timestamp: The begin timestamp
    :param str end_timestamp: The end timestamp
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: list[AccessLog], or the request thread when called asynchronously.
    """
    # Callers of this wrapper want the payload only, not the raw tuple.
    kwargs['_return_http_data_only'] = True
    invoke = self.projects_project_id_logs_get_with_http_info
    if kwargs.get('callback'):
        return invoke(project_id, **kwargs)
    data = invoke(project_id, **kwargs)
    return data
def projects_project_id_logs_get_with_http_info(self, project_id, **kwargs):
    """
    Get access logs accompany with a relevant project.

    Raw variant of :meth:`projects_project_id_logs_get`; returns the
    full HTTP response information unless `_return_http_data_only` is
    set. Synchronous by default; supply a `callback` for async use.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_logs_get_with_http_info(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID (required)
    :param str username: Username of the operator.
    :param str repository: The name of repository
    :param str tag: The name of tag
    :param str operation: The operation
    :param str begin_timestamp: The begin timestamp
    :param str end_timestamp: The end timestamp
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :return: list[AccessLog]
        If the method is called asynchronously, returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options.
    all_params = ['project_id', 'username', 'repository', 'tag',
                  'operation', 'begin_timestamp', 'end_timestamp',
                  'page', 'page_size', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keywords, then fold kwargs into the params dict.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_logs_get" % key
            )
        params[key] = val
    del params['kwargs']
    # The required parameter must be present and non-None.
    if params.get('project_id') is None:
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_logs_get`")
    collection_formats = {}
    path_params = {name: params[name]
                   for name in ('project_id',) if name in params}
    # Optional filters become query-string pairs, in declaration order.
    query_params = [
        (name, params[name])
        for name in ('username', 'repository', 'tag', 'operation',
                     'begin_timestamp', 'end_timestamp', 'page',
                     'page_size')
        if name in params
    ]
    form_params = []
    local_var_files = {}
    body_params = None
    # Negotiate request/response content types via the API client.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # No authentication scheme is attached to this endpoint.
    auth_settings = []
    return self.api_client.call_api(
        '/projects/{project_id}/logs', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[AccessLog]',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def projects_project_id_members_get(self, project_id, **kwargs):
    """
    Return a project's relevant role members.

    This endpoint is for the user to search a specified project's
    relevant role members.

    Synchronous by default; supply a `callback` function to make an
    asynchronous request, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_get(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :return: list[User]
        If the method is called asynchronously, returns the request thread.
    """
    # Strip the response down to just the payload for wrapper callers.
    kwargs['_return_http_data_only'] = True
    # Forward unconditionally: thread when async, data when sync.
    return self.projects_project_id_members_get_with_http_info(
        project_id, **kwargs)
def projects_project_id_members_get_with_http_info(self, project_id, **kwargs):
    """
    Return a project's relevant role members.

    Raw variant of :meth:`projects_project_id_members_get`; returns the
    full HTTP response information unless `_return_http_data_only` is
    set. Synchronous by default; supply a `callback` for async use.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_get_with_http_info(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :return: list[User]
        If the method is called asynchronously, returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options.
    all_params = ['project_id', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Reject unknown keywords, then fold kwargs into the params dict.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_members_get" % key
            )
        params[key] = val
    del params['kwargs']
    # The required parameter must be present and non-None.
    if params.get('project_id') is None:
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_members_get`")
    collection_formats = {}
    path_params = {name: params[name]
                   for name in ('project_id',) if name in params}
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None
    # Negotiate request/response content types via the API client.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # No authentication scheme is attached to this endpoint.
    auth_settings = []
    return self.api_client.call_api(
        '/projects/{project_id}/members/', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[User]',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def projects_project_id_members_post(self, project_id, **kwargs):
    """
    Add project role member accompany with relevant project and user.

    This endpoint is for the user to add a project role member
    accompanying the relevant project and user.

    Synchronous by default; supply a `callback` function to make an
    asynchronous request, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_post(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :param RoleParam roles: Role members for adding to relevant project. Only one role is supported in the role list.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Strip the response down to just the payload for wrapper callers.
    kwargs['_return_http_data_only'] = True
    # Forward unconditionally: thread when async, data when sync.
    return self.projects_project_id_members_post_with_http_info(
        project_id, **kwargs)
def projects_project_id_members_post_with_http_info(self, project_id, **kwargs):
    """
    Add project role member accompany with relevant project and user.

    Raw variant of :meth:`projects_project_id_members_post`; returns the
    full HTTP response information unless `_return_http_data_only` is
    set. Synchronous by default; supply a `callback` for async use.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_post_with_http_info(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :param RoleParam roles: Role members for adding to relevant project. Only one role is supported in the role list.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options.
    all_params = ['project_id', 'roles', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keywords, then fold kwargs into the params dict.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_members_post" % key
            )
        params[key] = val
    del params['kwargs']
    # The required parameter must be present and non-None.
    if params.get('project_id') is None:
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_members_post`")
    collection_formats = {}
    path_params = {name: params[name]
                   for name in ('project_id',) if name in params}
    query_params = []
    form_params = []
    local_var_files = {}
    # The optional RoleParam payload travels as the request body.
    body_params = params.get('roles')
    # Negotiate request/response content types via the API client.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # No authentication scheme is attached to this endpoint.
    auth_settings = []
    return self.api_client.call_api(
        '/projects/{project_id}/members/', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def projects_project_id_members_user_id_delete(self, project_id, user_id, **kwargs):
    """
    Delete project role members accompany with relevant project and user.

    This endpoint is aimed to remove project role members already added
    to the relevant project and user.

    Synchronous by default; supply a `callback` function to make an
    asynchronous request, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_user_id_delete(project_id, user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :param int user_id: Relevant user ID. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Strip the response down to just the payload for wrapper callers.
    kwargs['_return_http_data_only'] = True
    # Forward unconditionally: thread when async, data when sync.
    return self.projects_project_id_members_user_id_delete_with_http_info(
        project_id, user_id, **kwargs)
def projects_project_id_members_user_id_delete_with_http_info(self, project_id, user_id, **kwargs):
    """
    Delete project role members accompany with relevant project and user.

    Raw variant of :meth:`projects_project_id_members_user_id_delete`;
    returns the full HTTP response information unless
    `_return_http_data_only` is set. Synchronous by default; supply a
    `callback` for async use.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_user_id_delete_with_http_info(project_id, user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :param int user_id: Relevant user ID. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options.
    all_params = ['project_id', 'user_id', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keywords, then fold kwargs into the params dict.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_members_user_id_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # Both path parameters are required and must be non-None.
    if params.get('project_id') is None:
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_members_user_id_delete`")
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `projects_project_id_members_user_id_delete`")
    collection_formats = {}
    path_params = {name: params[name]
                   for name in ('project_id', 'user_id')
                   if name in params}
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None
    # Negotiate request/response content types via the API client.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # No authentication scheme is attached to this endpoint.
    auth_settings = []
    return self.api_client.call_api(
        '/projects/{project_id}/members/{user_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def projects_project_id_members_user_id_get(self, project_id, user_id, **kwargs):
    """
    Return role members accompany with relevant project and user.

    This endpoint is for the user to get role members accompanying the
    relevant project and user.

    Synchronous by default; supply a `callback` function to make an
    asynchronous request, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_user_id_get(project_id, user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID (required)
    :param int user_id: Relevant user ID (required)
    :return: list[Role]
        If the method is called asynchronously, returns the request thread.
    """
    # Strip the response down to just the payload for wrapper callers.
    kwargs['_return_http_data_only'] = True
    # Forward unconditionally: thread when async, data when sync.
    return self.projects_project_id_members_user_id_get_with_http_info(
        project_id, user_id, **kwargs)
def projects_project_id_members_user_id_get_with_http_info(self, project_id, user_id, **kwargs):
    """
    Return role members accompany with relevant project and user.

    Raw variant of :meth:`projects_project_id_members_user_id_get`;
    returns the full HTTP response information unless
    `_return_http_data_only` is set. Synchronous by default; supply a
    `callback` for async use.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_user_id_get_with_http_info(project_id, user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID (required)
    :param int user_id: Relevant user ID (required)
    :return: list[Role]
        If the method is called asynchronously, returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options.
    all_params = ['project_id', 'user_id', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keywords, then fold kwargs into the params dict.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_members_user_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # Both path parameters are required and must be non-None.
    if params.get('project_id') is None:
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_members_user_id_get`")
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `projects_project_id_members_user_id_get`")
    collection_formats = {}
    path_params = {name: params[name]
                   for name in ('project_id', 'user_id')
                   if name in params}
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None
    # Negotiate request/response content types via the API client.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # No authentication scheme is attached to this endpoint.
    auth_settings = []
    return self.api_client.call_api(
        '/projects/{project_id}/members/{user_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Role]',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def projects_project_id_members_user_id_put(self, project_id, user_id, **kwargs):
    """
    Update project role members accompany with relevant project and user.

    This endpoint is for the user to update the current project role
    members accompanying the relevant project and user.

    Synchronous by default; supply a `callback` function to make an
    asynchronous request, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_user_id_put(project_id, user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :param int user_id: Relevant user ID. (required)
    :param RoleParam roles: Updates for roles and username.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Strip the response down to just the payload for wrapper callers.
    kwargs['_return_http_data_only'] = True
    # Forward unconditionally: thread when async, data when sync.
    return self.projects_project_id_members_user_id_put_with_http_info(
        project_id, user_id, **kwargs)
def projects_project_id_members_user_id_put_with_http_info(self, project_id, user_id, **kwargs):
    """
    Update project role members accompany with relevant project and user.

    Raw variant of :meth:`projects_project_id_members_user_id_put`;
    returns the full HTTP response information unless
    `_return_http_data_only` is set. Synchronous by default; supply a
    `callback` for async use.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_members_user_id_put_with_http_info(project_id, user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :param int user_id: Relevant user ID. (required)
    :param RoleParam roles: Updates for roles and username.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options.
    all_params = ['project_id', 'user_id', 'roles', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keywords, then fold kwargs into the params dict.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_members_user_id_put" % key
            )
        params[key] = val
    del params['kwargs']
    # Both path parameters are required and must be non-None.
    if params.get('project_id') is None:
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_members_user_id_put`")
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `projects_project_id_members_user_id_put`")
    collection_formats = {}
    path_params = {name: params[name]
                   for name in ('project_id', 'user_id')
                   if name in params}
    query_params = []
    form_params = []
    local_var_files = {}
    # The optional RoleParam payload travels as the request body.
    body_params = params.get('roles')
    # Negotiate request/response content types via the API client.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # No authentication scheme is attached to this endpoint.
    auth_settings = []
    return self.api_client.call_api(
        '/projects/{project_id}/members/{user_id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def projects_project_id_publicity_put(self, project_id, project, **kwargs):
    """
    Update properties for a selected project.

    This endpoint is aimed to toggle a project publicity status.

    Synchronous by default; supply a `callback` function to make an
    asynchronous request, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_publicity_put(project_id, project, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Selected project ID. (required)
    :param Project project: Updates of project. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Strip the response down to just the payload for wrapper callers.
    kwargs['_return_http_data_only'] = True
    # Forward unconditionally: thread when async, data when sync.
    return self.projects_project_id_publicity_put_with_http_info(
        project_id, project, **kwargs)
def projects_project_id_publicity_put_with_http_info(self, project_id, project, **kwargs):
    """
    Update properties for a selected project.

    Raw variant of :meth:`projects_project_id_publicity_put`; returns
    the full HTTP response information unless `_return_http_data_only`
    is set. Synchronous by default; supply a `callback` for async use.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.projects_project_id_publicity_put_with_http_info(project_id, project, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Selected project ID. (required)
    :param Project project: Updates of project. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options.
    all_params = ['project_id', 'project', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keywords, then fold kwargs into the params dict.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method projects_project_id_publicity_put" % key
            )
        params[key] = val
    del params['kwargs']
    # Both required parameters must be present and non-None.
    if params.get('project_id') is None:
        raise ValueError("Missing the required parameter `project_id` when calling `projects_project_id_publicity_put`")
    if params.get('project') is None:
        raise ValueError("Missing the required parameter `project` when calling `projects_project_id_publicity_put`")
    collection_formats = {}
    path_params = {name: params[name]
                   for name in ('project_id',) if name in params}
    query_params = []
    form_params = []
    local_var_files = {}
    # The Project payload travels as the request body.
    body_params = params.get('project')
    # Negotiate request/response content types via the API client.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # No authentication scheme is attached to this endpoint.
    auth_settings = []
    return self.api_client.call_api(
        '/projects/{project_id}/publicity', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def repositories_get(self, project_id, **kwargs):
    """
    Get repositories accompany with relevant project and repo name.

    This endpoint lets the user search repositories accompanying the
    relevant project ID and repo name.

    Synchronous by default; supply a `callback` function to make an
    asynchronous request, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_get(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :param str q: Repo name for filtering results.
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :return: list[Repository]
        If the method is called asynchronously, returns the request thread.
    """
    # Strip the response down to just the payload for wrapper callers.
    kwargs['_return_http_data_only'] = True
    # Forward unconditionally: thread when async, data when sync.
    return self.repositories_get_with_http_info(project_id, **kwargs)
def repositories_get_with_http_info(self, project_id, **kwargs):
    """
    Get repositories accompany with relevant project and repo name.

    Raw variant of :meth:`repositories_get`; returns the full HTTP
    response information unless `_return_http_data_only` is set.
    Synchronous by default; supply a `callback` for async use.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_get_with_http_info(project_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int project_id: Relevant project ID. (required)
    :param str q: Repo name for filtering results.
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page, default is 10, maximum is 100.
    :return: list[Repository]
        If the method is called asynchronously, returns the request thread.
    """
    # Endpoint parameters plus the generic request-control options.
    all_params = ['project_id', 'q', 'page', 'page_size', 'callback',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']
    params = locals()
    # Reject unknown keywords, then fold kwargs into the params dict.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_get" % key
            )
        params[key] = val
    del params['kwargs']
    # The required parameter must be present and non-None.
    if params.get('project_id') is None:
        raise ValueError("Missing the required parameter `project_id` when calling `repositories_get`")
    collection_formats = {}
    path_params = {}
    # For this endpoint project_id is a QUERY parameter, not a path
    # segment; filters are appended in declaration order.
    query_params = [
        (name, params[name])
        for name in ('project_id', 'q', 'page', 'page_size')
        if name in params
    ]
    form_params = []
    local_var_files = {}
    body_params = None
    # Negotiate request/response content types via the API client.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # No authentication scheme is attached to this endpoint.
    auth_settings = []
    return self.api_client.call_api(
        '/repositories', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Repository]',
        auth_settings=auth_settings,
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def repositories_repo_name_delete(self, repo_name, **kwargs):
    """
    Delete a repository.

    This endpoint lets the user delete a repository by name.

    Synchronous by default; supply a `callback` function to make an
    asynchronous request, in which case the request thread is returned.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_delete(repo_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: The name of repository which will be deleted. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Strip the response down to just the payload for wrapper callers.
    kwargs['_return_http_data_only'] = True
    # Forward unconditionally: thread when async, data when sync.
    return self.repositories_repo_name_delete_with_http_info(
        repo_name, **kwargs)
def repositories_repo_name_delete_with_http_info(self, repo_name, **kwargs):
    """
    Delete a repository.

    This endpoint lets a user delete a repository by name.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_delete_with_http_info(repo_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: The name of repository which will be deleted. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method accepts, including the generic
    # transport options shared by all generated API methods.
    all_params = ['repo_name']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals (self, repo_name, kwargs, all_params);
    # validated keyword arguments are merged into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early with a clear message.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_repo_name_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'repo_name' is set
    if ('repo_name' not in params) or (params['repo_name'] is None):
        raise ValueError("Missing the required parameter `repo_name` when calling `repositories_repo_name_delete`")

    collection_formats = {}

    # 'repo_name' is substituted into the '{repo_name}' path template.
    path_params = {}
    if 'repo_name' in params:
        path_params['repo_name'] = params['repo_name']

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    # Delegate the actual HTTP round trip to the shared API client;
    # response_type=None means no deserialized body is expected.
    return self.api_client.call_api('/repositories/{repo_name}', 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def repositories_repo_name_signatures_get(self, repo_name, **kwargs):
    """
    Get signature information of a repository.

    This endpoint aims to retrieve signature information of a repository,
    the data is from the nested notary instance of Harbor. If the repository
    does not have any signature information in notary, this API will return
    an empty list with response code 200, instead of 404.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_signatures_get(repo_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: repository name. (required)
    :return: list[RepoSignature]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the lower-level call for just the deserialized response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.repositories_repo_name_signatures_get_with_http_info(repo_name, **kwargs)
    # Synchronous mode: return the response data directly.
    return self.repositories_repo_name_signatures_get_with_http_info(repo_name, **kwargs)
def repositories_repo_name_signatures_get_with_http_info(self, repo_name, **kwargs):
    """
    Get signature information of a repository.

    This endpoint aims to retrieve signature information of a repository,
    the data is from the nested notary instance of Harbor. If the repository
    does not have any signature information in notary, this API will return
    an empty list with response code 200, instead of 404.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_signatures_get_with_http_info(repo_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: repository name. (required)
    :return: list[RepoSignature]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method accepts, including the generic
    # transport options shared by all generated API methods.
    all_params = ['repo_name']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated keyword arguments are
    # merged into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early with a clear message.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_repo_name_signatures_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'repo_name' is set
    if ('repo_name' not in params) or (params['repo_name'] is None):
        raise ValueError("Missing the required parameter `repo_name` when calling `repositories_repo_name_signatures_get`")

    collection_formats = {}

    # 'repo_name' is substituted into the '{repo_name}' path template.
    path_params = {}
    if 'repo_name' in params:
        path_params['repo_name'] = params['repo_name']

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    # Delegate the HTTP round trip; the response body is deserialized
    # into a list of RepoSignature models.
    return self.api_client.call_api('/repositories/{repo_name}/signatures', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[RepoSignature]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def repositories_repo_name_tags_get(self, repo_name, **kwargs):
    """
    Get tags of a relevant repository.

    This endpoint aims to retrieve tags from a relevant repository.
    If deployed with Notary, the signature property of the response
    represents whether the image is signed or not. If the property is
    null, the image is unsigned.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_get(repo_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Relevant repository name. (required)
    :return: list[DetailedTag]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the lower-level call for just the deserialized response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.repositories_repo_name_tags_get_with_http_info(repo_name, **kwargs)
    # Synchronous mode: return the response data directly.
    return self.repositories_repo_name_tags_get_with_http_info(repo_name, **kwargs)
def repositories_repo_name_tags_get_with_http_info(self, repo_name, **kwargs):
    """
    Get tags of a relevant repository.

    This endpoint aims to retrieve tags from a relevant repository.
    If deployed with Notary, the signature property of the response
    represents whether the image is signed or not. If the property is
    null, the image is unsigned.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_get_with_http_info(repo_name, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Relevant repository name. (required)
    :return: list[DetailedTag]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method accepts, including the generic
    # transport options shared by all generated API methods.
    all_params = ['repo_name']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated keyword arguments are
    # merged into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early with a clear message.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_repo_name_tags_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'repo_name' is set
    if ('repo_name' not in params) or (params['repo_name'] is None):
        raise ValueError("Missing the required parameter `repo_name` when calling `repositories_repo_name_tags_get`")

    collection_formats = {}

    # 'repo_name' is substituted into the '{repo_name}' path template.
    path_params = {}
    if 'repo_name' in params:
        path_params['repo_name'] = params['repo_name']

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    # Delegate the HTTP round trip; the response body is deserialized
    # into a list of DetailedTag models.
    return self.api_client.call_api('/repositories/{repo_name}/tags', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[DetailedTag]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def repositories_repo_name_tags_tag_delete(self, repo_name, tag, **kwargs):
    """
    Delete a tag in a repository.

    This endpoint lets a user delete tags by repository name and tag.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_delete(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: The name of repository which will be deleted. (required)
    :param str tag: Tag of a repository. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the lower-level call for just the deserialized response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.repositories_repo_name_tags_tag_delete_with_http_info(repo_name, tag, **kwargs)
    # Synchronous mode: return the response data directly.
    return self.repositories_repo_name_tags_tag_delete_with_http_info(repo_name, tag, **kwargs)
def repositories_repo_name_tags_tag_delete_with_http_info(self, repo_name, tag, **kwargs):
    """
    Delete a tag in a repository.

    This endpoint lets a user delete tags by repository name and tag.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_delete_with_http_info(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: The name of repository which will be deleted. (required)
    :param str tag: Tag of a repository. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method accepts, including the generic
    # transport options shared by all generated API methods.
    all_params = ['repo_name', 'tag']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated keyword arguments are
    # merged into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early with a clear message.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_repo_name_tags_tag_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'repo_name' is set
    if ('repo_name' not in params) or (params['repo_name'] is None):
        raise ValueError("Missing the required parameter `repo_name` when calling `repositories_repo_name_tags_tag_delete`")
    # verify the required parameter 'tag' is set
    if ('tag' not in params) or (params['tag'] is None):
        raise ValueError("Missing the required parameter `tag` when calling `repositories_repo_name_tags_tag_delete`")

    collection_formats = {}

    # Both values are substituted into the '{repo_name}'/'{tag}' path template.
    path_params = {}
    if 'repo_name' in params:
        path_params['repo_name'] = params['repo_name']
    if 'tag' in params:
        path_params['tag'] = params['tag']

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    # Delegate the HTTP round trip; response_type=None means no
    # deserialized body is expected.
    return self.api_client.call_api('/repositories/{repo_name}/tags/{tag}', 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def repositories_repo_name_tags_tag_get(self, repo_name, tag, **kwargs):
    """
    Get the tag of the repository.

    This endpoint aims to retrieve the tag of the repository. If deployed
    with Notary, the signature property of the response represents whether
    the image is signed or not. If the property is null, the image is
    unsigned.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_get(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Relevant repository name. (required)
    :param str tag: Tag of the repository. (required)
    :return: DetailedTag
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the lower-level call for just the deserialized response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.repositories_repo_name_tags_tag_get_with_http_info(repo_name, tag, **kwargs)
    # Synchronous mode: return the response data directly.
    return self.repositories_repo_name_tags_tag_get_with_http_info(repo_name, tag, **kwargs)
def repositories_repo_name_tags_tag_get_with_http_info(self, repo_name, tag, **kwargs):
    """
    Get the tag of the repository.

    This endpoint aims to retrieve the tag of the repository. If deployed
    with Notary, the signature property of the response represents whether
    the image is signed or not. If the property is null, the image is
    unsigned.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_get_with_http_info(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Relevant repository name. (required)
    :param str tag: Tag of the repository. (required)
    :return: DetailedTag
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method accepts, including the generic
    # transport options shared by all generated API methods.
    all_params = ['repo_name', 'tag']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated keyword arguments are
    # merged into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early with a clear message.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_repo_name_tags_tag_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'repo_name' is set
    if ('repo_name' not in params) or (params['repo_name'] is None):
        raise ValueError("Missing the required parameter `repo_name` when calling `repositories_repo_name_tags_tag_get`")
    # verify the required parameter 'tag' is set
    if ('tag' not in params) or (params['tag'] is None):
        raise ValueError("Missing the required parameter `tag` when calling `repositories_repo_name_tags_tag_get`")

    collection_formats = {}

    # Both values are substituted into the '{repo_name}'/'{tag}' path template.
    path_params = {}
    if 'repo_name' in params:
        path_params['repo_name'] = params['repo_name']
    if 'tag' in params:
        path_params['tag'] = params['tag']

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    # Delegate the HTTP round trip; the response body is deserialized
    # into a DetailedTag model.
    return self.api_client.call_api('/repositories/{repo_name}/tags/{tag}', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='DetailedTag',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def repositories_repo_name_tags_tag_manifest_get(self, repo_name, tag, **kwargs):
    """
    Get manifests of a relevant repository.

    This endpoint aims to retrieve manifests from a relevant repository.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_manifest_get(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Repository name (required)
    :param str tag: Tag name (required)
    :param str version: The version of manifest, valid values are \"v1\" and \"v2\", default is \"v2\"
    :return: Manifest
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the lower-level call for just the deserialized response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.repositories_repo_name_tags_tag_manifest_get_with_http_info(repo_name, tag, **kwargs)
    # Synchronous mode: return the response data directly.
    return self.repositories_repo_name_tags_tag_manifest_get_with_http_info(repo_name, tag, **kwargs)
def repositories_repo_name_tags_tag_manifest_get_with_http_info(self, repo_name, tag, **kwargs):
    """
    Get manifests of a relevant repository.

    This endpoint aims to retrieve manifests from a relevant repository.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_manifest_get_with_http_info(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Repository name (required)
    :param str tag: Tag name (required)
    :param str version: The version of manifest, valid values are \"v1\" and \"v2\", default is \"v2\"
    :return: Manifest
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method accepts, including the generic
    # transport options shared by all generated API methods.
    all_params = ['repo_name', 'tag', 'version']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated keyword arguments are
    # merged into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early with a clear message.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_repo_name_tags_tag_manifest_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'repo_name' is set
    if ('repo_name' not in params) or (params['repo_name'] is None):
        raise ValueError("Missing the required parameter `repo_name` when calling `repositories_repo_name_tags_tag_manifest_get`")
    # verify the required parameter 'tag' is set
    if ('tag' not in params) or (params['tag'] is None):
        raise ValueError("Missing the required parameter `tag` when calling `repositories_repo_name_tags_tag_manifest_get`")

    collection_formats = {}

    # Both values are substituted into the '{repo_name}'/'{tag}' path template.
    path_params = {}
    if 'repo_name' in params:
        path_params['repo_name'] = params['repo_name']
    if 'tag' in params:
        path_params['tag'] = params['tag']

    # Optional manifest schema version is passed as a query parameter.
    query_params = []
    if 'version' in params:
        query_params.append(('version', params['version']))

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    # Delegate the HTTP round trip; the response body is deserialized
    # into a Manifest model.
    return self.api_client.call_api('/repositories/{repo_name}/tags/{tag}/manifest', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Manifest',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def repositories_repo_name_tags_tag_scan_post(self, repo_name, tag, **kwargs):
    """
    Scan the image.

    Trigger jobservice to call Clair API to scan the image identified by
    the repo_name and tag. Only project admins have permission to scan
    images under the project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_scan_post(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Repository name (required)
    :param str tag: Tag name (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the lower-level call for just the deserialized response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.repositories_repo_name_tags_tag_scan_post_with_http_info(repo_name, tag, **kwargs)
    # Synchronous mode: return the response data directly.
    return self.repositories_repo_name_tags_tag_scan_post_with_http_info(repo_name, tag, **kwargs)
def repositories_repo_name_tags_tag_scan_post_with_http_info(self, repo_name, tag, **kwargs):
    """
    Scan the image.

    Trigger jobservice to call Clair API to scan the image identified by
    the repo_name and tag. Only project admins have permission to scan
    images under the project.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_scan_post_with_http_info(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Repository name (required)
    :param str tag: Tag name (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method accepts, including the generic
    # transport options shared by all generated API methods.
    all_params = ['repo_name', 'tag']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated keyword arguments are
    # merged into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early with a clear message.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_repo_name_tags_tag_scan_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'repo_name' is set
    if ('repo_name' not in params) or (params['repo_name'] is None):
        raise ValueError("Missing the required parameter `repo_name` when calling `repositories_repo_name_tags_tag_scan_post`")
    # verify the required parameter 'tag' is set
    if ('tag' not in params) or (params['tag'] is None):
        raise ValueError("Missing the required parameter `tag` when calling `repositories_repo_name_tags_tag_scan_post`")

    collection_formats = {}

    # Both values are substituted into the '{repo_name}'/'{tag}' path template.
    path_params = {}
    if 'repo_name' in params:
        path_params['repo_name'] = params['repo_name']
    if 'tag' in params:
        path_params['tag'] = params['tag']

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    # Delegate the HTTP round trip; response_type=None means no
    # deserialized body is expected.
    return self.api_client.call_api('/repositories/{repo_name}/tags/{tag}/scan', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def repositories_repo_name_tags_tag_vulnerability_details_get(self, repo_name, tag, **kwargs):
    """
    Get vulnerability details of the image.

    Call Clair API to get the vulnerability based on the previous
    successful scan.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_vulnerability_details_get(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Repository name (required)
    :param str tag: Tag name (required)
    :return: list[DefinitionsVulnerabilityItem]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the lower-level call for just the deserialized response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.repositories_repo_name_tags_tag_vulnerability_details_get_with_http_info(repo_name, tag, **kwargs)
    # Synchronous mode: return the response data directly.
    return self.repositories_repo_name_tags_tag_vulnerability_details_get_with_http_info(repo_name, tag, **kwargs)
def repositories_repo_name_tags_tag_vulnerability_details_get_with_http_info(self, repo_name, tag, **kwargs):
    """
    Get vulnerability details of the image.

    Call Clair API to get the vulnerability based on the previous
    successful scan.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_repo_name_tags_tag_vulnerability_details_get_with_http_info(repo_name, tag, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str repo_name: Repository name (required)
    :param str tag: Tag name (required)
    :return: list[DefinitionsVulnerabilityItem]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every keyword argument this method accepts, including the generic
    # transport options shared by all generated API methods.
    all_params = ['repo_name', 'tag']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the current locals; validated keyword arguments are
    # merged into this dict below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            # Reject unknown keyword arguments early with a clear message.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_repo_name_tags_tag_vulnerability_details_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'repo_name' is set
    if ('repo_name' not in params) or (params['repo_name'] is None):
        raise ValueError("Missing the required parameter `repo_name` when calling `repositories_repo_name_tags_tag_vulnerability_details_get`")
    # verify the required parameter 'tag' is set
    if ('tag' not in params) or (params['tag'] is None):
        raise ValueError("Missing the required parameter `tag` when calling `repositories_repo_name_tags_tag_vulnerability_details_get`")

    collection_formats = {}

    # Both values are substituted into the '{repo_name}'/'{tag}' path template.
    path_params = {}
    if 'repo_name' in params:
        path_params['repo_name'] = params['repo_name']
    if 'tag' in params:
        path_params['tag'] = params['tag']

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    # Delegate the HTTP round trip; the response body is deserialized
    # into a list of DefinitionsVulnerabilityItem models.
    return self.api_client.call_api('/repositories/{repo_name}/tags/{tag}/vulnerability/details', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[DefinitionsVulnerabilityItem]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def repositories_top_get(self, **kwargs):
    """
    Get public repositories which are accessed most.

    This endpoint aims to let users see the most popular public
    repositories.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.repositories_top_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int count: The number of the requested public repositories, default is 10 if not provided.
    :return: list[Repository]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the lower-level call for just the deserialized response data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.repositories_top_get_with_http_info(**kwargs)
    # Synchronous mode: return the response data directly.
    return self.repositories_top_get_with_http_info(**kwargs)
def repositories_top_get_with_http_info(self, **kwargs):
    """Get public repositories which are accessed most.

    Low-level variant of ``repositories_top_get``; the api_client may
    also return HTTP status and headers.  Synchronous by default;
    supply a ``callback`` to run asynchronously and receive the
    request thread instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int count: number of public repositories requested; the
        server defaults to 10 when omitted.
    :return: list[Repository], or the request thread when asynchronous.
    """
    recognized = ('count', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method repositories_top_get" % name
            )

    query_params = []
    if 'count' in kwargs:
        query_params.append(('count', kwargs['count']))

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/repositories/top', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Repository]',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def search_get(self, q, **kwargs):
    """Search for projects and repositories.

    Returns projects and repositories that are public or related to
    the current logged-in user, in a proper display order.
    Synchronous by default; supply a ``callback`` function to make the
    request asynchronously, in which case the request thread is
    returned instead of the data.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param str q: search parameter for project and repository name.
        (required)
    :return: list[Search], or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # search_get_with_http_info for status and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.search_get_with_http_info(q, **kwargs)
def search_get_with_http_info(self, q, **kwargs):
    """Search for projects and repositories.

    Low-level variant of ``search_get``; the api_client may also
    return HTTP status and headers.  Synchronous by default; supply a
    ``callback`` to run asynchronously and receive the request thread
    instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param str q: search parameter for project and repository name.
        (required)
    :return: list[Search], or the request thread when asynchronous.
    """
    recognized = ('q', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_get" % name
            )
    # The search term is mandatory.
    if q is None:
        raise ValueError("Missing the required parameter `q` when calling `search_get`")

    query_params = [('q', q)]

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/search', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Search]',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def statistics_get(self, **kwargs):
    """Get project and repository counts relevant to the user.

    Reports the numbers of projects and repositories related to the
    logged-in user plus the public counts; admins also see the totals.
    Synchronous by default; supply a ``callback`` function to make the
    request asynchronously, in which case the request thread is
    returned instead of the data.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :return: StatisticMap, or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # statistics_get_with_http_info for status and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.statistics_get_with_http_info(**kwargs)
def statistics_get_with_http_info(self, **kwargs):
    """Get project and repository counts relevant to the user.

    Low-level variant of ``statistics_get``; the api_client may also
    return HTTP status and headers.  Synchronous by default; supply a
    ``callback`` to run asynchronously and receive the request thread
    instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :return: StatisticMap, or the request thread when asynchronous.
    """
    recognized = ('callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method statistics_get" % name
            )

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/statistics', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='StatisticMap',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def systeminfo_get(self, **kwargs):
    """Get general system info.

    Retrieves general system info; may be called anonymously.
    Synchronous by default; supply a ``callback`` function to make the
    request asynchronously, in which case the request thread is
    returned instead of the data.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :return: object, or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # systeminfo_get_with_http_info for status and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.systeminfo_get_with_http_info(**kwargs)
def systeminfo_get_with_http_info(self, **kwargs):
    """Get general system info.

    Low-level variant of ``systeminfo_get``; the api_client may also
    return HTTP status and headers.  Synchronous by default; supply a
    ``callback`` to run asynchronously and receive the request thread
    instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :return: object, or the request thread when asynchronous.
    """
    recognized = ('callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method systeminfo_get" % name
            )

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/systeminfo', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def systeminfo_getcert_get(self, **kwargs):
    """Get default root certificate under OVA deployment.

    Downloads the default root certificate provided for the admin user
    under an OVA deployment.  Synchronous by default; supply a
    ``callback`` function to make the request asynchronously, in which
    case the request thread is returned instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :return: None, or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # systeminfo_getcert_get_with_http_info for status and headers.
    kwargs['_return_http_data_only'] = True
    return self.systeminfo_getcert_get_with_http_info(**kwargs)
def systeminfo_getcert_get_with_http_info(self, **kwargs):
    """Get default root certificate under OVA deployment.

    Low-level variant of ``systeminfo_getcert_get``; the api_client
    may also return HTTP status and headers.  Synchronous by default;
    supply a ``callback`` to run asynchronously and receive the
    request thread instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :return: None, or the request thread when asynchronous.
    """
    recognized = ('callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method systeminfo_getcert_get" % name
            )

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/systeminfo/getcert', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def systeminfo_volumes_get(self, **kwargs):
    """Get system volume info (total/free size).

    Retrieves system volume info; provided for the admin user only.
    Synchronous by default; supply a ``callback`` function to make the
    request asynchronously, in which case the request thread is
    returned instead of the data.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :return: object, or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # systeminfo_volumes_get_with_http_info for status and headers.
    kwargs['_return_http_data_only'] = True
    return self.systeminfo_volumes_get_with_http_info(**kwargs)
def systeminfo_volumes_get_with_http_info(self, **kwargs):
    """Get system volume info (total/free size).

    Low-level variant of ``systeminfo_volumes_get``; the api_client
    may also return HTTP status and headers.  Synchronous by default;
    supply a ``callback`` to run asynchronously and receive the
    request thread instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :return: object, or the request thread when asynchronous.
    """
    recognized = ('callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method systeminfo_volumes_get" % name
            )

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/systeminfo/volumes', 'GET',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def targets_get(self, **kwargs):
    """List filters targets by name.

    Lists replication targets filtered by name; with no name, all
    targets are returned.  Synchronous by default; supply a
    ``callback`` function to make the request asynchronously, in which
    case the request thread is returned instead of the data.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param str name: the replication's target name.
    :return: list[RepTarget], or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # targets_get_with_http_info for status and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.targets_get_with_http_info(**kwargs)
def targets_get_with_http_info(self, **kwargs):
    """List filters targets by name.

    Low-level variant of ``targets_get``; the api_client may also
    return HTTP status and headers.  Synchronous by default; supply a
    ``callback`` to run asynchronously and receive the request thread
    instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param str name: the replication's target name.
    :return: list[RepTarget], or the request thread when asynchronous.
    """
    recognized = ('name', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for arg in kwargs:
        if arg not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method targets_get" % arg
            )

    query_params = []
    if 'name' in kwargs:
        query_params.append(('name', kwargs['name']))

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/targets', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[RepTarget]',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def targets_id_delete(self, id, **kwargs):
    """Delete specific replication's target.

    Deletes the replication target identified by ``id``.  Synchronous
    by default; supply a ``callback`` function to make the request
    asynchronously, in which case the request thread is returned
    instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int id: the replication's target ID. (required)
    :return: None, or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # targets_id_delete_with_http_info for status and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.targets_id_delete_with_http_info(id, **kwargs)
def targets_id_delete_with_http_info(self, id, **kwargs):
    """Delete specific replication's target.

    Low-level variant of ``targets_id_delete``; the api_client may
    also return HTTP status and headers.  Synchronous by default;
    supply a ``callback`` to run asynchronously and receive the
    request thread instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int id: the replication's target ID. (required)
    :return: None, or the request thread when asynchronous.
    """
    recognized = ('id', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for arg in kwargs:
        if arg not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method targets_id_delete" % arg
            )
    # The target ID is mandatory.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `targets_id_delete`")

    path_params = {'id': id}

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/targets/{id}', 'DELETE',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def targets_id_get(self, id, **kwargs):
    """Get replication's target.

    Fetches the replication target identified by ``id``.  Synchronous
    by default; supply a ``callback`` function to make the request
    asynchronously, in which case the request thread is returned
    instead of the data.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int id: the replication's target ID. (required)
    :return: RepTarget, or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # targets_id_get_with_http_info for status and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.targets_id_get_with_http_info(id, **kwargs)
def targets_id_get_with_http_info(self, id, **kwargs):
    """Get replication's target.

    Low-level variant of ``targets_id_get``; the api_client may also
    return HTTP status and headers.  Synchronous by default; supply a
    ``callback`` to run asynchronously and receive the request thread
    instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int id: the replication's target ID. (required)
    :return: RepTarget, or the request thread when asynchronous.
    """
    recognized = ('id', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for arg in kwargs:
        if arg not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method targets_id_get" % arg
            )
    # The target ID is mandatory.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `targets_id_get`")

    path_params = {'id': id}

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/targets/{id}', 'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='RepTarget',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def targets_id_ping_post(self, id, **kwargs):
    """Ping target.

    Pings the replication target identified by ``id``.  Synchronous
    by default; supply a ``callback`` function to make the request
    asynchronously, in which case the request thread is returned
    instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int id: the replication's target ID. (required)
    :return: None, or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # targets_id_ping_post_with_http_info for status and headers.
    kwargs['_return_http_data_only'] = True
    return self.targets_id_ping_post_with_http_info(id, **kwargs)
def targets_id_ping_post_with_http_info(self, id, **kwargs):
    """Ping target.

    Low-level variant of ``targets_id_ping_post``; the api_client may
    also return HTTP status and headers.  Synchronous by default;
    supply a ``callback`` to run asynchronously and receive the
    request thread instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int id: the replication's target ID. (required)
    :return: None, or the request thread when asynchronous.
    """
    recognized = ('id', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for arg in kwargs:
        if arg not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method targets_id_ping_post" % arg
            )
    # The target ID is mandatory.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `targets_id_ping_post`")

    path_params = {'id': id}

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/targets/{id}/ping', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def targets_id_policies_get(self, id, **kwargs):
    """List the target relevant policies.

    Lists policies filtered by the replication target's ID.
    Synchronous by default; supply a ``callback`` function to make the
    request asynchronously, in which case the request thread is
    returned instead of the data.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int id: the replication's target ID. (required)
    :return: list[RepPolicy], or the request thread when asynchronous.
    """
    # Only the deserialized payload is wanted here; use
    # targets_id_policies_get_with_http_info for status and headers.
    kwargs['_return_http_data_only'] = True
    return self.targets_id_policies_get_with_http_info(id, **kwargs)
def targets_id_policies_get_with_http_info(self, id, **kwargs):
    """List the target relevant policies.

    Low-level variant of ``targets_id_policies_get``; the api_client
    may also return HTTP status and headers.  Synchronous by default;
    supply a ``callback`` to run asynchronously and receive the
    request thread instead.

    :param callback function: invoked with the response when the
        request is asynchronous. (optional)
    :param int id: the replication's target ID. (required)
    :return: list[RepPolicy], or the request thread when asynchronous.
    """
    recognized = ('id', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject keyword arguments this endpoint does not understand.
    for arg in kwargs:
        if arg not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method targets_id_policies_get" % arg
            )
    # The target ID is mandatory.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `targets_id_policies_get`")

    path_params = {'id': id}

    # Content negotiation is delegated to the api_client helpers.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }

    # No authentication settings for this endpoint.
    return self.api_client.call_api(
        '/targets/{id}/policies/', 'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[RepPolicy]',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def targets_id_put(self, id, repo_target, **kwargs):
    """
    Update replication's target.

    This endpoint updates a specific replication target. The call is
    synchronous by default; pass a `callback` function to run it
    asynchronously and receive the response in the callback.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.targets_id_put(id, repo_target, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: The replication's target ID. (required)
    :param PutTarget repo_target: Updates of replication's target. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.targets_id_put_with_http_info(id, repo_target, **kwargs)
def targets_id_put_with_http_info(self, id, repo_target, **kwargs):
    """
    Update replication's target.

    This endpoint updates a specific replication target. The call is
    synchronous by default; pass a `callback` function to run it
    asynchronously and receive the response in the callback.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.targets_id_put_with_http_info(id, repo_target, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int id: The replication's target ID. (required)
    :param PutTarget repo_target: Updates of replication's target. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('id', 'repo_target', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method targets_id_put" % key
            )
    # Required parameters must be supplied and non-None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `targets_id_put`")
    if repo_target is None:
        raise ValueError("Missing the required parameter `repo_target` when calling `targets_id_put`")
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/targets/{id}', 'PUT',
        {'id': id},          # path params
        [],                  # query params
        header_params,
        body=repo_target,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],    # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def targets_ping_post(self, target, **kwargs):
    """
    Ping validates target.

    This endpoint pings a target to validate that it is reachable and
    that the credential is valid. The call is synchronous by default;
    pass a `callback` function to run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.targets_ping_post(target, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param PingTarget target: The target object. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.targets_ping_post_with_http_info(target, **kwargs)
def targets_ping_post_with_http_info(self, target, **kwargs):
    """
    Ping validates target.

    This endpoint pings a target to validate that it is reachable and
    that the credential is valid. The call is synchronous by default;
    pass a `callback` function to run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.targets_ping_post_with_http_info(target, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param PingTarget target: The target object. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('target', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method targets_ping_post" % key
            )
    # Required parameter must be supplied and non-None.
    if target is None:
        raise ValueError("Missing the required parameter `target` when calling `targets_ping_post`")
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/targets/ping', 'POST',
        {},                  # path params
        [],                  # query params
        header_params,
        body=target,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],    # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def targets_post(self, reptarget, **kwargs):
    """
    Create a new replication target.

    This endpoint lets a user create a new replication target. The call
    is synchronous by default; pass a `callback` function to run it
    asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.targets_post(reptarget, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param RepTargetPost reptarget: New created replication target. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.targets_post_with_http_info(reptarget, **kwargs)
def targets_post_with_http_info(self, reptarget, **kwargs):
    """
    Create a new replication target.

    This endpoint lets a user create a new replication target. The call
    is synchronous by default; pass a `callback` function to run it
    asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.targets_post_with_http_info(reptarget, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param RepTargetPost reptarget: New created replication target. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('reptarget', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method targets_post" % key
            )
    # Required parameter must be supplied and non-None.
    if reptarget is None:
        raise ValueError("Missing the required parameter `reptarget` when calling `targets_post`")
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/targets', 'POST',
        {},                  # path params
        [],                  # query params
        header_params,
        body=reptarget,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],    # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def users_current_get(self, **kwargs):
    """
    Get current user info.

    This endpoint gets the current user's information. The call is
    synchronous by default; pass a `callback` function to run it
    asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_current_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: User
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.users_current_get_with_http_info(**kwargs)
def users_current_get_with_http_info(self, **kwargs):
    """
    Get current user info.

    This endpoint gets the current user's information. The call is
    synchronous by default; pass a `callback` function to run it
    asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_current_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: User
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_current_get" % key
            )
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/users/current', 'GET',
        {},                  # path params
        [],                  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='User',
        auth_settings=[],    # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def users_get(self, **kwargs):
    """
    Get registered users of Harbor.

    This endpoint searches registered users, with support for filtering
    results by username. Note that this operation is currently for
    administrators only. The call is synchronous by default; pass a
    `callback` function to run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_get(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str username: Username for filtering results.
    :param str email: Email for filtering results.
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page.
    :return: list[User]
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.users_get_with_http_info(**kwargs)
def users_get_with_http_info(self, **kwargs):
    """
    Get registered users of Harbor.

    This endpoint searches registered users, with support for filtering
    results by username. Note that this operation is currently for
    administrators only. The call is synchronous by default; pass a
    `callback` function to run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_get_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str username: Username for filtering results.
    :param str email: Email for filtering results.
    :param int page: The page number, default is 1.
    :param int page_size: The size of per page.
    :return: list[User]
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('username', 'email', 'page', 'page_size', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_get" % key
            )
    # Forward only the filters the caller actually supplied, in a fixed
    # order (a key passed explicitly as None is still forwarded).
    query_params = [
        (name, kwargs[name])
        for name in ('username', 'email', 'page', 'page_size')
        if name in kwargs
    ]
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/users', 'GET',
        {},                  # path params
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[User]',
        auth_settings=[],    # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def users_post(self, user, **kwargs):
    """
    Creates a new user account.

    This endpoint creates a user if the user does not already exist.
    The call is synchronous by default; pass a `callback` function to
    run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_post(user, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param User user: New created user. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.users_post_with_http_info(user, **kwargs)
def users_post_with_http_info(self, user, **kwargs):
    """
    Creates a new user account.

    This endpoint creates a user if the user does not already exist.
    The call is synchronous by default; pass a `callback` function to
    run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_post_with_http_info(user, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param User user: New created user. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('user', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_post" % key
            )
    # Required parameter must be supplied and non-None.
    if user is None:
        raise ValueError("Missing the required parameter `user` when calling `users_post`")
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/users', 'POST',
        {},                  # path params
        [],                  # query params
        header_params,
        body=user,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],    # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def users_user_id_delete(self, user_id, **kwargs):
    """
    Mark a registered user as be removed.

    This endpoint lets a Harbor administrator mark a registered user as
    removed; the user is not actually deleted from the DB. The call is
    synchronous by default; pass a `callback` function to run it
    asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_delete(user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: User ID for marking as to be removed. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.users_user_id_delete_with_http_info(user_id, **kwargs)
def users_user_id_delete_with_http_info(self, user_id, **kwargs):
    """
    Mark a registered user as be removed.

    This endpoint lets a Harbor administrator mark a registered user as
    removed; the user is not actually deleted from the DB. The call is
    synchronous by default; pass a `callback` function to run it
    asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_delete_with_http_info(user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: User ID for marking as to be removed. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('user_id', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_user_id_delete" % key
            )
    # Required parameter must be supplied and non-None.
    if user_id is None:
        raise ValueError("Missing the required parameter `user_id` when calling `users_user_id_delete`")
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/users/{user_id}', 'DELETE',
        {'user_id': user_id},    # path params
        [],                      # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],        # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def users_user_id_get(self, user_id, **kwargs):
    """
    Get a user's profile.

    Get user's profile with user id. The call is synchronous by
    default; pass a `callback` function to run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_get(user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: Registered user ID (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.users_user_id_get_with_http_info(user_id, **kwargs)
def users_user_id_get_with_http_info(self, user_id, **kwargs):
    """
    Get a user's profile.

    Get user's profile with user id. The call is synchronous by
    default; pass a `callback` function to run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_get_with_http_info(user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: Registered user ID (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('user_id', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_user_id_get" % key
            )
    # Required parameter must be supplied and non-None.
    if user_id is None:
        raise ValueError("Missing the required parameter `user_id` when calling `users_user_id_get`")
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    # NOTE(review): response_type is None here even though the endpoint
    # returns a user profile — this mirrors the generated spec; confirm
    # against the swagger definition before changing it.
    return self.api_client.call_api(
        '/users/{user_id}', 'GET',
        {'user_id': user_id},    # path params
        [],                      # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],        # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def users_user_id_password_put(self, user_id, password, **kwargs):
    """
    Change the password on a user that already exists.

    This endpoint updates a user's password. Users with the admin role
    can change any user's password; guest users can change only their
    own password. The call is synchronous by default; pass a `callback`
    function to run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_password_put(user_id, password, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: Registered user ID. (required)
    :param Password password: Password to be updated. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.users_user_id_password_put_with_http_info(user_id, password, **kwargs)
def users_user_id_password_put_with_http_info(self, user_id, password, **kwargs):
    """
    Change the password on a user that already exists.

    This endpoint updates a user's password. Users with the admin role
    can change any user's password; guest users can change only their
    own password. The call is synchronous by default; pass a `callback`
    function to run it asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_password_put_with_http_info(user_id, password, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: Registered user ID. (required)
    :param Password password: Password to be updated. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Reject any keyword argument this endpoint does not understand.
    accepted = ('user_id', 'password', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_user_id_password_put" % key
            )
    # Required parameters must be supplied and non-None.
    if user_id is None:
        raise ValueError("Missing the required parameter `user_id` when calling `users_user_id_password_put`")
    if password is None:
        raise ValueError("Missing the required parameter `password` when calling `users_user_id_password_put`")
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/plain']),
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['text/plain', 'application/json']),
    }
    return self.api_client.call_api(
        '/users/{user_id}/password', 'PUT',
        {'user_id': user_id},    # path params
        [],                      # query params
        header_params,
        body=password,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],        # no authentication setting
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def users_user_id_put(self, user_id, profile, **kwargs):
    """
    Update a registered user to change his profile.

    This endpoint lets a registered user change his profile. The call
    is synchronous by default; pass a `callback` function to run it
    asynchronously.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_put(user_id, profile, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: Registered user ID (required)
    :param UserProfile profile: Only email, realname and comment can be modified. (required)
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # This convenience wrapper always strips HTTP metadata from the result.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths alike return whatever the *_with_http_info
    # variant produces (deserialized data, or the request thread).
    return self.users_user_id_put_with_http_info(user_id, profile, **kwargs)
def users_user_id_put_with_http_info(self, user_id, profile, **kwargs):
    """
    Update a registered user to change his profile.
    This endpoint let a registered user change his profile.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_put_with_http_info(user_id, profile, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: Registered user ID (required)
    :param UserProfile profile: Only email, realname and comment can be modified. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this call: the API parameters plus the
    # generic transport options shared by every generated method.
    all_params = ['user_id', 'profile']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures the named parameters under their
    # variable names ('user_id', 'profile', ...); the loop below then folds
    # the validated **kwargs into the same dict. Do not rename locals here.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_user_id_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `users_user_id_put`")
    # verify the required parameter 'profile' is set
    if ('profile' not in params) or (params['profile'] is None):
        raise ValueError("Missing the required parameter `profile` when calling `users_user_id_put`")

    collection_formats = {}

    # user_id is interpolated into the '/users/{user_id}' path template.
    path_params = {}
    if 'user_id' in params:
        path_params['user_id'] = params['user_id']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # The profile object is sent as the request body.
    body_params = None
    if 'profile' in params:
        body_params = params['profile']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    return self.api_client.call_api('/users/{user_id}', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def users_user_id_sysadmin_put(self, user_id, has_admin_role, **kwargs):
    """
    Update a registered user to change to be an administrator of Harbor.
    This endpoint let a registered user change to be an administrator of Harbor.

    The request is synchronous by default; pass a `callback` function to
    have it executed asynchronously, in which case the request thread is
    returned instead of the response data.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_sysadmin_put(user_id, has_admin_role, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: Registered user ID (required)
    :param HasAdminRole has_admin_role: Toggle a user to admin or not. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper only wants the deserialized payload, never
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the right value for both
    # the callback and the synchronous path, so forward its result directly.
    return self.users_user_id_sysadmin_put_with_http_info(user_id, has_admin_role, **kwargs)
def users_user_id_sysadmin_put_with_http_info(self, user_id, has_admin_role, **kwargs):
    """
    Update a registered user to change to be an administrator of Harbor.
    This endpoint let a registered user change to be an administrator of Harbor.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.users_user_id_sysadmin_put_with_http_info(user_id, has_admin_role, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int user_id: Registered user ID (required)
    :param HasAdminRole has_admin_role: Toggle a user to admin or not. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this call: the API parameters plus the
    # generic transport options shared by every generated method.
    all_params = ['user_id', 'has_admin_role']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures the named parameters under their
    # variable names ('user_id', 'has_admin_role', ...); the loop below then
    # folds the validated **kwargs into the same dict. Do not rename locals.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_user_id_sysadmin_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `users_user_id_sysadmin_put`")
    # verify the required parameter 'has_admin_role' is set
    if ('has_admin_role' not in params) or (params['has_admin_role'] is None):
        raise ValueError("Missing the required parameter `has_admin_role` when calling `users_user_id_sysadmin_put`")

    collection_formats = {}

    # user_id is interpolated into the '/users/{user_id}/sysadmin' path template.
    path_params = {}
    if 'user_id' in params:
        path_params['user_id'] = params['user_id']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # The admin-role toggle object is sent as the request body.
    body_params = None
    if 'has_admin_role' in params:
        body_params = params['has_admin_role']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/plain'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['text/plain', 'application/json'])

    # Authentication setting (none required for this endpoint)
    auth_settings = []

    return self.api_client.call_api('/users/{user_id}/sysadmin', 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
| 43.967016
| 326
| 0.575537
| 30,555
| 293,260
| 5.286631
| 0.015022
| 0.060421
| 0.021147
| 0.027189
| 0.985687
| 0.977639
| 0.971411
| 0.965456
| 0.952004
| 0.943194
| 0
| 0.00049
| 0.346212
| 293,260
| 6,669
| 327
| 43.973609
| 0.842012
| 0.34393
| 0
| 0.811736
| 1
| 0
| 0.167883
| 0.042791
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037592
| false
| 0.003667
| 0.004279
| 0
| 0.098105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ba9a29320facc1db1f87d19150c75486d2539482
| 57,360
|
py
|
Python
|
sdk/python/pulumi_oci/sch/outputs.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/sch/outputs.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/sch/outputs.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
# Public output types exported by this generated module.
__all__ = [
    'ServiceConnectorSource',
    'ServiceConnectorSourceCursor',
    'ServiceConnectorSourceLogSource',
    'ServiceConnectorTarget',
    'ServiceConnectorTask',
    'GetServiceConnectorSourceResult',
    'GetServiceConnectorSourceCursorResult',
    'GetServiceConnectorSourceLogSourceResult',
    'GetServiceConnectorTargetResult',
    'GetServiceConnectorTaskResult',
    'GetServiceConnectorsFilterResult',
    'GetServiceConnectorsServiceConnectorCollectionResult',
    'GetServiceConnectorsServiceConnectorCollectionItemResult',
    'GetServiceConnectorsServiceConnectorCollectionItemSourceResult',
    'GetServiceConnectorsServiceConnectorCollectionItemSourceCursorResult',
    'GetServiceConnectorsServiceConnectorCollectionItemSourceLogSourceResult',
    'GetServiceConnectorsServiceConnectorCollectionItemTargetResult',
    'GetServiceConnectorsServiceConnectorCollectionItemTaskResult',
]
@pulumi.output_type
class ServiceConnectorSource(dict):
    """Output type for a service connector source (a stream cursor and/or logging log sources)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case
        # property getter exposed by this class.
        suggest = None
        if key == "logSources":
            suggest = "log_sources"
        elif key == "streamId":
            suggest = "stream_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ServiceConnectorSource. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ServiceConnectorSource.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ServiceConnectorSource.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 kind: str,
                 cursor: Optional['outputs.ServiceConnectorSourceCursor'] = None,
                 log_sources: Optional[Sequence['outputs.ServiceConnectorSourceLogSource']] = None,
                 stream_id: Optional[str] = None):
        """
        :param str kind: (Updatable) The type discriminator.
        :param 'ServiceConnectorSourceCursorArgs' cursor: (Updatable) The type of [cursor](https://docs.cloud.oracle.com/iaas/Content/Streaming/Tasks/using_a_single_consumer.htm#usingcursors), which determines the starting point from which the stream will be consumed.
        :param Sequence['ServiceConnectorSourceLogSourceArgs'] log_sources: (Updatable) The resources affected by this work request.
        :param str stream_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        # Only set the optional fields that were actually provided.
        pulumi.set(__self__, "kind", kind)
        if cursor is not None:
            pulumi.set(__self__, "cursor", cursor)
        if log_sources is not None:
            pulumi.set(__self__, "log_sources", log_sources)
        if stream_id is not None:
            pulumi.set(__self__, "stream_id", stream_id)

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        (Updatable) The type discriminator.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter
    def cursor(self) -> Optional['outputs.ServiceConnectorSourceCursor']:
        """
        (Updatable) The type of [cursor](https://docs.cloud.oracle.com/iaas/Content/Streaming/Tasks/using_a_single_consumer.htm#usingcursors), which determines the starting point from which the stream will be consumed.
        """
        return pulumi.get(self, "cursor")

    @property
    @pulumi.getter(name="logSources")
    def log_sources(self) -> Optional[Sequence['outputs.ServiceConnectorSourceLogSource']]:
        """
        (Updatable) The resources affected by this work request.
        """
        return pulumi.get(self, "log_sources")

    @property
    @pulumi.getter(name="streamId")
    def stream_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        return pulumi.get(self, "stream_id")
@pulumi.output_type
class ServiceConnectorSourceCursor(dict):
    """Output type describing the stream cursor of a service connector source."""

    def __init__(__self__, *,
                 kind: Optional[str] = None):
        """
        :param str kind: (Updatable) The type discriminator.
        """
        if kind is not None:
            pulumi.set(__self__, "kind", kind)

    @property
    @pulumi.getter
    def kind(self) -> Optional[str]:
        """
        (Updatable) The type discriminator.
        """
        return pulumi.get(self, "kind")
@pulumi.output_type
class ServiceConnectorSourceLogSource(dict):
    """Output type identifying one logging log source (compartment, log group, log)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case
        # property getter exposed by this class.
        suggest = None
        if key == "compartmentId":
            suggest = "compartment_id"
        elif key == "logGroupId":
            suggest = "log_group_id"
        elif key == "logId":
            suggest = "log_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ServiceConnectorSourceLogSource. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ServiceConnectorSourceLogSource.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ServiceConnectorSourceLogSource.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 compartment_id: Optional[str] = None,
                 log_group_id: Optional[str] = None,
                 log_id: Optional[str] = None):
        """
        :param str compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric.
        :param str log_group_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        :param str log_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the log.
        """
        # Only set the optional fields that were actually provided.
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if log_group_id is not None:
            pulumi.set(__self__, "log_group_id", log_group_id)
        if log_id is not None:
            pulumi.set(__self__, "log_id", log_id)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="logGroupId")
    def log_group_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        """
        return pulumi.get(self, "log_group_id")

    @property
    @pulumi.getter(name="logId")
    def log_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the log.
        """
        return pulumi.get(self, "log_id")
@pulumi.output_type
class ServiceConnectorTarget(dict):
    """Output type for a service connector target; which fields apply depends on the target `kind` (bucket, stream, topic, function, log group, or metric)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case
        # property getter exposed by this class.
        suggest = None
        if key == "batchRolloverSizeInMbs":
            suggest = "batch_rollover_size_in_mbs"
        elif key == "batchRolloverTimeInMs":
            suggest = "batch_rollover_time_in_ms"
        elif key == "compartmentId":
            suggest = "compartment_id"
        elif key == "enableFormattedMessaging":
            suggest = "enable_formatted_messaging"
        elif key == "functionId":
            suggest = "function_id"
        elif key == "logGroupId":
            suggest = "log_group_id"
        elif key == "metricNamespace":
            suggest = "metric_namespace"
        elif key == "objectNamePrefix":
            suggest = "object_name_prefix"
        elif key == "streamId":
            suggest = "stream_id"
        elif key == "topicId":
            suggest = "topic_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ServiceConnectorTarget. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ServiceConnectorTarget.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ServiceConnectorTarget.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 kind: str,
                 batch_rollover_size_in_mbs: Optional[int] = None,
                 batch_rollover_time_in_ms: Optional[int] = None,
                 bucket: Optional[str] = None,
                 compartment_id: Optional[str] = None,
                 enable_formatted_messaging: Optional[bool] = None,
                 function_id: Optional[str] = None,
                 log_group_id: Optional[str] = None,
                 metric: Optional[str] = None,
                 metric_namespace: Optional[str] = None,
                 namespace: Optional[str] = None,
                 object_name_prefix: Optional[str] = None,
                 stream_id: Optional[str] = None,
                 topic_id: Optional[str] = None):
        """
        :param str kind: (Updatable) The type discriminator.
        :param int batch_rollover_size_in_mbs: (Updatable) The batch rollover size in megabytes.
        :param int batch_rollover_time_in_ms: (Updatable) The batch rollover time in milliseconds.
        :param str bucket: (Updatable) The name of the bucket. Avoid entering confidential information.
        :param str compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric.
        :param bool enable_formatted_messaging: (Updatable) Whether to apply a simplified, user-friendly format to the message. Applies only when friendly formatting is supported by the service connector source and the subscription protocol. Example: `true`
        :param str function_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        :param str log_group_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        :param str metric: (Updatable) The name of the metric. Example: `CpuUtilization`
        :param str metric_namespace: (Updatable) The namespace of the metric. Example: `oci_computeagent`
        :param str namespace: (Updatable) The namespace.
        :param str object_name_prefix: (Updatable) The prefix of the objects. Avoid entering confidential information.
        :param str stream_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        :param str topic_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the topic.
        """
        # 'kind' is always present; every other field is kind-specific and
        # only stored when provided.
        pulumi.set(__self__, "kind", kind)
        if batch_rollover_size_in_mbs is not None:
            pulumi.set(__self__, "batch_rollover_size_in_mbs", batch_rollover_size_in_mbs)
        if batch_rollover_time_in_ms is not None:
            pulumi.set(__self__, "batch_rollover_time_in_ms", batch_rollover_time_in_ms)
        if bucket is not None:
            pulumi.set(__self__, "bucket", bucket)
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if enable_formatted_messaging is not None:
            pulumi.set(__self__, "enable_formatted_messaging", enable_formatted_messaging)
        if function_id is not None:
            pulumi.set(__self__, "function_id", function_id)
        if log_group_id is not None:
            pulumi.set(__self__, "log_group_id", log_group_id)
        if metric is not None:
            pulumi.set(__self__, "metric", metric)
        if metric_namespace is not None:
            pulumi.set(__self__, "metric_namespace", metric_namespace)
        if namespace is not None:
            pulumi.set(__self__, "namespace", namespace)
        if object_name_prefix is not None:
            pulumi.set(__self__, "object_name_prefix", object_name_prefix)
        if stream_id is not None:
            pulumi.set(__self__, "stream_id", stream_id)
        if topic_id is not None:
            pulumi.set(__self__, "topic_id", topic_id)

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        (Updatable) The type discriminator.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter(name="batchRolloverSizeInMbs")
    def batch_rollover_size_in_mbs(self) -> Optional[int]:
        """
        (Updatable) The batch rollover size in megabytes.
        """
        return pulumi.get(self, "batch_rollover_size_in_mbs")

    @property
    @pulumi.getter(name="batchRolloverTimeInMs")
    def batch_rollover_time_in_ms(self) -> Optional[int]:
        """
        (Updatable) The batch rollover time in milliseconds.
        """
        return pulumi.get(self, "batch_rollover_time_in_ms")

    @property
    @pulumi.getter
    def bucket(self) -> Optional[str]:
        """
        (Updatable) The name of the bucket. Avoid entering confidential information.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="enableFormattedMessaging")
    def enable_formatted_messaging(self) -> Optional[bool]:
        """
        (Updatable) Whether to apply a simplified, user-friendly format to the message. Applies only when friendly formatting is supported by the service connector source and the subscription protocol. Example: `true`
        """
        return pulumi.get(self, "enable_formatted_messaging")

    @property
    @pulumi.getter(name="functionId")
    def function_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        """
        return pulumi.get(self, "function_id")

    @property
    @pulumi.getter(name="logGroupId")
    def log_group_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        """
        return pulumi.get(self, "log_group_id")

    @property
    @pulumi.getter
    def metric(self) -> Optional[str]:
        """
        (Updatable) The name of the metric. Example: `CpuUtilization`
        """
        return pulumi.get(self, "metric")

    @property
    @pulumi.getter(name="metricNamespace")
    def metric_namespace(self) -> Optional[str]:
        """
        (Updatable) The namespace of the metric. Example: `oci_computeagent`
        """
        return pulumi.get(self, "metric_namespace")

    @property
    @pulumi.getter
    def namespace(self) -> Optional[str]:
        """
        (Updatable) The namespace.
        """
        return pulumi.get(self, "namespace")

    @property
    @pulumi.getter(name="objectNamePrefix")
    def object_name_prefix(self) -> Optional[str]:
        """
        (Updatable) The prefix of the objects. Avoid entering confidential information.
        """
        return pulumi.get(self, "object_name_prefix")

    @property
    @pulumi.getter(name="streamId")
    def stream_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        return pulumi.get(self, "stream_id")

    @property
    @pulumi.getter(name="topicId")
    def topic_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the topic.
        """
        return pulumi.get(self, "topic_id")
@pulumi.output_type
class ServiceConnectorTask(dict):
    """Output type for a service connector task (a filter condition or a function invocation with batching limits)."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a camelCase wire key is used instead of the snake_case
        # property getter exposed by this class.
        suggest = None
        if key == "batchSizeInKbs":
            suggest = "batch_size_in_kbs"
        elif key == "batchTimeInSec":
            suggest = "batch_time_in_sec"
        elif key == "functionId":
            suggest = "function_id"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ServiceConnectorTask. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ServiceConnectorTask.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ServiceConnectorTask.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 kind: str,
                 batch_size_in_kbs: Optional[int] = None,
                 batch_time_in_sec: Optional[int] = None,
                 condition: Optional[str] = None,
                 function_id: Optional[str] = None):
        """
        :param str kind: (Updatable) The type discriminator.
        :param int batch_size_in_kbs: (Updatable) Size limit (kilobytes) for batch sent to invoke the function.
        :param int batch_time_in_sec: (Updatable) Time limit (seconds) for batch sent to invoke the function.
        :param str condition: (Updatable) A filter or mask to limit the source used in the flow defined by the service connector.
        :param str function_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        """
        # 'kind' is always present; the remaining fields are kind-specific
        # and only stored when provided.
        pulumi.set(__self__, "kind", kind)
        if batch_size_in_kbs is not None:
            pulumi.set(__self__, "batch_size_in_kbs", batch_size_in_kbs)
        if batch_time_in_sec is not None:
            pulumi.set(__self__, "batch_time_in_sec", batch_time_in_sec)
        if condition is not None:
            pulumi.set(__self__, "condition", condition)
        if function_id is not None:
            pulumi.set(__self__, "function_id", function_id)

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        (Updatable) The type discriminator.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter(name="batchSizeInKbs")
    def batch_size_in_kbs(self) -> Optional[int]:
        """
        (Updatable) Size limit (kilobytes) for batch sent to invoke the function.
        """
        return pulumi.get(self, "batch_size_in_kbs")

    @property
    @pulumi.getter(name="batchTimeInSec")
    def batch_time_in_sec(self) -> Optional[int]:
        """
        (Updatable) Time limit (seconds) for batch sent to invoke the function.
        """
        return pulumi.get(self, "batch_time_in_sec")

    @property
    @pulumi.getter
    def condition(self) -> Optional[str]:
        """
        (Updatable) A filter or mask to limit the source used in the flow defined by the service connector.
        """
        return pulumi.get(self, "condition")

    @property
    @pulumi.getter(name="functionId")
    def function_id(self) -> Optional[str]:
        """
        (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        """
        return pulumi.get(self, "function_id")
@pulumi.output_type
class GetServiceConnectorSourceResult(dict):
    """Data-source (read) result describing a service connector source; all fields are populated."""

    def __init__(__self__, *,
                 cursor: 'outputs.GetServiceConnectorSourceCursorResult',
                 kind: str,
                 log_sources: Sequence['outputs.GetServiceConnectorSourceLogSourceResult'],
                 stream_id: str):
        """
        :param 'GetServiceConnectorSourceCursorArgs' cursor: The type of [cursor](https://docs.cloud.oracle.com/iaas/Content/Streaming/Tasks/using_a_single_consumer.htm#usingcursors), which determines the starting point from which the stream will be consumed.
        :param str kind: The type discriminator.
        :param Sequence['GetServiceConnectorSourceLogSourceArgs'] log_sources: The resources affected by this work request.
        :param str stream_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        pulumi.set(__self__, "cursor", cursor)
        pulumi.set(__self__, "kind", kind)
        pulumi.set(__self__, "log_sources", log_sources)
        pulumi.set(__self__, "stream_id", stream_id)

    @property
    @pulumi.getter
    def cursor(self) -> 'outputs.GetServiceConnectorSourceCursorResult':
        """
        The type of [cursor](https://docs.cloud.oracle.com/iaas/Content/Streaming/Tasks/using_a_single_consumer.htm#usingcursors), which determines the starting point from which the stream will be consumed.
        """
        return pulumi.get(self, "cursor")

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The type discriminator.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter(name="logSources")
    def log_sources(self) -> Sequence['outputs.GetServiceConnectorSourceLogSourceResult']:
        """
        The resources affected by this work request.
        """
        return pulumi.get(self, "log_sources")

    @property
    @pulumi.getter(name="streamId")
    def stream_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        return pulumi.get(self, "stream_id")
@pulumi.output_type
class GetServiceConnectorSourceCursorResult(dict):
    """Data-source (read) result describing the stream cursor of a service connector source."""

    def __init__(__self__, *,
                 kind: str):
        """
        :param str kind: The type discriminator.
        """
        pulumi.set(__self__, "kind", kind)

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The type discriminator.
        """
        return pulumi.get(self, "kind")
@pulumi.output_type
class GetServiceConnectorSourceLogSourceResult(dict):
    """Data-source (read) result identifying one logging log source (compartment, log group, log)."""

    def __init__(__self__, *,
                 compartment_id: str,
                 log_group_id: str,
                 log_id: str):
        """
        :param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric.
        :param str log_group_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        :param str log_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the log.
        """
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "log_group_id", log_group_id)
        pulumi.set(__self__, "log_id", log_id)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="logGroupId")
    def log_group_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        """
        return pulumi.get(self, "log_group_id")

    @property
    @pulumi.getter(name="logId")
    def log_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the log.
        """
        return pulumi.get(self, "log_id")
@pulumi.output_type
class GetServiceConnectorTargetResult(dict):
    """Data-source (read) result for a service connector target; every field is populated regardless of the target `kind`."""

    def __init__(__self__, *,
                 batch_rollover_size_in_mbs: int,
                 batch_rollover_time_in_ms: int,
                 bucket: str,
                 compartment_id: str,
                 enable_formatted_messaging: bool,
                 function_id: str,
                 kind: str,
                 log_group_id: str,
                 metric: str,
                 metric_namespace: str,
                 namespace: str,
                 object_name_prefix: str,
                 stream_id: str,
                 topic_id: str):
        """
        :param int batch_rollover_size_in_mbs: The batch rollover size in megabytes.
        :param int batch_rollover_time_in_ms: The batch rollover time in milliseconds.
        :param str bucket: The name of the bucket. Avoid entering confidential information.
        :param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric.
        :param bool enable_formatted_messaging: Whether to apply a simplified, user-friendly format to the message. Applies only when friendly formatting is supported by the service connector source and the subscription protocol. Example: `true`
        :param str function_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        :param str kind: The type discriminator.
        :param str log_group_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        :param str metric: The name of the metric. Example: `CpuUtilization`
        :param str metric_namespace: The namespace of the metric. Example: `oci_computeagent`
        :param str namespace: The namespace.
        :param str object_name_prefix: The prefix of the objects. Avoid entering confidential information.
        :param str stream_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        :param str topic_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the topic.
        """
        pulumi.set(__self__, "batch_rollover_size_in_mbs", batch_rollover_size_in_mbs)
        pulumi.set(__self__, "batch_rollover_time_in_ms", batch_rollover_time_in_ms)
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "enable_formatted_messaging", enable_formatted_messaging)
        pulumi.set(__self__, "function_id", function_id)
        pulumi.set(__self__, "kind", kind)
        pulumi.set(__self__, "log_group_id", log_group_id)
        pulumi.set(__self__, "metric", metric)
        pulumi.set(__self__, "metric_namespace", metric_namespace)
        pulumi.set(__self__, "namespace", namespace)
        pulumi.set(__self__, "object_name_prefix", object_name_prefix)
        pulumi.set(__self__, "stream_id", stream_id)
        pulumi.set(__self__, "topic_id", topic_id)

    @property
    @pulumi.getter(name="batchRolloverSizeInMbs")
    def batch_rollover_size_in_mbs(self) -> int:
        """
        The batch rollover size in megabytes.
        """
        return pulumi.get(self, "batch_rollover_size_in_mbs")

    @property
    @pulumi.getter(name="batchRolloverTimeInMs")
    def batch_rollover_time_in_ms(self) -> int:
        """
        The batch rollover time in milliseconds.
        """
        return pulumi.get(self, "batch_rollover_time_in_ms")

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        The name of the bucket. Avoid entering confidential information.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="enableFormattedMessaging")
    def enable_formatted_messaging(self) -> bool:
        """
        Whether to apply a simplified, user-friendly format to the message. Applies only when friendly formatting is supported by the service connector source and the subscription protocol. Example: `true`
        """
        return pulumi.get(self, "enable_formatted_messaging")

    @property
    @pulumi.getter(name="functionId")
    def function_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        """
        return pulumi.get(self, "function_id")

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The type discriminator.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter(name="logGroupId")
    def log_group_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        """
        return pulumi.get(self, "log_group_id")

    @property
    @pulumi.getter
    def metric(self) -> str:
        """
        The name of the metric. Example: `CpuUtilization`
        """
        return pulumi.get(self, "metric")

    @property
    @pulumi.getter(name="metricNamespace")
    def metric_namespace(self) -> str:
        """
        The namespace of the metric. Example: `oci_computeagent`
        """
        return pulumi.get(self, "metric_namespace")

    @property
    @pulumi.getter
    def namespace(self) -> str:
        """
        The namespace.
        """
        return pulumi.get(self, "namespace")

    @property
    @pulumi.getter(name="objectNamePrefix")
    def object_name_prefix(self) -> str:
        """
        The prefix of the objects. Avoid entering confidential information.
        """
        return pulumi.get(self, "object_name_prefix")

    @property
    @pulumi.getter(name="streamId")
    def stream_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        return pulumi.get(self, "stream_id")

    @property
    @pulumi.getter(name="topicId")
    def topic_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the topic.
        """
        return pulumi.get(self, "topic_id")
@pulumi.output_type
class GetServiceConnectorTaskResult(dict):
    """A task (function invocation step) in the flow defined by a service connector."""

    def __init__(__self__, *,
                 batch_size_in_kbs: int,
                 batch_time_in_sec: int,
                 condition: str,
                 function_id: str,
                 kind: str):
        """
        :param int batch_size_in_kbs: Size limit (kilobytes) for batch sent to invoke the function.
        :param int batch_time_in_sec: Time limit (seconds) for batch sent to invoke the function.
        :param str condition: A filter or mask to limit the source used in the flow defined by the service connector.
        :param str function_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        :param str kind: The type discriminator.
        """
        pulumi.set(__self__, "batch_size_in_kbs", batch_size_in_kbs)
        pulumi.set(__self__, "batch_time_in_sec", batch_time_in_sec)
        pulumi.set(__self__, "condition", condition)
        pulumi.set(__self__, "function_id", function_id)
        pulumi.set(__self__, "kind", kind)

    @property
    @pulumi.getter(name="batchSizeInKbs")
    def batch_size_in_kbs(self) -> int:
        """
        Size limit (kilobytes) for batch sent to invoke the function.
        """
        return pulumi.get(self, "batch_size_in_kbs")

    @property
    @pulumi.getter(name="batchTimeInSec")
    def batch_time_in_sec(self) -> int:
        """
        Time limit (seconds) for batch sent to invoke the function.
        """
        return pulumi.get(self, "batch_time_in_sec")

    @property
    @pulumi.getter
    def condition(self) -> str:
        """
        A filter or mask to limit the source used in the flow defined by the service connector.
        """
        return pulumi.get(self, "condition")

    @property
    @pulumi.getter(name="functionId")
    def function_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        """
        return pulumi.get(self, "function_id")

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The type discriminator.
        """
        return pulumi.get(self, "kind")
@pulumi.output_type
class GetServiceConnectorsFilterResult(dict):
    """A name/values filter applied to the service connectors listing."""

    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str],
                 regex: Optional[bool] = None):
        """
        :param str name: The name of the field to filter on.
        :param Sequence[str] values: The values to match.
        :param bool regex: Presumably, whether `values` are regular expressions — confirm against the provider docs.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "values", values)
        if regex is None:
            return
        pulumi.set(__self__, "regex", regex)

    @property
    @pulumi.getter
    def name(self) -> str:
        """The name of the field to filter on."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """The values to match."""
        return pulumi.get(self, "values")

    @property
    @pulumi.getter
    def regex(self) -> Optional[bool]:
        """Whether `values` are treated as regular expressions (unset by default)."""
        return pulumi.get(self, "regex")
@pulumi.output_type
class GetServiceConnectorsServiceConnectorCollectionResult(dict):
    """Wrapper holding the list of service connector items returned by the data source."""

    def __init__(__self__, *,
                 items: Sequence['outputs.GetServiceConnectorsServiceConnectorCollectionItemResult']):
        """
        :param Sequence['GetServiceConnectorsServiceConnectorCollectionItemResult'] items: The service connectors in the collection.
        """
        pulumi.set(__self__, "items", items)

    @property
    @pulumi.getter
    def items(self) -> Sequence['outputs.GetServiceConnectorsServiceConnectorCollectionItemResult']:
        """The service connectors in the collection."""
        return pulumi.get(self, "items")
@pulumi.output_type
class GetServiceConnectorsServiceConnectorCollectionItemResult(dict):
    """One service connector summary returned by the service connectors data source."""

    def __init__(__self__, *,
                 compartment_id: str,
                 defined_tags: Mapping[str, Any],
                 description: str,
                 display_name: str,
                 freeform_tags: Mapping[str, Any],
                 id: str,
                 lifecyle_details: str,
                 source: 'outputs.GetServiceConnectorsServiceConnectorCollectionItemSourceResult',
                 state: str,
                 system_tags: Mapping[str, Any],
                 target: 'outputs.GetServiceConnectorsServiceConnectorCollectionItemTargetResult',
                 tasks: Sequence['outputs.GetServiceConnectorsServiceConnectorCollectionItemTaskResult'],
                 time_created: str,
                 time_updated: str):
        """
        :param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment for this request.
        :param Mapping[str, Any] defined_tags: Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        :param str description: The description of the resource. Avoid entering confidential information.
        :param str display_name: A filter to return only resources that match the given display name exactly. Example: `example_service_connector`
        :param Mapping[str, Any] freeform_tags: Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        :param str id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the service connector.
        :param str lifecyle_details: A message describing the current state in more detail. For example, the message might provide actionable information for a resource in a `FAILED` state.
        :param 'GetServiceConnectorsServiceConnectorCollectionItemSourceArgs' source: An object that represents the source of the flow defined by the service connector. An example source is the VCNFlow logs within the NetworkLogs group. For more information about flows defined by service connectors, see [Service Connector Hub Overview](https://docs.cloud.oracle.com/iaas/Content/service-connector-hub/overview.htm).
        :param str state: A filter to return only resources that match the given lifecycle state. Example: `ACTIVE`
        :param Mapping[str, Any] system_tags: The system tags associated with this resource, if any. The system tags are set by Oracle Cloud Infrastructure services. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{orcl-cloud: {free-tier-retain: true}}`
        :param 'GetServiceConnectorsServiceConnectorCollectionItemTargetArgs' target: An object that represents the target of the flow defined by the service connector. An example target is a stream. For more information about flows defined by service connectors, see [Service Connector Hub Overview](https://docs.cloud.oracle.com/iaas/Content/service-connector-hub/overview.htm).
        :param Sequence['GetServiceConnectorsServiceConnectorCollectionItemTaskArgs'] tasks: The list of tasks.
        :param str time_created: The date and time when the service connector was created. Format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2020-01-25T21:10:29.600Z`
        :param str time_updated: The date and time when the service connector was updated. Format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2020-01-25T21:10:29.600Z`
        """
        # NOTE: "lifecyle_details" (sic) is the generated attribute name; it is part
        # of the public interface and must not be corrected here.
        for attr, value in (
                ("compartment_id", compartment_id),
                ("defined_tags", defined_tags),
                ("description", description),
                ("display_name", display_name),
                ("freeform_tags", freeform_tags),
                ("id", id),
                ("lifecyle_details", lifecyle_details),
                ("source", source),
                ("state", state),
                ("system_tags", system_tags),
                ("target", target),
                ("tasks", tasks),
                ("time_created", time_created),
                ("time_updated", time_updated)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment for this request.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Mapping[str, Any]:
        """
        Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
        """
        return pulumi.get(self, "defined_tags")

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        The description of the resource. Avoid entering confidential information.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        A filter to return only resources that match the given display name exactly. Example: `example_service_connector`
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Mapping[str, Any]:
        """
        Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
        """
        return pulumi.get(self, "freeform_tags")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the service connector.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="lifecyleDetails")
    def lifecyle_details(self) -> str:
        """
        A message describing the current state in more detail. For example, the message might provide actionable information for a resource in a `FAILED` state.
        """
        return pulumi.get(self, "lifecyle_details")

    @property
    @pulumi.getter
    def source(self) -> 'outputs.GetServiceConnectorsServiceConnectorCollectionItemSourceResult':
        """
        An object that represents the source of the flow defined by the service connector. An example source is the VCNFlow logs within the NetworkLogs group. For more information about flows defined by service connectors, see [Service Connector Hub Overview](https://docs.cloud.oracle.com/iaas/Content/service-connector-hub/overview.htm).
        """
        return pulumi.get(self, "source")

    @property
    @pulumi.getter
    def state(self) -> str:
        """
        A filter to return only resources that match the given lifecycle state. Example: `ACTIVE`
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter(name="systemTags")
    def system_tags(self) -> Mapping[str, Any]:
        """
        The system tags associated with this resource, if any. The system tags are set by Oracle Cloud Infrastructure services. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{orcl-cloud: {free-tier-retain: true}}`
        """
        return pulumi.get(self, "system_tags")

    @property
    @pulumi.getter
    def target(self) -> 'outputs.GetServiceConnectorsServiceConnectorCollectionItemTargetResult':
        """
        An object that represents the target of the flow defined by the service connector. An example target is a stream. For more information about flows defined by service connectors, see [Service Connector Hub Overview](https://docs.cloud.oracle.com/iaas/Content/service-connector-hub/overview.htm).
        """
        return pulumi.get(self, "target")

    @property
    @pulumi.getter
    def tasks(self) -> Sequence['outputs.GetServiceConnectorsServiceConnectorCollectionItemTaskResult']:
        """
        The list of tasks.
        """
        return pulumi.get(self, "tasks")

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> str:
        """
        The date and time when the service connector was created. Format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2020-01-25T21:10:29.600Z`
        """
        return pulumi.get(self, "time_created")

    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> str:
        """
        The date and time when the service connector was updated. Format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2020-01-25T21:10:29.600Z`
        """
        return pulumi.get(self, "time_updated")
@pulumi.output_type
class GetServiceConnectorsServiceConnectorCollectionItemSourceResult(dict):
    """The source end of the flow defined by a service connector."""

    def __init__(__self__, *,
                 cursor: 'outputs.GetServiceConnectorsServiceConnectorCollectionItemSourceCursorResult',
                 kind: str,
                 log_sources: Sequence['outputs.GetServiceConnectorsServiceConnectorCollectionItemSourceLogSourceResult'],
                 stream_id: str):
        """
        :param 'GetServiceConnectorsServiceConnectorCollectionItemSourceCursorArgs' cursor: The type of [cursor](https://docs.cloud.oracle.com/iaas/Content/Streaming/Tasks/using_a_single_consumer.htm#usingcursors), which determines the starting point from which the stream will be consumed.
        :param str kind: The type discriminator.
        :param Sequence['GetServiceConnectorsServiceConnectorCollectionItemSourceLogSourceArgs'] log_sources: The resources affected by this work request.
        :param str stream_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        pulumi.set(__self__, "cursor", cursor)
        pulumi.set(__self__, "kind", kind)
        pulumi.set(__self__, "log_sources", log_sources)
        pulumi.set(__self__, "stream_id", stream_id)

    @property
    @pulumi.getter
    def cursor(self) -> 'outputs.GetServiceConnectorsServiceConnectorCollectionItemSourceCursorResult':
        """
        The type of [cursor](https://docs.cloud.oracle.com/iaas/Content/Streaming/Tasks/using_a_single_consumer.htm#usingcursors), which determines the starting point from which the stream will be consumed.
        """
        return pulumi.get(self, "cursor")

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The type discriminator.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter(name="logSources")
    def log_sources(self) -> Sequence['outputs.GetServiceConnectorsServiceConnectorCollectionItemSourceLogSourceResult']:
        """
        The resources affected by this work request.
        """
        return pulumi.get(self, "log_sources")

    @property
    @pulumi.getter(name="streamId")
    def stream_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        return pulumi.get(self, "stream_id")
@pulumi.output_type
class GetServiceConnectorsServiceConnectorCollectionItemSourceCursorResult(dict):
    """The cursor configured on a streaming source (determines the consumption starting point)."""

    def __init__(__self__, *,
                 kind: str):
        """
        :param str kind: The type discriminator.
        """
        pulumi.set(__self__, "kind", kind)

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The type discriminator.
        """
        return pulumi.get(self, "kind")
@pulumi.output_type
class GetServiceConnectorsServiceConnectorCollectionItemSourceLogSourceResult(dict):
    """One log source (compartment / log group / log) consumed by a logging source."""

    def __init__(__self__, *,
                 compartment_id: str,
                 log_group_id: str,
                 log_id: str):
        """
        :param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment for this request.
        :param str log_group_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        :param str log_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the log.
        """
        for attr, value in (
                ("compartment_id", compartment_id),
                ("log_group_id", log_group_id),
                ("log_id", log_id)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment for this request.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="logGroupId")
    def log_group_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        """
        return pulumi.get(self, "log_group_id")

    @property
    @pulumi.getter(name="logId")
    def log_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the log.
        """
        return pulumi.get(self, "log_id")
@pulumi.output_type
class GetServiceConnectorsServiceConnectorCollectionItemTargetResult(dict):
    """The target end of the flow defined by a service connector.

    This is the union of every target kind's fields (bucket, function, logging,
    monitoring, streaming, notifications); presumably only the fields matching
    `kind` carry meaningful values — confirm against the provider schema.
    """

    def __init__(__self__, *,
                 batch_rollover_size_in_mbs: int,
                 batch_rollover_time_in_ms: int,
                 bucket: str,
                 compartment_id: str,
                 enable_formatted_messaging: bool,
                 function_id: str,
                 kind: str,
                 log_group_id: str,
                 metric: str,
                 metric_namespace: str,
                 namespace: str,
                 object_name_prefix: str,
                 stream_id: str,
                 topic_id: str):
        """
        :param int batch_rollover_size_in_mbs: The batch rollover size in megabytes.
        :param int batch_rollover_time_in_ms: The batch rollover time in milliseconds.
        :param str bucket: The name of the bucket. Avoid entering confidential information.
        :param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment for this request.
        :param bool enable_formatted_messaging: Whether to apply a simplified, user-friendly format to the message. Applies only when friendly formatting is supported by the service connector source and the subscription protocol. Example: `true`
        :param str function_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        :param str kind: The type discriminator.
        :param str log_group_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        :param str metric: The name of the metric. Example: `CpuUtilization`
        :param str metric_namespace: The namespace of the metric. Example: `oci_computeagent`
        :param str namespace: The namespace.
        :param str object_name_prefix: The prefix of the objects. Avoid entering confidential information.
        :param str stream_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        :param str topic_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the topic.
        """
        pulumi.set(__self__, "batch_rollover_size_in_mbs", batch_rollover_size_in_mbs)
        pulumi.set(__self__, "batch_rollover_time_in_ms", batch_rollover_time_in_ms)
        pulumi.set(__self__, "bucket", bucket)
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "enable_formatted_messaging", enable_formatted_messaging)
        pulumi.set(__self__, "function_id", function_id)
        pulumi.set(__self__, "kind", kind)
        pulumi.set(__self__, "log_group_id", log_group_id)
        pulumi.set(__self__, "metric", metric)
        pulumi.set(__self__, "metric_namespace", metric_namespace)
        pulumi.set(__self__, "namespace", namespace)
        pulumi.set(__self__, "object_name_prefix", object_name_prefix)
        pulumi.set(__self__, "stream_id", stream_id)
        pulumi.set(__self__, "topic_id", topic_id)

    @property
    @pulumi.getter(name="batchRolloverSizeInMbs")
    def batch_rollover_size_in_mbs(self) -> int:
        """
        The batch rollover size in megabytes.
        """
        return pulumi.get(self, "batch_rollover_size_in_mbs")

    @property
    @pulumi.getter(name="batchRolloverTimeInMs")
    def batch_rollover_time_in_ms(self) -> int:
        """
        The batch rollover time in milliseconds.
        """
        return pulumi.get(self, "batch_rollover_time_in_ms")

    @property
    @pulumi.getter
    def bucket(self) -> str:
        """
        The name of the bucket. Avoid entering confidential information.
        """
        return pulumi.get(self, "bucket")

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment for this request.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="enableFormattedMessaging")
    def enable_formatted_messaging(self) -> bool:
        """
        Whether to apply a simplified, user-friendly format to the message. Applies only when friendly formatting is supported by the service connector source and the subscription protocol. Example: `true`
        """
        return pulumi.get(self, "enable_formatted_messaging")

    @property
    @pulumi.getter(name="functionId")
    def function_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        """
        return pulumi.get(self, "function_id")

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The type discriminator.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter(name="logGroupId")
    def log_group_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Logging Analytics log group.
        """
        return pulumi.get(self, "log_group_id")

    @property
    @pulumi.getter
    def metric(self) -> str:
        """
        The name of the metric. Example: `CpuUtilization`
        """
        return pulumi.get(self, "metric")

    @property
    @pulumi.getter(name="metricNamespace")
    def metric_namespace(self) -> str:
        """
        The namespace of the metric. Example: `oci_computeagent`
        """
        return pulumi.get(self, "metric_namespace")

    @property
    @pulumi.getter
    def namespace(self) -> str:
        """
        The namespace.
        """
        return pulumi.get(self, "namespace")

    @property
    @pulumi.getter(name="objectNamePrefix")
    def object_name_prefix(self) -> str:
        """
        The prefix of the objects. Avoid entering confidential information.
        """
        return pulumi.get(self, "object_name_prefix")

    @property
    @pulumi.getter(name="streamId")
    def stream_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the stream.
        """
        return pulumi.get(self, "stream_id")

    @property
    @pulumi.getter(name="topicId")
    def topic_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the topic.
        """
        return pulumi.get(self, "topic_id")
@pulumi.output_type
class GetServiceConnectorsServiceConnectorCollectionItemTaskResult(dict):
    """A task (function invocation step) in the flow of a listed service connector."""

    def __init__(__self__, *,
                 batch_size_in_kbs: int,
                 batch_time_in_sec: int,
                 condition: str,
                 function_id: str,
                 kind: str):
        """
        :param int batch_size_in_kbs: Size limit (kilobytes) for batch sent to invoke the function.
        :param int batch_time_in_sec: Time limit (seconds) for batch sent to invoke the function.
        :param str condition: A filter or mask to limit the source used in the flow defined by the service connector.
        :param str function_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        :param str kind: The type discriminator.
        """
        pulumi.set(__self__, "batch_size_in_kbs", batch_size_in_kbs)
        pulumi.set(__self__, "batch_time_in_sec", batch_time_in_sec)
        pulumi.set(__self__, "condition", condition)
        pulumi.set(__self__, "function_id", function_id)
        pulumi.set(__self__, "kind", kind)

    @property
    @pulumi.getter(name="batchSizeInKbs")
    def batch_size_in_kbs(self) -> int:
        """
        Size limit (kilobytes) for batch sent to invoke the function.
        """
        return pulumi.get(self, "batch_size_in_kbs")

    @property
    @pulumi.getter(name="batchTimeInSec")
    def batch_time_in_sec(self) -> int:
        """
        Time limit (seconds) for batch sent to invoke the function.
        """
        return pulumi.get(self, "batch_time_in_sec")

    @property
    @pulumi.getter
    def condition(self) -> str:
        """
        A filter or mask to limit the source used in the flow defined by the service connector.
        """
        return pulumi.get(self, "condition")

    @property
    @pulumi.getter(name="functionId")
    def function_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the function to be used as a task.
        """
        return pulumi.get(self, "function_id")

    @property
    @pulumi.getter
    def kind(self) -> str:
        """
        The type discriminator.
        """
        return pulumi.get(self, "kind")
| 42.520385
| 417
| 0.656276
| 6,633
| 57,360
| 5.472185
| 0.046585
| 0.019864
| 0.035457
| 0.051822
| 0.82938
| 0.813759
| 0.800149
| 0.780092
| 0.777172
| 0.768108
| 0
| 0.002303
| 0.235443
| 57,360
| 1,348
| 418
| 42.551929
| 0.825356
| 0.37819
| 0
| 0.735558
| 1
| 0.005135
| 0.180589
| 0.087139
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165597
| false
| 0
| 0.007702
| 0.005135
| 0.333761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
baa9d24339d965ea634d194eee7d7aeaa8f32cd9
| 192
|
py
|
Python
|
src/utils/__init__.py
|
develamove/courier-api
|
8a946178ece9b563d8dafdc8ef9277898d7bb041
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
develamove/courier-api
|
8a946178ece9b563d8dafdc8ef9277898d7bb041
|
[
"MIT"
] | null | null | null |
src/utils/__init__.py
|
develamove/courier-api
|
8a946178ece9b563d8dafdc8ef9277898d7bb041
|
[
"MIT"
] | null | null | null |
from .constants import *
from .decorators import *
from .exceptions import *
from .helpers import *
from .request_helpers import *
from .query_helpers import *
from .custom_validator import *
| 24
| 31
| 0.78125
| 24
| 192
| 6.125
| 0.416667
| 0.408163
| 0.346939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 192
| 7
| 32
| 27.428571
| 0.896341
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2439fc86759679fb3df996cfb5bf274170a202f9
| 218
|
py
|
Python
|
software/runYoloToCaffe.py
|
marquito77/rapidus
|
2e473431af341b291ea94ada4acc38587652c1f0
|
[
"MIT"
] | null | null | null |
software/runYoloToCaffe.py
|
marquito77/rapidus
|
2e473431af341b291ea94ada4acc38587652c1f0
|
[
"MIT"
] | null | null | null |
software/runYoloToCaffe.py
|
marquito77/rapidus
|
2e473431af341b291ea94ada4acc38587652c1f0
|
[
"MIT"
] | null | null | null |
import rapidus as rpd
# Convert two Darknet/YOLO model definitions (.cfg + .weights) to Caffe format.
# NOTE(review): convertYoloToCaffe is provided by the third-party `rapidus`
# package; the relative paths presumably assume this script runs from the
# repository root — confirm before relocating.
rpd.convertYoloToCaffe("./data/models/rapidus-1.cfg", "./data/models/rapidus-1.weights")
# Blank line between the two conversions' console output.
print()
rpd.convertYoloToCaffe("./data/models/rapidus-hagl10.cfg", "./data/models/rapidus-hagl10.weights")
| 43.6
| 98
| 0.766055
| 29
| 218
| 5.758621
| 0.413793
| 0.239521
| 0.407186
| 0.371257
| 0.45509
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028708
| 0.041284
| 218
| 5
| 98
| 43.6
| 0.770335
| 0
| 0
| 0
| 0
| 0
| 0.575342
| 0.575342
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79fd43f84c1e56b19d113ed3696a37cc5254a7b2
| 83
|
py
|
Python
|
Diamond/Python/test_diamond.py
|
emilybache/start-points-custom
|
8111ae5165a778de181e047c83e132cc96c3c3b2
|
[
"MIT"
] | 2
|
2018-08-25T07:34:13.000Z
|
2020-10-11T19:59:32.000Z
|
Diamond/Python/test_diamond.py
|
emilybache/start-points-custom
|
8111ae5165a778de181e047c83e132cc96c3c3b2
|
[
"MIT"
] | 1
|
2017-06-13T06:57:24.000Z
|
2017-06-13T06:57:24.000Z
|
Diamond/Python/test_diamond.py
|
emilybache/start-points-custom
|
8111ae5165a778de181e047c83e132cc96c3c3b2
|
[
"MIT"
] | 1
|
2017-06-12T13:14:55.000Z
|
2017-06-12T13:14:55.000Z
|
import diamond
def test_a():
    """Smallest diamond: for the letter 'A' the rendered diamond is just "A"."""
    assert diamond.Diamond('A').print_diamond() == "A"
| 13.833333
| 51
| 0.686747
| 12
| 83
| 4.583333
| 0.583333
| 0.290909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 83
| 5
| 52
| 16.6
| 0.763889
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0320c36b479e2c24d476be340ab920ba6ba7c440
| 21,090
|
py
|
Python
|
lambda/GetSurvey/realsurvey.py
|
cal-poly-dxhub/familycaresurveytool
|
2adac2af91abdc3d6bd7dc5b85e1801ca4071687
|
[
"Apache-2.0"
] | null | null | null |
lambda/GetSurvey/realsurvey.py
|
cal-poly-dxhub/familycaresurveytool
|
2adac2af91abdc3d6bd7dc5b85e1801ca4071687
|
[
"Apache-2.0"
] | 4
|
2020-08-03T21:53:53.000Z
|
2022-02-26T23:42:03.000Z
|
lambda/GetSurvey/realsurvey.py
|
cal-poly-dxhub/familycaresurveytool
|
2adac2af91abdc3d6bd7dc5b85e1801ca4071687
|
[
"Apache-2.0"
] | null | null | null |
survey = [{'question': 'Where do you live?',
'answers': [{'answer': 'Arroyo Grande',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Atascadero',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Avila Beach',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Bradley',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Buellton',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Cambria',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Cayucos',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Creston',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Grover Beach',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Guadalupe',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Harmony',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Heritage Ranch',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Lompoc',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Los Osos',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Morro Bay',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Nipomo',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Oceano',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Orcutt',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Paso Robles',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Pismo Beach',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'San Luis Obispo',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'San Miguel',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'San Simeon',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Santa Margarita',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Santa Maria',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Shandon',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Templeton',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Other Santa Barbara County',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 1,
'tutor': 1,
'career mentor': 1,
'volunteer': 4,
'donate/support': 4},
{'answer': 'Outside the Central Coast',
'foster parent': 1,
'respite/tcp': 1,
'host home': 1,
'mentor': 1,
'tutor': 1,
'career mentor': 1,
'volunteer': 3,
'donate/support': 6}]},
{'question': 'What is your household make up?',
'answers': [{'answer': '1) Single no kids',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '2) Single w/ kids',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '3) Spouse/partner no kids',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '4) Spouse/partner w/ kids',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100}]},
{'question': 'How would you describe your work life?',
'answers': [{'answer': '1) Work full-time away from home',
'foster parent': 5,
'respite/tcp': 4,
'host home': 5,
'mentor': 4,
'tutor': 2,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': '2) Work full-time from home',
'foster parent': 6,
'respite/tcp': 5,
'host home': 6,
'mentor': 4,
'tutor': 4,
'career mentor': 5,
'volunteer': 4,
'donate/support': 4},
{'answer': '3) Work full-time but have flexibility',
'foster parent': 5,
'respite/tcp': 4,
'host home': 5,
'mentor': 8,
'tutor': 8,
'career mentor': 8,
'volunteer': 7,
'donate/support': 4},
{'answer': '4) Am a stay-at-home parent',
'foster parent': 7,
'respite/tcp': 4,
'host home': 7,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 8,
'donate/support': 4},
{'answer': '5) Work part-time',
'foster parent': 6,
'respite/tcp': 4,
'host home': 6,
'mentor': 5,
'tutor': 8,
'career mentor': 8,
'volunteer': 7,
'donate/support': 4},
{'answer': '6a) I am financially stable',
'parent': '6) Not currently working',
'foster parent': 7,
'respite/tcp': 4,
'host home': 7,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 6,
'donate/support': 9},
{'answer': '6b) I am currently looking for employment',
'parent': '6) Not currently working',
'foster parent': 1,
'respite/tcp': 1,
'host home': 1,
'mentor': 2,
'tutor': 4,
'career mentor': 2,
'volunteer': 6,
'donate/support': 3},
{'answer': '7) Am retired',
'foster parent': 7,
'respite/tcp': 4,
'host home': 7,
'mentor': 5,
'tutor': 4,
'career mentor': 4,
'volunteer': 7,
'donate/support': 4}]},
{'question': 'How would you describe your spouse\'s work life?',
'answers': [{'answer': '1) Work full-time away from home',
'foster parent': 5,
'respite/tcp': 4,
'host home': 5,
'mentor': 4,
'tutor': 2,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': '2) Work full-time from home',
'foster parent': 6,
'respite/tcp': 5,
'host home': 6,
'mentor': 4,
'tutor': 4,
'career mentor': 5,
'volunteer': 4,
'donate/support': 4},
{'answer': '3) Work full-time but have flexibility',
'foster parent': 5,
'respite/tcp': 4,
'host home': 5,
'mentor': 8,
'tutor': 8,
'career mentor': 8,
'volunteer': 7,
'donate/support': 4},
{'answer': '4) Am a stay-at-home parent',
'foster parent': 7,
'respite/tcp': 4,
'host home': 7,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 8,
'donate/support': 4},
{'answer': '5) Work part-time',
'foster parent': 6,
'respite/tcp': 4,
'host home': 6,
'mentor': 5,
'tutor': 8,
'career mentor': 8,
'volunteer': 7,
'donate/support': 4},
{'answer': '6a) My spouse is financially stable',
'parent': '6) Not currently working',
'foster parent': 7,
'respite/tcp': 4,
'host home': 7,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 6,
'donate/support': 9},
{'answer': '6b) My spouse is currently looking for employment',
'parent': '6) Not currently working',
'foster parent': 1,
'respite/tcp': 1,
'host home': 1,
'mentor': 2,
'tutor': 4,
'career mentor': 2,
'volunteer': 6,
'donate/support': 3},
{'answer': '7) Am retired',
'foster parent': 7,
'respite/tcp': 4,
'host home': 7,
'mentor': 5,
'tutor': 4,
'career mentor': 4,
'volunteer': 7,
'donate/support': 4},
{'answer': '8) Not applicable. I do not have a spouse',
'foster parent': 0,
'respite/tcp': 0,
'host home': 0,
'mentor': 0,
'tutor': 0,
'career mentor': 0,
'volunteer': 0,
'donate/support': 0}]},
{'question': 'How do you see yourself becoming involved with foster care?',
'answers': [{'answer': '1) I want to provide a foster child/youth with a home',
'foster parent': 8,
'respite/tcp': 6,
'host home': 6,
'mentor': 4,
'tutor': 2,
'career mentor': 2,
'volunteer': 4,
'donate/support': 4},
{'answer': "2) I want to be involved in a foster child/youth's life",
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 8,
'tutor': 8,
'career mentor': 8,
'volunteer': 5,
'donate/support': 5},
{'answer': '3) I want to support foster parents and foster children as needed',
'foster parent': 6,
'respite/tcp': 8,
'host home': 8,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': "4) I know I want to be involved, but don't know how",
'foster parent': 3,
'respite/tcp': 4,
'host home': 4,
'mentor': 3,
'tutor': 4,
'career mentor': 4,
'volunteer': 6,
'donate/support': 4},
{'answer': '5) I just want to support the agency',
'foster parent': 3,
'respite/tcp': 2,
'host home': 2,
'mentor': 2,
'tutor': 4,
'career mentor': 4,
'volunteer': 7,
'donate/support': 9}]},
{'question': 'How much time do you want to invest?',
'answers': [{'answer': '1) Full Time',
'foster parent': 7,
'respite/tcp': 4,
'host home': 7,
'mentor': 2,
'tutor': 3,
'career mentor': 2,
'volunteer': 4,
'donate/support': 4},
{'answer': '2) Weekly',
'foster parent': 1,
'respite/tcp': 5,
'host home': 1,
'mentor': 4,
'tutor': 8,
'career mentor': 4,
'volunteer': 8,
'donate/support': 4},
{'answer': '3) Monthly',
'foster parent': 1,
'respite/tcp': 7,
'host home': 1,
'mentor': 8,
'tutor': 4,
'career mentor': 8,
'volunteer': 4,
'donate/support': 7},
{'answer': '4) Periodically',
'foster parent': 1,
'respite/tcp': 3,
'host home': 1,
'mentor': 3,
'tutor': 4,
'career mentor': 4,
'volunteer': 5,
'donate/support': 5}]},
{'question': 'How much do you know about foster care?',
'answers': [{'answer': '1) I know a lot about foster care',
'foster parent': 6,
'respite/tcp': 6,
'host home': 6,
'mentor': 2,
'tutor': 2,
'career mentor': 2,
'volunteer': 2,
'donate/support': 6},
{'answer': '2) I want to become involved/know very little',
'foster parent': 5,
'respite/tcp': 5,
'host home': 5,
'mentor': 4,
'tutor': 5,
'career mentor': 5,
'volunteer': 5,
'donate/support': 6},
{'answer': '3) I have no idea what foster care involves',
'foster parent': 4,
'respite/tcp': 5,
'host home': 4,
'mentor': 5,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4}]},
{'question': 'What is your current level of interest?',
'answers': [{'answer': '1) Very interested and want to get started right away',
'foster parent': 7,
'respite/tcp': 7,
'host home': 7,
'mentor': 7,
'tutor': 7,
'career mentor': 7,
'volunteer': 7,
'donate/support': 7},
{'answer': '2) I am interested, but I would like additional information and guidance',
'foster parent': 6,
'respite/tcp': 6,
'host home': 6,
'mentor': 6,
'tutor': 6,
'career mentor': 6,
'volunteer': 6,
'donate/support': 6},
{'answer': '3) Thinking about getting involved in the next 6–9 months',
'foster parent': 5,
'respite/tcp': 5,
'host home': 5,
'mentor': 5,
'tutor': 5,
'career mentor': 5,
'volunteer': 5,
'donate/support': 5},
{'answer': '4) I am just exploring possibilities right now',
'foster parent': 3,
'respite/tcp': 2,
'host home': 4,
'mentor': 3,
'tutor': 2,
'career mentor': 3,
'volunteer': 4,
'donate/support': 4}]},
{'question': 'How do you want to make an impact?',
'answers': [
{'answer': '1a) I have an extra bedroom(s)',
'parent': '1) I want to help foster children/youth recover',
'foster parent': 7,
'respite/tcp': 7,
'host home': 7,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': '1b) I have a spare bed',
'parent': '1) I want to help foster children/youth recover',
'foster parent': 6,
'respite/tcp': 6,
'host home': 4,
'mentor': 2,
'tutor': 2,
'career mentor': 2,
'volunteer': 2,
'donate/support': 4},
{'answer': '1c) I don’t have extra space in my home, but am moving',
'parent': '1) I want to help foster children/youth recover',
'foster parent': 5,
'respite/tcp': 5,
'host home': 5,
'mentor': 3,
'tutor': 2,
'career mentor': 2,
'volunteer': 2,
'donate/support': 4},
{'answer': '1d) No Space',
'parent': '1) I want to help foster children/youth recover',
'foster parent': 4,
'respite/tcp': 5,
'host home': 4,
'mentor': 5,
'tutor': 5,
'career mentor': 5,
'volunteer': 5,
'donate/support': 5},
{'answer': '2) I want to use my life experiences to enrich a foster child/youth',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 8,
'tutor': 8,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': '3) I want to use my professional experience to help',
'foster parent': 4,
'respite/tcp': 4,
'host home': 4,
'mentor': 5,
'tutor': 7,
'career mentor': 8,
'volunteer': 4,
'donate/support': 4},
{'answer': '4) I want to care for foster children/youth but not full time',
'foster parent': 1,
'respite/tcp': 6,
'host home': 4,
'mentor': 4,
'tutor': 4,
'career mentor': 4,
'volunteer': 4,
'donate/support': 4},
{'answer': '5) I want to give back to my community in some way',
'foster parent': 4,
'respite/tcp': 3,
'host home': 4,
'mentor': 5,
'tutor': 5,
'career mentor': 5,
'volunteer': 8,
'donate/support': 8}]},
{'question': 'How is your support network?',
'answers': [{'answer': '1) I have many close friends and family members',
'foster parent': 7,
'respite/tcp': 8,
'host home': 7,
'mentor': 6,
'tutor': 6,
'career mentor': 6,
'volunteer': 6,
'donate/support': 4},
{'answer': '2) I have a few close friends and family members',
'foster parent': 6,
'respite/tcp': 6,
'host home': 6,
'mentor': 5,
'tutor': 6,
'career mentor': 6,
'volunteer': 6,
'donate/support': 4},
{'answer': '3) I have no close friends or family members',
'foster parent': 3,
'respite/tcp': 2,
'host home': 2,
'mentor': 3,
'tutor': 3,
'career mentor': 3,
'volunteer': 4,
'donate/support': 4}]},
{'question': 'What is your current community involvement?',
'answers': [{'answer': '1) I am regularly involved in social, community, and/or faith organizations',
'foster parent': 7,
'respite/tcp': 8,
'host home': 7,
'mentor': 6,
'tutor': 4,
'career mentor': 4,
'volunteer': 7,
'donate/support': 4},
{'answer': '2) I am occasionally involved in social, community, and/or faith organizations',
'foster parent': 6,
'respite/tcp': 6,
'host home': 6,
'mentor': 5,
'tutor': 4,
'career mentor': 4,
'volunteer': 6,
'donate/support': 4},
{'answer': '3) I am never involved in social, community, and/or faith organizations',
'foster parent': 3,
'respite/tcp': 2,
'host home': 2,
'mentor': 3,
'tutor': 3,
'career mentor': 3,
'volunteer': 4,
'donate/support': 4}]},
{'question': 'What are your hobbies and interests?',
'answers': [{'answer': '1) Faith-based Activities',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '2) Traveling',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '3) Pets/farm animals',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '4) Volunteer/community involvement',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '5) Home-centered Activities',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '6) Enjoy the outdoors',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '7) Parenting/care-giving',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '8) Live Entertainment',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '9) Electronic Entertainment',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '10) Being Physically Active',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100},
{'answer': '11) Enjoying Solitary Activities',
'foster parent': -100,
'respite/tcp': -100,
'host home': -100,
'mentor': -100,
'tutor': -100,
'career mentor': -100,
'volunteer': -100,
'donate/support': -100}]}]
| 25.077289
| 103
| 0.532148
| 2,658
| 21,090
| 4.222724
| 0.088412
| 0.097292
| 0.073592
| 0.096222
| 0.830809
| 0.803635
| 0.785816
| 0.771205
| 0.753386
| 0.735299
| 0
| 0.067204
| 0.264817
| 21,090
| 840
| 104
| 25.107143
| 0.656627
| 0
| 0
| 0.867857
| 0
| 0
| 0.535704
| 0.000996
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0322dab0180be7adf4eaa2276bbca4219921e96c
| 2,184
|
py
|
Python
|
diffmask/utils/callbacks.py
|
xiye17/diffmask
|
6ae62ce58bf9bf5ea0b0ac23b196c52b7ee97c48
|
[
"MIT"
] | 45
|
2020-05-01T08:44:19.000Z
|
2022-03-25T12:18:03.000Z
|
diffmask/utils/callbacks.py
|
xiye17/diffmask
|
6ae62ce58bf9bf5ea0b0ac23b196c52b7ee97c48
|
[
"MIT"
] | 1
|
2020-09-04T03:33:41.000Z
|
2020-09-27T12:21:09.000Z
|
diffmask/utils/callbacks.py
|
xiye17/diffmask
|
6ae62ce58bf9bf5ea0b0ac23b196c52b7ee97c48
|
[
"MIT"
] | 2
|
2021-02-04T17:21:32.000Z
|
2021-03-05T12:46:16.000Z
|
import torch
import pytorch_lightning as pl
class CallbackSST(pl.Callback):
    """Print SST validation accuracy and F1 to stdout after each validation run."""

    def on_validation_end(self, trainer, pl_module):
        """Report epoch number, accuracy (%), and F1 (%) from the trainer's metrics."""
        metrics = trainer.callback_metrics
        epoch = metrics["epoch"] + 1
        accuracy = metrics["val_acc"] * 100
        f1 = metrics["val_f1"] * 100
        print(f"Epoch {epoch}: Validation accuracy = {accuracy:.2f}, F1 = {f1:.2f}")
class CallbackSSTDiffMask(pl.Callback):
    """Print SST DiffMask validation metrics (accuracy, F1, gate sparsity, constraint)."""

    def on_validation_end(self, trainer, pl_module):
        """Report epoch metrics; gate sparsity is the fraction of gates at zero (1 - L0)."""
        metrics = trainer.callback_metrics
        epoch = metrics["epoch"] + 1
        accuracy = metrics["val_acc"] * 100
        f1 = metrics["val_f1"] * 100
        gates_at_zero = 1 - metrics["val_l0"]
        constraint = metrics["val_loss_c"]
        print(
            f"Epoch {epoch}: Validation accuracy = {accuracy:.2f}, "
            f"F1 = {f1:.2f}, gates at zero = {gates_at_zero:.2%}, "
            f"constraint = {constraint:.5f}"
        )
class CallbackSquadDiffMask(pl.Callback):
    """Print SQuAD DiffMask validation metrics (accuracy, gate sparsity, constraint)."""

    def on_validation_end(self, trainer, pl_module):
        """Report epoch metrics; gate sparsity is the fraction of gates at zero (1 - L0)."""
        metrics = trainer.callback_metrics
        epoch = metrics["epoch"] + 1
        accuracy = metrics["val_acc"] * 100
        gates_at_zero = 1 - metrics["val_l0"]
        constraint = metrics["val_loss_c"]
        print(
            f"Epoch {epoch}: Validation accuracy = {accuracy:.2f}, "
            f"gates at zero = {gates_at_zero:.2%}, "
            f"constraint = {constraint:.5f}"
        )
class CallbackToyTask(pl.Callback):
    """Print toy-task validation accuracy to stdout after each validation run."""

    def on_validation_end(self, trainer, pl_module):
        """Report epoch number and raw validation accuracy (not scaled to percent)."""
        metrics = trainer.callback_metrics
        epoch = metrics["epoch"] + 1
        accuracy = metrics["val_acc"]
        print(f"Epoch {epoch}: Validation accuracy = {accuracy:.2f}")
class CallbackToyTaskDiffMask(pl.Callback):
    """Print toy-task DiffMask validation metrics (accuracy, gate sparsity, constraint)."""

    def on_validation_end(self, trainer, pl_module):
        """Report epoch metrics; accuracy is raw (not percent), sparsity is 1 - L0."""
        metrics = trainer.callback_metrics
        epoch = metrics["epoch"] + 1
        accuracy = metrics["val_acc"]
        gates_at_zero = 1 - metrics["val_l0"]
        constraint = metrics["val_loss_c"]
        print(
            f"Epoch {epoch}: Validation accuracy = {accuracy:.2f}, "
            f"gates at zero = {gates_at_zero:.2%}, "
            f"constraint = {constraint:.5f}"
        )
| 35.803279
| 118
| 0.55815
| 220
| 2,184
| 5.313636
| 0.181818
| 0.230967
| 0.338751
| 0.278015
| 0.879384
| 0.879384
| 0.879384
| 0.879384
| 0.879384
| 0.879384
| 0
| 0.028458
| 0.30815
| 2,184
| 60
| 119
| 36.4
| 0.745202
| 0
| 0
| 0.6
| 0
| 0.02
| 0.215659
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.04
| 0
| 0.24
| 0.1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
034931ee2a1b5880d22d4dda584501399d09274d
| 30,892
|
py
|
Python
|
edge-bootstrap/python/edgectl/test/utils/test_certutil_unit.py
|
CIPop/iotedge
|
401b6d19effbb2d5f347434ce0dc01599cefe93e
|
[
"MIT"
] | 3
|
2018-12-27T18:15:15.000Z
|
2020-02-12T05:23:09.000Z
|
edge-bootstrap/python/edgectl/test/utils/test_certutil_unit.py
|
CIPop/iotedge
|
401b6d19effbb2d5f347434ce0dc01599cefe93e
|
[
"MIT"
] | 2
|
2018-12-28T04:48:34.000Z
|
2019-01-15T21:11:30.000Z
|
edge-bootstrap/python/edgectl/test/utils/test_certutil_unit.py
|
CIPop/iotedge
|
401b6d19effbb2d5f347434ce0dc01599cefe93e
|
[
"MIT"
] | 2
|
2018-11-06T23:54:28.000Z
|
2019-04-03T06:38:47.000Z
|
"""Implementation of tests for module `edgectl.utils.certutil.py`."""
from __future__ import print_function
import sys
import unittest
from mock import mock, patch, mock_open, MagicMock
from OpenSSL import crypto
import edgectl.errors
from edgectl.utils import EdgeCertUtil
from edgectl.config import EdgeConstants as EC
if sys.version_info[0] < 3:
OPEN_BUILTIN = '__builtin__.open'
else:
OPEN_BUILTIN = 'builtins.open'
VALID_SUBJECT_DICT = {
EC.SUBJECT_COUNTRY_KEY: 'TC',
EC.SUBJECT_STATE_KEY: 'Test State',
EC.SUBJECT_LOCALITY_KEY: 'Test Locality',
EC.SUBJECT_ORGANIZATION_KEY: 'Test Organization',
EC.SUBJECT_ORGANIZATION_UNIT_KEY: 'Test Unit',
EC.SUBJECT_COMMON_NAME_KEY: 'Test CommonName'
}
INVALID_FILE = 'invalid_file'
CA_OWNER_CERT_FILE_NAME = 'test_ca_owner_cert.pem'
CA_CERT_FILE_NAME = 'test_ca_cert.pem'
CA_CHAIN_CERT_FILE_NAME = 'test_ca_chain_cert.pem'
CA_PRIVATE_KEY_FILE_NAME = 'test_ca_private.pem'
# pylint: disable=C0103
# disables invalid method name warning which is triggered because the test names are long
class TestEdgeCertUtilAPIIsValidCertSubject(unittest.TestCase):
    """Unit tests for API EdgeCertUtil.is_valid_certificate_subject"""

    def test_certificate_subject_valid(self):
        """
        Test API validate_certificate_subject returns True when correct inputs are used
        """
        # the reference subject dict itself must validate
        self.assertTrue(EdgeCertUtil.is_valid_certificate_subject(VALID_SUBJECT_DICT))
        # boundary-length values that should still be accepted per field
        value_64 = 'a' * 64
        value_128 = 'a' * 128
        accepted_values = {
            EC.SUBJECT_COUNTRY_KEY: ['AB'],
            EC.SUBJECT_STATE_KEY: ['', value_128],
            EC.SUBJECT_LOCALITY_KEY: ['', value_128],
            EC.SUBJECT_ORGANIZATION_KEY: ['', value_64],
            EC.SUBJECT_ORGANIZATION_UNIT_KEY: ['', value_64],
            EC.SUBJECT_COMMON_NAME_KEY: [value_64],
        }
        for field in list(VALID_SUBJECT_DICT.keys()):
            for candidate in accepted_values[field]:
                subject = VALID_SUBJECT_DICT.copy()
                subject[field] = candidate
                self.assertTrue(
                    EdgeCertUtil.is_valid_certificate_subject(subject), field)

    def test_certificate_subject_invalid(self):
        """
        Test API validate_certificate_subject returns False when incorrect inputs are used
        """
        # removing any required key must invalidate the subject
        for field in list(VALID_SUBJECT_DICT.keys()):
            subject = VALID_SUBJECT_DICT.copy()
            del subject[field]
            self.assertFalse(
                EdgeCertUtil.is_valid_certificate_subject(subject), field)
        # per-field values that are missing, empty, or out of length bounds
        value_65 = 'a' * 65
        value_129 = 'a' * 129
        rejected_values = {
            EC.SUBJECT_COUNTRY_KEY: [None, '', 'A', 'ABC'],
            EC.SUBJECT_STATE_KEY: [None, value_129],
            EC.SUBJECT_LOCALITY_KEY: [None, value_129],
            EC.SUBJECT_ORGANIZATION_KEY: [None, value_65],
            EC.SUBJECT_ORGANIZATION_UNIT_KEY: [None, value_65],
            EC.SUBJECT_COMMON_NAME_KEY: [None, '', value_65],
        }
        for field in list(VALID_SUBJECT_DICT.keys()):
            for candidate in rejected_values[field]:
                subject = VALID_SUBJECT_DICT.copy()
                subject[field] = candidate
                self.assertFalse(
                    EdgeCertUtil.is_valid_certificate_subject(subject), field)
class TestEdgeCertUtilAPICreateRootCACert(unittest.TestCase):
    """Unit tests for API EdgeCertUtil.create_root_ca_cert"""

    def test_create_root_ca_cert_duplicate_ids_invalid(self):
        """
        Test API create_root_ca_cert raises exception when duplicate id's are used
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        # registering the same certificate id a second time must fail
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)

    def test_create_root_ca_cert_validity_days_invalid(self):
        """
        Test API create_root_ca_cert raises exception when invalid validity day values are used
        """
        cert_util = EdgeCertUtil()
        # negative, zero, and over-limit validity periods
        for days in [-1, 0, 366, 1096]:
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_root_ca_cert('root',
                                              subject_dict=VALID_SUBJECT_DICT,
                                              validity_days_from_now=days)

    def test_create_root_ca_cert_subject_dict_invalid(self):
        """
        Test API create_root_ca_cert raises exception when invalid cert dicts are used
        """
        cert_util = EdgeCertUtil()
        validator = 'edgectl.utils.EdgeCertUtil.is_valid_certificate_subject'
        # force the subject validator to report the dict as invalid
        with patch(validator, MagicMock(return_value=False)):
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_root_ca_cert('root',
                                              subject_dict=VALID_SUBJECT_DICT)

    def test_create_root_ca_cert_passphrase_invalid(self):
        """
        Test API set_ca_cert raises exception when passphrase is invalid
        """
        cert_util = EdgeCertUtil()
        # empty, too-short (3 chars), and too-long (1024 chars) passphrases
        for bad_passphrase in ['', '123', 'a' * 1024]:
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_root_ca_cert('root',
                                              subject_dict=VALID_SUBJECT_DICT,
                                              passphrase=bad_passphrase)
class TestEdgeCertUtilAPISetCACert(unittest.TestCase):
"""Unit tests for API EdgeCertUtil.set_ca_cert"""
def test_set_ca_cert_missing_args_invalid(self):
"""
Test API set_ca_cert raises exception when all required args are not provided
"""
cert_util = EdgeCertUtil()
with patch('edgectl.utils.EdgeUtils.check_if_file_exists', MagicMock(return_value=True)):
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME)
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME)
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME)
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME)
with patch(OPEN_BUILTIN, mock_open(read_data='MOCKEDPASSWORD')) as mocked_open:
mocked_open.side_effect = IOError()
@staticmethod
def _check_if_file_exists_helper(file_name):
if file_name == INVALID_FILE:
return False
return True
def test_set_ca_cert_missing_cert_files_invalid(self):
"""
Test API set_ca_cert raises exception when files found to not exist
"""
cert_util = EdgeCertUtil()
with patch('edgectl.utils.EdgeUtils.check_if_file_exists') as mock_check_file:
mock_check_file.side_effect = self._check_if_file_exists_helper
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=INVALID_FILE,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME)
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=INVALID_FILE,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME)
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=INVALID_FILE,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME)
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=INVALID_FILE)
def test_set_ca_cert_passphrase_invalid(self):
"""
Test API set_ca_cert raises exception when passphrase is invalid
"""
cert_util = EdgeCertUtil()
with patch('edgectl.utils.EdgeUtils.check_if_file_exists', MagicMock(return_value=True)):
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
passphrase='')
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
passphrase='123')
bad_pass_1024 = 'a' * 1024
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
passphrase=bad_pass_1024)
def test_set_ca_cert_open_failure_invalid(self):
"""
Test API set_ca_cert raises exception when open() cert private key file fails
"""
cert_util = EdgeCertUtil()
with patch('edgectl.utils.EdgeUtils.check_if_file_exists', MagicMock(return_value=True)):
with patch(OPEN_BUILTIN, mock_open(read_data='MOCKED')) as mocked_open:
mocked_open.side_effect = IOError()
with self.assertRaises(edgectl.errors.EdgeFileAccessError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
passphrase='1234')
mocked_open.assert_called_with(CA_PRIVATE_KEY_FILE_NAME, 'rb')
@mock.patch('OpenSSL.crypto.load_privatekey')
@mock.patch('edgectl.utils.EdgeUtils.check_if_file_exists')
def test_set_ca_cert_load_privatekey_failure_invalid(self, mock_util_chk, mock_load_pk):
"""
Test API set_ca_cert raises exception when calling API load_privatekey
"""
cert_util = EdgeCertUtil()
mock_util_chk.return_value = True
with patch(OPEN_BUILTIN, mock_open(read_data='MOCKED')) as mocked_open:
mock_load_pk.side_effect = crypto.Error()
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
passphrase='1234')
mocked_open.assert_called_with(CA_PRIVATE_KEY_FILE_NAME, 'rb')
mock_load_pk.assert_called_with(crypto.FILETYPE_PEM, 'MOCKED', passphrase='1234')
@mock.patch('OpenSSL.crypto.PKey.check')
@mock.patch('OpenSSL.crypto.load_privatekey')
@mock.patch('edgectl.utils.EdgeUtils.check_if_file_exists')
def test_set_ca_cert_check_type_error_invalid(self, mock_util_chk, mock_load_pk, mock_check_pk):
"""
Test API set_ca_cert raises exception when private key check fails
"""
cert_util = EdgeCertUtil()
mock_util_chk.return_value = True
with patch(OPEN_BUILTIN, mock_open(read_data='MOCKED')) as mocked_open:
mock_load_pk.return_value = crypto.PKey()
mock_check_pk.side_effect = TypeError()
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
passphrase='1234')
mocked_open.assert_called_with(CA_PRIVATE_KEY_FILE_NAME, 'rb')
mock_load_pk.assert_called_with(crypto.FILETYPE_PEM, 'MOCKED', passphrase='1234')
@mock.patch('OpenSSL.crypto.PKey.check')
@mock.patch('OpenSSL.crypto.load_privatekey')
@mock.patch('edgectl.utils.EdgeUtils.check_if_file_exists')
def test_set_ca_cert_check_crypto_error_invalid(self, mock_util_chk,
mock_load_pk, mock_check_pk):
"""
Test API set_ca_cert raises exception when private key check fails
"""
cert_util = EdgeCertUtil()
mock_util_chk.return_value = True
with patch(OPEN_BUILTIN, mock_open(read_data='MOCKED')) as mocked_open:
mock_load_pk.return_value = crypto.PKey()
mock_check_pk.side_effect = crypto.Error()
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
passphrase='1234')
mocked_open.assert_called_with(CA_PRIVATE_KEY_FILE_NAME, 'rb')
mock_load_pk.assert_called_with(crypto.FILETYPE_PEM, 'MOCKED', passphrase='1234')
@mock.patch('OpenSSL.crypto.load_certificate')
@mock.patch('OpenSSL.crypto.PKey.check')
@mock.patch('OpenSSL.crypto.load_privatekey')
@mock.patch('edgectl.utils.EdgeUtils.check_if_file_exists')
def test_set_ca_cert_load_cert_failure_invalid(self, mock_util_chk, mock_load_pk,
mock_check_pk, mock_load_cert):
"""
Test API set_ca_cert raises exception when loading certificate fails
"""
cert_util = EdgeCertUtil()
mock_util_chk.return_value = True
with patch(OPEN_BUILTIN, mock_open(read_data='MOCKED')):
mock_load_pk.return_value = crypto.PKey()
mock_check_pk.return_value = True
mock_load_cert.side_effect = crypto.Error()
with self.assertRaises(edgectl.errors.EdgeValueError):
cert_util.set_ca_cert('root',
ca_cert_file_path=CA_CERT_FILE_NAME,
ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
passphrase='1234')
mock_load_cert.assert_called_with(crypto.FILETYPE_PEM, 'MOCKED')
@mock.patch('OpenSSL.crypto.load_certificate')
@mock.patch('OpenSSL.crypto.PKey.check')
@mock.patch('OpenSSL.crypto.load_privatekey')
@mock.patch('edgectl.utils.EdgeUtils.check_if_file_exists')
def test_set_ca_cert_load_cert_io_failure_invalid(self, mock_util_chk, mock_load_pk,
                                                  mock_check_pk, mock_load_cert):
    """
    Test API set_ca_cert raises EdgeFileAccessError when reading a certificate
    fails with an IOError (as opposed to EdgeValueError for a parse failure).
    """
    cert_util = EdgeCertUtil()
    mock_util_chk.return_value = True  # pretend all cert/key files exist
    with patch(OPEN_BUILTIN, mock_open(read_data='MOCKED')):
        mock_load_pk.return_value = crypto.PKey()
        mock_check_pk.return_value = True      # private key passes validation
        mock_load_cert.side_effect = IOError() # simulate an I/O failure
        with self.assertRaises(edgectl.errors.EdgeFileAccessError):
            cert_util.set_ca_cert('root',
                                  ca_cert_file_path=CA_CERT_FILE_NAME,
                                  ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
                                  ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
                                  ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
                                  passphrase='1234')
        mock_load_cert.assert_called_with(crypto.FILETYPE_PEM, 'MOCKED')
# pylint: disable=R0913
# disabling too many arguments warning
@mock.patch('OpenSSL.crypto.X509.has_expired')
@mock.patch('OpenSSL.crypto.load_certificate')
@mock.patch('OpenSSL.crypto.PKey.check')
@mock.patch('OpenSSL.crypto.load_privatekey')
@mock.patch('edgectl.utils.EdgeUtils.check_if_file_exists')
def test_set_ca_cert_load_expired_cert_invalid(self, mock_util_chk, mock_load_pk,
                                               mock_check_pk, mock_load_cert, mock_expired):
    """
    Test API set_ca_cert raises EdgeValueError when the loaded certificate
    has expired (X509.has_expired returns True).
    """
    cert_util = EdgeCertUtil()
    mock_util_chk.return_value = True  # pretend all cert/key files exist
    with patch(OPEN_BUILTIN, mock_open(read_data='MOCKED')):
        # Key loads and validates fine; the certificate itself is expired.
        mock_load_pk.return_value = crypto.PKey()
        mock_check_pk.return_value = True
        mock_load_cert.return_value = crypto.X509()
        mock_expired.return_value = True
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.set_ca_cert('root',
                                  ca_cert_file_path=CA_CERT_FILE_NAME,
                                  ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
                                  ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
                                  ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
                                  passphrase='1234')
        mock_load_cert.assert_called_with(crypto.FILETYPE_PEM, 'MOCKED')
# pylint: disable=R0913
# disabling too many arguments warning
@mock.patch('OpenSSL.crypto.X509.has_expired')
@mock.patch('OpenSSL.crypto.load_certificate')
@mock.patch('OpenSSL.crypto.PKey.check')
@mock.patch('OpenSSL.crypto.load_privatekey')
@mock.patch('edgectl.utils.EdgeUtils.check_if_file_exists')
def test_set_ca_cert_duplicate_id_invalid(self, mock_util_chk, mock_load_pk,
                                          mock_check_pk, mock_load_cert, mock_expired):
    """
    Test API set_ca_cert raises EdgeValueError when the same certificate id
    ('root') is registered twice: the first call succeeds, the second must fail.
    """
    cert_util = EdgeCertUtil()
    mock_util_chk.return_value = True
    with patch(OPEN_BUILTIN, mock_open(read_data='MOCKED')):
        # Make every validation step succeed so the first set_ca_cert works.
        mock_load_pk.return_value = crypto.PKey()
        mock_check_pk.return_value = True
        mock_load_cert.return_value = crypto.X509()
        mock_expired.return_value = False
        cert_util.set_ca_cert('root',
                              ca_cert_file_path=CA_CERT_FILE_NAME,
                              ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
                              ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
                              ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
                              passphrase='1234')
        # Second registration under the same id must be rejected.
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.set_ca_cert('root',
                                  ca_cert_file_path=CA_CERT_FILE_NAME,
                                  ca_root_cert_file_path=CA_OWNER_CERT_FILE_NAME,
                                  ca_root_chain_cert_file_path=CA_CHAIN_CERT_FILE_NAME,
                                  ca_private_key_file_path=CA_PRIVATE_KEY_FILE_NAME,
                                  passphrase='1234')
class TestEdgeCertUtilAPICreateIntCACert(unittest.TestCase):
    """Unit tests for API EdgeCertUtil.create_intermediate_ca_cert"""

    def test_create_intermediate_ca_cert_duplicate_ids_invalid(self):
        """
        Test API create_intermediate_ca_cert raises exception when the new
        certificate id collides with an already registered id
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.create_intermediate_ca_cert('root', 'root', common_name='name')

    def test_create_intermediate_ca_cert_validity_days_invalid(self):
        """
        Test API create_intermediate_ca_cert raises exception when invalid validity day values used
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        # Negative, zero, and over-limit validity periods must all be rejected.
        for bad_days in (-1, 0, 366, 1096):
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_intermediate_ca_cert('int', 'root', common_name='name',
                                                      validity_days_from_now=bad_days)

    def test_create_intermediate_ca_cert_passphrase_invalid(self):
        """
        Test API create_intermediate_ca_cert raises exception when passphrase is invalid
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        # Too short (empty, under 4 chars) and too long (1024 chars) passphrases.
        for bad_phrase in ('', '123', 'a' * 1024):
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_intermediate_ca_cert('int', 'root', common_name='name',
                                                      passphrase=bad_phrase)

    def test_create_intermediate_ca_cert_common_name_invalid(self):
        """
        Test API create_intermediate_ca_cert raises exception when common name is invalid
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        # Omitted entirely, None, empty, and longer than the 64 char limit.
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.create_intermediate_ca_cert('int', 'root')
        for bad_name in (None, '', 'a' * 65):
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_intermediate_ca_cert('int', 'root', common_name=bad_name)
class TestEdgeCertUtilAPICreateServerCert(unittest.TestCase):
    """Unit tests for API EdgeCertUtil.create_server_cert"""

    def test_create_server_cert_duplicate_ids_invalid(self):
        """
        Test API create_server_cert raises exception when the new certificate
        id collides with an already registered id
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.create_server_cert('root', 'root', host_name='name')

    def test_create_server_cert_validity_days_invalid(self):
        """
        Test API create_server_cert raises exception when invalid validity day values used
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        # Negative, zero, and over-limit validity periods must all be rejected.
        for bad_days in (-1, 0, 366, 1096):
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_server_cert('server', 'root', host_name='name',
                                             validity_days_from_now=bad_days)

    def test_create_server_cert_passphrase_invalid(self):
        """
        Test API create_server_cert raises exception when passphrase is invalid
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        # Too short (empty, under 4 chars) and too long (1024 chars) passphrases.
        for bad_phrase in ('', '123', 'a' * 1024):
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_server_cert('server', 'root', host_name='name',
                                             passphrase=bad_phrase)

    def test_create_server_cert_hostname_invalid(self):
        """
        Test API create_server_cert raises exception when hostname is invalid
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        # Omitted entirely, None, empty, and longer than the 64 char limit.
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.create_server_cert('int', 'root')
        for bad_host in (None, '', 'a' * 65):
            with self.assertRaises(edgectl.errors.EdgeValueError):
                cert_util.create_server_cert('int', 'root', host_name=bad_host)
class TestEdgeCertUtilAPIExportCertArtifacts(unittest.TestCase):
    """Unit tests for API EdgeCertUtil.export_cert_artifacts_to_dir"""

    @mock.patch('edgectl.utils.EdgeUtils.check_if_directory_exists')
    def test_export_cert_artifacts_to_dir_incorrect_id_invalid(self, mock_chk_dir):
        """
        Test API export_cert_artifacts_to_dir raises exception when an unknown
        certificate id is supplied
        """
        cert_util = EdgeCertUtil()
        mock_chk_dir.return_value = True   # destination dir reported as present
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.export_cert_artifacts_to_dir('root', 'some_dir')

    @mock.patch('edgectl.utils.EdgeUtils.check_if_directory_exists')
    def test_export_cert_artifacts_to_dir_invalid_dir_invalid(self, mock_chk_dir):
        """
        Test API export_cert_artifacts_to_dir raises exception when the output
        directory does not exist
        """
        cert_util = EdgeCertUtil()
        cert_util.create_root_ca_cert('root', subject_dict=VALID_SUBJECT_DICT)
        mock_chk_dir.return_value = False  # destination dir reported as missing
        with self.assertRaises(edgectl.errors.EdgeValueError):
            cert_util.export_cert_artifacts_to_dir('root', 'some_dir')
if __name__ == '__main__':
    # Collect a suite from every test class in this module and run them all
    # with a verbose text runner.
    TEST_CLASSES = (
        TestEdgeCertUtilAPIIsValidCertSubject,
        TestEdgeCertUtilAPICreateRootCACert,
        TestEdgeCertUtilAPISetCACert,
        TestEdgeCertUtilAPICreateIntCACert,
        TestEdgeCertUtilAPICreateServerCert,
        TestEdgeCertUtilAPIExportCertArtifacts,
    )
    LOADER = unittest.TestLoader()
    SUITE = unittest.TestSuite(
        LOADER.loadTestsFromTestCase(test_class) for test_class in TEST_CLASSES)
    unittest.TextTestRunner(verbosity=2).run(SUITE)
| 52.716724
| 100
| 0.628998
| 3,585
| 30,892
| 4.965411
| 0.059693
| 0.042132
| 0.040447
| 0.04247
| 0.880231
| 0.850402
| 0.831639
| 0.802595
| 0.782821
| 0.770069
| 0
| 0.009303
| 0.2971
| 30,892
| 585
| 101
| 52.806838
| 0.810491
| 0.087887
| 0
| 0.678815
| 0
| 0
| 0.069313
| 0.044597
| 0
| 0
| 0
| 0
| 0.134396
| 1
| 0.063781
| false
| 0.075171
| 0.018223
| 0
| 0.100228
| 0.002278
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
ceee86cd27fb076337d7403f1cea40c2121d28d3
| 28,033
|
py
|
Python
|
retinanet/model.py
|
jburkow/pytorch-retinanet
|
ba1782ed56d2e97a42eb6c337a4aa5ee4347d1f3
|
[
"Apache-2.0"
] | null | null | null |
retinanet/model.py
|
jburkow/pytorch-retinanet
|
ba1782ed56d2e97a42eb6c337a4aa5ee4347d1f3
|
[
"Apache-2.0"
] | null | null | null |
retinanet/model.py
|
jburkow/pytorch-retinanet
|
ba1782ed56d2e97a42eb6c337a4aa5ee4347d1f3
|
[
"Apache-2.0"
] | null | null | null |
import torch.nn as nn
import torch
import math
import torch.utils.model_zoo as model_zoo
from torchvision.ops import nms
from retinanet.utils import BasicBlock, Bottleneck, BBoxTransform, ClipBoxes
from retinanet.anchors import Anchors
from retinanet import losses
# Pretrained ImageNet weight URLs for the torchvision ResNet backbones, keyed
# by architecture name; consumed by the resnet* constructors below when
# pretrained=True.
model_urls = {
    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
class PyramidFeatures(nn.Module):
    """Feature Pyramid Network head: builds P3-P7 from backbone stages C3-C5."""

    def __init__(self, C3_size, C4_size, C5_size, feature_size=256):
        super(PyramidFeatures, self).__init__()
        # P5 path: lateral 1x1 conv on C5 plus a 3x3 smoothing conv; the
        # lateral output is also upsampled for merging into the P4 path.
        self.P5_1 = nn.Conv2d(C5_size, feature_size, kernel_size=1, stride=1, padding=0)
        self.P5_upsampled = nn.Upsample(scale_factor=2, mode='nearest')
        self.P5_2 = nn.Conv2d(feature_size, feature_size, kernel_size=3, stride=1, padding=1)
        # P4 path: lateral conv on C4, merged element-wise with upsampled P5.
        self.P4_1 = nn.Conv2d(C4_size, feature_size, kernel_size=1, stride=1, padding=0)
        self.P4_upsampled = nn.Upsample(scale_factor=2, mode='nearest')
        self.P4_2 = nn.Conv2d(feature_size, feature_size, kernel_size=3, stride=1, padding=1)
        # P3 path: lateral conv on C3, merged element-wise with upsampled P4.
        self.P3_1 = nn.Conv2d(C3_size, feature_size, kernel_size=1, stride=1, padding=0)
        self.P3_2 = nn.Conv2d(feature_size, feature_size, kernel_size=3, stride=1, padding=1)
        # "P6 is obtained via a 3x3 stride-2 conv on C5"
        self.P6 = nn.Conv2d(C5_size, feature_size, kernel_size=3, stride=2, padding=1)
        # "P7 is computed by applying ReLU followed by a 3x3 stride-2 conv on P6"
        self.P7_1 = nn.ReLU()
        self.P7_2 = nn.Conv2d(feature_size, feature_size, kernel_size=3, stride=2, padding=1)

    def forward(self, inputs):
        C3, C4, C5 = inputs

        # Top-down pathway with lateral connections.
        p5_lateral = self.P5_1(C5)
        p5_up = self.P5_upsampled(p5_lateral)
        p5 = self.P5_2(p5_lateral)

        p4_merged = self.P4_1(C4) + p5_up
        p4_up = self.P4_upsampled(p4_merged)
        p4 = self.P4_2(p4_merged)

        p3 = self.P3_2(self.P3_1(C3) + p4_up)

        # Extra coarse levels computed directly from C5.
        p6 = self.P6(C5)
        p7 = self.P7_2(self.P7_1(p6))

        return [p3, p4, p5, p6, p7]
class RegressionModel(nn.Module):
    """Box-regression head: four conv+ReLU stages, then a conv emitting four
    box offsets per anchor; output reshaped to (batch, num_boxes, 4)."""

    def __init__(self, num_features_in, num_anchors=9, feature_size=256):
        super(RegressionModel, self).__init__()
        self.conv1 = nn.Conv2d(num_features_in, feature_size, kernel_size=3, padding=1)
        self.act1 = nn.ReLU()
        self.conv2 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act2 = nn.ReLU()
        self.conv3 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act3 = nn.ReLU()
        self.conv4 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act4 = nn.ReLU()
        self.output = nn.Conv2d(feature_size, num_anchors * 4, kernel_size=3, padding=1)

    def forward(self, x):
        out = x
        # Four conv/ReLU stages followed by the regression output conv.
        for conv, act in ((self.conv1, self.act1), (self.conv2, self.act2),
                          (self.conv3, self.act3), (self.conv4, self.act4)):
            out = act(conv(out))
        out = self.output(out)
        # (B, 4*A, H, W) -> (B, H, W, 4*A) -> (B, H*W*A, 4)
        out = out.permute(0, 2, 3, 1)
        return out.contiguous().view(out.shape[0], -1, 4)
class ClassificationModel(nn.Module):
    """Classification head: four conv+ReLU stages, a conv emitting
    num_anchors*num_classes logits per location, and a sigmoid; output is
    reshaped to (batch, num_boxes, num_classes)."""

    def __init__(self, num_features_in, num_anchors=9, num_classes=80, prior=0.01, feature_size=256):
        super(ClassificationModel, self).__init__()
        self.num_classes = num_classes
        self.num_anchors = num_anchors
        self.conv1 = nn.Conv2d(num_features_in, feature_size, kernel_size=3, padding=1)
        self.act1 = nn.ReLU()
        self.conv2 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act2 = nn.ReLU()
        self.conv3 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act3 = nn.ReLU()
        self.conv4 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act4 = nn.ReLU()
        self.output = nn.Conv2d(feature_size, num_anchors * num_classes, kernel_size=3, padding=1)
        self.output_act = nn.Sigmoid()

    def forward(self, x):
        out = x
        # Four conv/ReLU stages, then the sigmoid-activated output conv.
        for conv, act in ((self.conv1, self.act1), (self.conv2, self.act2),
                          (self.conv3, self.act3), (self.conv4, self.act4)):
            out = act(conv(out))
        out = self.output_act(self.output(out))
        # (B, A*K, H, W) -> (B, H, W, A*K) -> (B, H, W, A, K) -> (B, H*W*A, K)
        out = out.permute(0, 2, 3, 1)
        out = out.view(out.shape[0], out.shape[1], out.shape[2],
                       self.num_anchors, self.num_classes)
        return out.contiguous().view(x.shape[0], -1, self.num_classes)
class ResNet(nn.Module):
    """RetinaNet detector on a ResNet backbone with an FPN neck.

    In training mode, forward() takes (img_batch, annotations) and returns the
    focal loss; in eval mode it takes img_batch and returns
    [scores, class indexes, box coordinates] after score thresholding and NMS.
    """

    def __init__(self, num_classes, block, layers):
        # `block` is BasicBlock or Bottleneck; `layers` gives the number of
        # blocks in each of the four ResNet stages.
        self.inplanes = 64
        super(ResNet, self).__init__()
        # Standard ResNet stem.
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        # Channel counts of C3/C4/C5 fed to the FPN; the last conv of a stage
        # is conv2 for BasicBlock and conv3 for Bottleneck.
        if block == BasicBlock:
            fpn_sizes = [self.layer2[layers[1] - 1].conv2.out_channels, self.layer3[layers[2] - 1].conv2.out_channels,
                         self.layer4[layers[3] - 1].conv2.out_channels]
        elif block == Bottleneck:
            fpn_sizes = [self.layer2[layers[1] - 1].conv3.out_channels, self.layer3[layers[2] - 1].conv3.out_channels,
                         self.layer4[layers[3] - 1].conv3.out_channels]
        else:
            raise ValueError(f"Block type {block} not understood")
        self.fpn = PyramidFeatures(fpn_sizes[0], fpn_sizes[1], fpn_sizes[2])
        self.regressionModel = RegressionModel(256)
        self.classificationModel = ClassificationModel(256, num_classes=num_classes)
        self.anchors = Anchors()
        self.regressBoxes = BBoxTransform()
        self.clipBoxes = ClipBoxes()
        self.focalLoss = losses.FocalLoss()
        # He-style init for convs; BatchNorm to identity (scale 1, shift 0).
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
        # Focal-loss paper initialization: bias the classification output so
        # the initial predicted foreground probability is `prior`.
        prior = 0.01
        self.classificationModel.output.weight.data.fill_(0)
        self.classificationModel.output.bias.data.fill_(-math.log((1.0 - prior) / prior))
        self.regressionModel.output.weight.data.fill_(0)
        self.regressionModel.output.bias.data.fill_(0)
        self.freeze_bn()

    def _make_layer(self, block, planes, blocks, stride=1):
        """Build one ResNet stage of `blocks` residual blocks; the first block
        downsamples (stride/channel change) when needed."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = [block(self.inplanes, planes, stride, downsample)]
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

    def freeze_bn(self):
        '''Freeze BatchNorm layers.'''
        for layer in self.modules():
            if isinstance(layer, nn.BatchNorm2d):
                layer.eval()

    def forward(self, inputs):
        # Training mode receives (images, annotations); eval mode images only.
        if self.training:
            img_batch, annotations = inputs
        else:
            img_batch = inputs
        # Backbone stem + stages C2..C5.
        x = self.conv1(img_batch)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x1 = self.layer1(x)
        x2 = self.layer2(x1)
        x3 = self.layer3(x2)
        x4 = self.layer4(x3)
        # FPN levels P3-P7, each run through the shared heads and concatenated
        # along the anchor dimension.
        features = self.fpn([x2, x3, x4])
        regression = torch.cat([self.regressionModel(feature) for feature in features], dim=1)
        classification = torch.cat([self.classificationModel(feature) for feature in features], dim=1)
        anchors = self.anchors(img_batch)
        if self.training:
            return self.focalLoss(classification, regression, anchors, annotations)
        else:
            # Decode and clip predicted boxes, then per-class threshold + NMS.
            transformed_anchors = self.regressBoxes(anchors, regression)
            transformed_anchors = self.clipBoxes(transformed_anchors, img_batch)
            finalResult = [[], [], []]
            finalScores = torch.Tensor([])
            finalAnchorBoxesIndexes = torch.Tensor([]).long()
            finalAnchorBoxesCoordinates = torch.Tensor([])
            if torch.cuda.is_available():
                finalScores = finalScores.cuda()
                finalAnchorBoxesIndexes = finalAnchorBoxesIndexes.cuda()
                finalAnchorBoxesCoordinates = finalAnchorBoxesCoordinates.cuda()
            for i in range(classification.shape[2]):
                scores = torch.squeeze(classification[:, :, i])
                scores_over_thresh = (scores > 0.05)
                if scores_over_thresh.sum() == 0:
                    # return empty tensors to count toward true negatives
                    # NOTE(review): this aborts the whole image as soon as ANY
                    # class has no detections above threshold (the commented-out
                    # `continue` below suggests the upstream behavior skipped
                    # just that class) — confirm this is intentional.
                    return torch.Tensor(), torch.Tensor(), torch.Tensor()
                    # no boxes to NMS, just continue
                    # continue
                scores = scores[scores_over_thresh]
                anchorBoxes = torch.squeeze(transformed_anchors)
                anchorBoxes = anchorBoxes[scores_over_thresh]
                # torchvision NMS with IoU threshold 0.5.
                anchors_nms_idx = nms(anchorBoxes, scores, 0.5)
                # finalResult is accumulated but never returned — kept as-is.
                finalResult[0].extend(scores[anchors_nms_idx])
                finalResult[1].extend(torch.tensor([i] * anchors_nms_idx.shape[0]))
                finalResult[2].extend(anchorBoxes[anchors_nms_idx])
                finalScores = torch.cat((finalScores, scores[anchors_nms_idx]))
                finalAnchorBoxesIndexesValue = torch.tensor([i] * anchors_nms_idx.shape[0])
                if torch.cuda.is_available():
                    finalAnchorBoxesIndexesValue = finalAnchorBoxesIndexesValue.cuda()
                finalAnchorBoxesIndexes = torch.cat((finalAnchorBoxesIndexes, finalAnchorBoxesIndexesValue))
                finalAnchorBoxesCoordinates = torch.cat((finalAnchorBoxesCoordinates, anchorBoxes[anchors_nms_idx]))
            return [finalScores, finalAnchorBoxesIndexes, finalAnchorBoxesCoordinates]
### FiLMed RetinaNet classes ###
class FiLMGenerator(nn.Module):
    """
    MLP that generates FiLM parameters (gains and biases).

    Attributes
    ----------
    n_features : int
        Number of non-image feature inputs.
    n_channels : int
        Number of feature maps to modulate (also, half the number of MLP
        outputs: n_channels gains + n_channels biases).
    n_hidden_features : int
        Number of units in the hidden layers. Defaults to n_channels // 2
        when not supplied.
    """
    def __init__(self, n_features, n_channels=256, n_hidden_features=None):
        super(FiLMGenerator, self).__init__()
        self.n_features = n_features
        self.n_channels = n_channels
        # BUG FIX: the original condition was inverted
        # (`n_hidden_features if n_hidden_features is None else n_channels // 2`),
        # which discarded an explicitly supplied width and left the default
        # case as None (crashing nn.Linear below). Honor the caller's value,
        # otherwise fall back to n_channels // 2. This is behavior-compatible
        # with the in-file caller (n_hidden_features=128 == 256 // 2).
        if n_hidden_features is not None:
            self.n_hidden_features = n_hidden_features
        else:
            self.n_hidden_features = self.n_channels // 2
        # Simple MLP to predict gains and biases from non-image inputs.
        # Potential improvements: Dropout after each linear layer, LeakyReLU
        # instead of ReLU.
        self.film_generator = nn.Sequential(
            nn.Linear(self.n_features, self.n_hidden_features),
            # nn.Dropout(0.1),
            nn.ReLU(inplace=True),
            nn.Linear(self.n_hidden_features, self.n_hidden_features),
            # nn.Dropout(0.2),
            nn.ReLU(inplace=True),
            nn.Linear(self.n_hidden_features, 2*self.n_channels)
        )

    def forward(self, x):
        """Map (batch_size, n_features) inputs to (batch_size, 2*n_channels)
        FiLM parameters, later decomposed into n_channels gammas and
        n_channels betas."""
        film_params = self.film_generator(x)
        return film_params
class FiLMLayer(nn.Module):
    """Layer that performs Feature-wise Linear Modulation (FiLM):
    out = (1 + gamma) * F + beta, applied per channel."""

    def __init__(self):
        super(FiLMLayer, self).__init__()

    def forward(self, F, gammas, betas):
        # Expand the (batch, channels) modulation parameters over the spatial
        # dimensions of F: (B, C) -> (B, C, H, W) views (no copy).
        gammas = gammas.unsqueeze(2).unsqueeze(3).expand_as(F)
        betas = betas.unsqueeze(2).unsqueeze(3).expand_as(F)
        return (1 + gammas) * F + betas
class FiLMedRegressionModel(nn.Module):
    """Box-regression head with a FiLM layer applied after the first conv;
    otherwise identical to RegressionModel."""

    def __init__(self, num_features_in, num_anchors=9, feature_size=256):
        super(FiLMedRegressionModel, self).__init__()
        self.film = FiLMLayer()
        self.conv1 = nn.Conv2d(num_features_in, feature_size, kernel_size=3, padding=1)
        self.act1 = nn.ReLU()
        self.conv2 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act2 = nn.ReLU()
        self.conv3 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act3 = nn.ReLU()
        self.conv4 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act4 = nn.ReLU()
        self.output = nn.Conv2d(feature_size, num_anchors * 4, kernel_size=3, padding=1)

    def forward(self, x, gammas, betas):
        # FiLM modulation is applied once, right after the first convolution;
        # the remaining stages mirror the plain regression head.
        out = self.film(self.conv1(x), gammas, betas)
        out = self.act1(out)
        for conv, act in ((self.conv2, self.act2), (self.conv3, self.act3),
                          (self.conv4, self.act4)):
            out = act(conv(out))
        out = self.output(out)
        # (B, 4*A, H, W) -> (B, H, W, 4*A) -> (B, H*W*A, 4)
        out = out.permute(0, 2, 3, 1)
        return out.contiguous().view(out.shape[0], -1, 4)
class FiLMedClassificationModel(nn.Module):
    """Classification head with a FiLM layer applied after the first conv;
    otherwise identical to ClassificationModel."""

    def __init__(self, num_features_in, num_anchors=9, num_classes=80, prior=0.01, feature_size=256, FiLMed=False):
        super(FiLMedClassificationModel, self).__init__()
        self.num_classes = num_classes
        self.num_anchors = num_anchors
        self.film = FiLMLayer()
        self.conv1 = nn.Conv2d(num_features_in, feature_size, kernel_size=3, padding=1)
        self.act1 = nn.ReLU()
        self.conv2 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act2 = nn.ReLU()
        self.conv3 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act3 = nn.ReLU()
        self.conv4 = nn.Conv2d(feature_size, feature_size, kernel_size=3, padding=1)
        self.act4 = nn.ReLU()
        self.output = nn.Conv2d(feature_size, num_anchors * num_classes, kernel_size=3, padding=1)
        self.output_act = nn.Sigmoid()

    def forward(self, x, gammas, betas):
        # FiLM modulation is applied once, right after the first convolution;
        # the remaining stages mirror the plain classification head.
        out = self.film(self.conv1(x), gammas, betas)
        out = self.act1(out)
        for conv, act in ((self.conv2, self.act2), (self.conv3, self.act3),
                          (self.conv4, self.act4)):
            out = act(conv(out))
        out = self.output_act(self.output(out))
        # (B, A*K, H, W) -> (B, H, W, A*K) -> (B, H, W, A, K) -> (B, H*W*A, K)
        out = out.permute(0, 2, 3, 1)
        out = out.view(out.shape[0], out.shape[1], out.shape[2],
                       self.num_anchors, self.num_classes)
        return out.contiguous().view(x.shape[0], -1, self.num_classes)
class FiLMedResNet(nn.Module):
    """FiLMed version of RetinaNet. FiLM is applied after the first convolution
    block in the regression and classification head, once for each of the 5
    feature pyramid outputs (i.e., 5*2=10 FiLM layers).

    Training mode: forward((img_batch, metadata_batch, annotations)) -> focal
    loss. Eval mode: forward((img_batch, metadata_batch)) ->
    [scores, class indexes, box coordinates].
    """

    def __init__(self, num_classes, block, layers):
        super(FiLMedResNet, self).__init__()
        # Initialize FiLM generators: 10 in total for this specific configuration.
        # One FiLM generator (MLP) for each of the 5 feature pyramid outputs that
        # will be fed through the classification head.
        # NOTE(review): n_hidden_features=128 equals n_channels // 2, so this
        # matches FiLMGenerator's default hidden width.
        self.cls_film_generators = nn.ModuleList([FiLMGenerator(n_features=5, n_hidden_features=128, n_channels=256) for _ in range(5)])
        # One FiLM generator (MLP) for each of the 5 feature pyramid outputs that
        # will be fed through the regression head.
        self.reg_film_generators = nn.ModuleList([FiLMGenerator(n_features=5, n_hidden_features=128, n_channels=256) for _ in range(5)])
        # Standard ResNet stem and stages (same as the plain ResNet above).
        self.inplanes = 64
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, 64, layers[0])
        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
        # Channel counts of C3/C4/C5 fed to the FPN; the last conv of a stage
        # is conv2 for BasicBlock and conv3 for Bottleneck.
        if block == BasicBlock:
            fpn_sizes = [self.layer2[layers[1] - 1].conv2.out_channels, self.layer3[layers[2] - 1].conv2.out_channels,
                         self.layer4[layers[3] - 1].conv2.out_channels]
        elif block == Bottleneck:
            fpn_sizes = [self.layer2[layers[1] - 1].conv3.out_channels, self.layer3[layers[2] - 1].conv3.out_channels,
                         self.layer4[layers[3] - 1].conv3.out_channels]
        else:
            raise ValueError(f"Block type {block} not understood")
        self.fpn = PyramidFeatures(fpn_sizes[0], fpn_sizes[1], fpn_sizes[2])
        # Initialize FiLMed classification and regression heads!
        self.regressionModel = FiLMedRegressionModel(256, feature_size=256)
        self.classificationModel = FiLMedClassificationModel(256, feature_size=256, num_classes=num_classes)
        self.anchors = Anchors()
        self.regressBoxes = BBoxTransform()
        self.clipBoxes = ClipBoxes()
        self.focalLoss = losses.FocalLoss()
        # He-style init for convs; BatchNorm to identity (scale 1, shift 0).
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
        # Focal-loss paper initialization: bias the classification output so
        # the initial predicted foreground probability is `prior`.
        prior = 0.01
        self.classificationModel.output.weight.data.fill_(0)
        self.classificationModel.output.bias.data.fill_(-math.log((1.0 - prior) / prior))
        self.regressionModel.output.weight.data.fill_(0)
        self.regressionModel.output.bias.data.fill_(0)
        self.freeze_bn()

    def _make_layer(self, block, planes, blocks, stride=1):
        """Build one ResNet stage of `blocks` residual blocks; the first block
        downsamples (stride/channel change) when needed."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        layers = [block(self.inplanes, planes, stride, downsample)]
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

    def freeze_bn(self):
        '''Freeze BatchNorm layers.'''
        for layer in self.modules():
            if isinstance(layer, nn.BatchNorm2d):
                layer.eval()

    def forward(self, inputs):
        # Training mode receives (images, metadata, annotations); eval mode
        # receives (images, metadata).
        if self.training:
            img_batch, metadata_batch, annotations = inputs
        else:
            img_batch, metadata_batch = inputs
        # Generate FiLM parameters for classification head
        cls_betas = []
        cls_gammas = []
        for film_generator in self.cls_film_generators:
            film_params = film_generator(metadata_batch)
            # Split output into two "chunks": (batch_size, n_channels) gammas and (batch_size, n_channels) betas
            # NOTE(review): the first half is unpacked as betas here, while the
            # FiLMGenerator docstring describes gammas first. Since the
            # generator is learned end-to-end the ordering is arbitrary but
            # must stay consistent between training and inference — confirm.
            betas, gammas = torch.split(film_params, film_generator.n_channels, dim=1)
            cls_betas.append(betas)
            cls_gammas.append(gammas)
        # Create (5, batch_size, n_channels) tensor of gammas and betas, respectively, for classification head (5 for each of the feature pyramid outputs)
        cls_betas = torch.stack(cls_betas)
        cls_gammas = torch.stack(cls_gammas)
        # Generate FiLM parameters for regression head
        reg_betas = []
        reg_gammas = []
        for film_generator in self.reg_film_generators:
            film_params = film_generator(metadata_batch)
            # Split output into two "chunks": (batch_size, n_channels) gammas and (batch_size, n_channels) betas
            betas, gammas = torch.split(film_params, film_generator.n_channels, dim=1)
            reg_betas.append(betas)
            reg_gammas.append(gammas)
        # Create (5, batch_size, n_channels) tensor of gammas and betas, respectively, for regression head (5 for each of the feature pyramid outputs)
        reg_betas = torch.stack(reg_betas)
        reg_gammas = torch.stack(reg_gammas)
        # Backbone stem + stages, then FPN levels P3-P7.
        x = self.conv1(img_batch)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x1 = self.layer1(x)
        x2 = self.layer2(x1)
        x3 = self.layer3(x2)
        x4 = self.layer4(x3)
        features = self.fpn([x2, x3, x4])
        # Feed feature pyramid output + associated FiLM params through regression head
        regression = torch.cat([self.regressionModel(features[i], reg_gammas[i], reg_betas[i]) for i in range(len(features))], dim=1)
        # Feed feature pyramid output + associated FiLM params through classification head
        classification = torch.cat([self.classificationModel(features[i], cls_gammas[i], cls_betas[i]) for i in range(len(features))], dim=1)
        anchors = self.anchors(img_batch)
        if self.training:
            return self.focalLoss(classification, regression, anchors, annotations)
        else:
            # Decode and clip predicted boxes, then per-class threshold + NMS
            # (same post-processing as the plain ResNet above).
            transformed_anchors = self.regressBoxes(anchors, regression)
            transformed_anchors = self.clipBoxes(transformed_anchors, img_batch)
            finalResult = [[], [], []]
            finalScores = torch.Tensor([])
            finalAnchorBoxesIndexes = torch.Tensor([]).long()
            finalAnchorBoxesCoordinates = torch.Tensor([])
            if torch.cuda.is_available():
                finalScores = finalScores.cuda()
                finalAnchorBoxesIndexes = finalAnchorBoxesIndexes.cuda()
                finalAnchorBoxesCoordinates = finalAnchorBoxesCoordinates.cuda()
            for i in range(classification.shape[2]):
                scores = torch.squeeze(classification[:, :, i])
                scores_over_thresh = (scores > 0.05)
                if scores_over_thresh.sum() == 0:
                    # return empty tensors to count toward true negatives
                    # NOTE(review): aborts the whole image as soon as ANY class
                    # has no detections above threshold (the commented-out
                    # `continue` suggests upstream skipped just that class).
                    return torch.Tensor(), torch.Tensor(), torch.Tensor()
                    # no boxes to NMS, just continue
                    # continue
                scores = scores[scores_over_thresh]
                anchorBoxes = torch.squeeze(transformed_anchors)
                anchorBoxes = anchorBoxes[scores_over_thresh]
                # torchvision NMS with IoU threshold 0.5.
                anchors_nms_idx = nms(anchorBoxes, scores, 0.5)
                # finalResult is accumulated but never returned — kept as-is.
                finalResult[0].extend(scores[anchors_nms_idx])
                finalResult[1].extend(torch.tensor([i] * anchors_nms_idx.shape[0]))
                finalResult[2].extend(anchorBoxes[anchors_nms_idx])
                finalScores = torch.cat((finalScores, scores[anchors_nms_idx]))
                finalAnchorBoxesIndexesValue = torch.tensor([i] * anchors_nms_idx.shape[0])
                if torch.cuda.is_available():
                    finalAnchorBoxesIndexesValue = finalAnchorBoxesIndexesValue.cuda()
                finalAnchorBoxesIndexes = torch.cat((finalAnchorBoxesIndexes, finalAnchorBoxesIndexesValue))
                finalAnchorBoxesCoordinates = torch.cat((finalAnchorBoxesCoordinates, anchorBoxes[anchors_nms_idx]))
            return [finalScores, finalAnchorBoxesIndexes, finalAnchorBoxesCoordinates]
### END FiLMed RetinaNet classes ###
def resnet18(num_classes, pretrained=False, FiLMed=False, **kwargs):
    """Constructs a ResNet-18 based detector.

    Args:
        num_classes (int): number of object classes for the classification head
        pretrained (bool): If True, loads ImageNet-pretrained backbone weights
        FiLMed (bool): If True, builds the FiLM-modulated variant of the network
    """
    arch = FiLMedResNet if FiLMed else ResNet
    model = arch(num_classes, BasicBlock, [2, 2, 2, 2], **kwargs)
    if pretrained:
        # strict=False: the detection heads have no ImageNet counterpart.
        model.load_state_dict(model_zoo.load_url(model_urls['resnet18'], model_dir='.'), strict=False)
    return model
def resnet34(num_classes, pretrained=False, FiLMed=False, **kwargs):
    """Constructs a ResNet-34 model.

    Args:
        num_classes: number of detection classes.
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        FiLMed (bool): If True, builds the FiLM-conditioned variant.
    """
    backbone_cls = FiLMedResNet if FiLMed else ResNet
    model = backbone_cls(num_classes, BasicBlock, [3, 4, 6, 3], **kwargs)
    if pretrained:
        weights = model_zoo.load_url(model_urls['resnet34'], model_dir='.')
        model.load_state_dict(weights, strict=False)
    return model
def resnet50(num_classes, pretrained=False, FiLMed=False, **kwargs):
    """Constructs a ResNet-50 model.

    Args:
        num_classes: number of detection classes.
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        FiLMed (bool): If True, builds the FiLM-conditioned variant.
    """
    backbone_cls = FiLMedResNet if FiLMed else ResNet
    model = backbone_cls(num_classes, Bottleneck, [3, 4, 6, 3], **kwargs)
    if pretrained:
        weights = model_zoo.load_url(model_urls['resnet50'], model_dir='.')
        model.load_state_dict(weights, strict=False)
    return model
def resnet101(num_classes, pretrained=False, FiLMed=False, **kwargs):
    """Constructs a ResNet-101 model.

    Args:
        num_classes: number of detection classes.
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        FiLMed (bool): If True, builds the FiLM-conditioned variant.
    """
    backbone_cls = FiLMedResNet if FiLMed else ResNet
    model = backbone_cls(num_classes, Bottleneck, [3, 4, 23, 3], **kwargs)
    if pretrained:
        weights = model_zoo.load_url(model_urls['resnet101'], model_dir='.')
        model.load_state_dict(weights, strict=False)
    return model
def resnet152(num_classes, pretrained=False, FiLMed=False, **kwargs):
    """Constructs a ResNet-152 model.

    Args:
        num_classes: number of detection classes.
        pretrained (bool): If True, returns a model pre-trained on ImageNet
        FiLMed (bool): If True, builds the FiLM-conditioned variant.
    """
    # Bug fix: the FiLMed branch previously instantiated a plain ResNet, so
    # the FiLMed flag was silently ignored for resnet152 — inconsistent with
    # resnet18/34/50/101, which all build FiLMedResNet when FiLMed is True.
    if FiLMed:
        model = FiLMedResNet(num_classes, Bottleneck, [3, 8, 36, 3], **kwargs)
    else:
        model = ResNet(num_classes, Bottleneck, [3, 8, 36, 3], **kwargs)
    if pretrained:
        model.load_state_dict(model_zoo.load_url(model_urls['resnet152'], model_dir='.'), strict=False)
    return model
| 39.594633
| 206
| 0.63361
| 3,588
| 28,033
| 4.790134
| 0.098105
| 0.032641
| 0.017455
| 0.029324
| 0.813289
| 0.787805
| 0.769477
| 0.754291
| 0.738581
| 0.728574
| 0
| 0.036461
| 0.254486
| 28,033
| 707
| 207
| 39.650636
| 0.785923
| 0.133521
| 0
| 0.718062
| 0
| 0
| 0.018938
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059471
| false
| 0
| 0.017621
| 0
| 0.140969
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
06481549f0492a17438d274b1900496eb682e18e
| 146,045
|
py
|
Python
|
decoding/mask_predict.py
|
ybCliff/VideoCaptioning
|
93fc3b095c970e51e1e24909163a827df98d6ef3
|
[
"MIT"
] | 3
|
2020-05-16T23:59:57.000Z
|
2021-06-14T01:59:41.000Z
|
decoding/mask_predict.py
|
ybCliff/VideoCaptioning
|
93fc3b095c970e51e1e24909163a827df98d6ef3
|
[
"MIT"
] | null | null | null |
decoding/mask_predict.py
|
ybCliff/VideoCaptioning
|
93fc3b095c970e51e1e24909163a827df98d6ef3
|
[
"MIT"
] | 3
|
2020-05-17T00:01:01.000Z
|
2020-07-28T18:04:05.000Z
|
from decoding.strategy_utils import generate_step_with_prob, assign_single_value_long, assign_single_value_byte, assign_multi_value_long, convert_tokens
import models.Constants as Constants
import torch
from tqdm import tqdm
import numpy as np
import json
import math
import matplotlib.pyplot as plt
from matplotlib import cm
import torch.nn.functional as F
def enlarge(info, beam_size, return_view=True):
    """Tile a batched tensor `beam_size` times along a new beam axis.

    Supports inputs of shape (bsz, d) or (bsz, d1, d2). Returns shape
    (bsz * beam_size, *rest) when `return_view` is True, otherwise the
    un-flattened (bsz, beam_size, *rest) tensor.
    """
    bsz, *rest_shape = info.shape
    if len(rest_shape) == 2:
        expanded = info.unsqueeze(1).repeat(1, beam_size, 1, 1)
    else:
        expanded = info.unsqueeze(1).repeat(1, beam_size, 1)
    if return_view:
        return expanded.view(bsz * beam_size, *rest_shape)
    return expanded
def to_sentence(hyp, vocab, break_words=None, skip_words=()):
    """Decode a sequence of token ids into a whitespace-joined sentence.

    Args:
        hyp: iterable of token ids.
        vocab: mapping from token id to word string.
        break_words: ids that terminate decoding (default: [Constants.PAD]).
        skip_words: ids that are silently dropped.

    Returns:
        str: the decoded sentence.
    """
    # Bug fix: the original signature used mutable default arguments
    # (break_words=[Constants.PAD], skip_words=[]); use a None sentinel /
    # immutable tuple instead. Behavior for all callers is unchanged.
    if break_words is None:
        break_words = [Constants.PAD]
    words = []
    for word_id in hyp:
        if word_id in skip_words:
            continue
        if word_id in break_words:
            break
        words.append(vocab[word_id])
    return ' '.join(words)
def plot(tgt_tokens, tgt_vocab, token_probs, corresponding_probs, num_mask, mask_ind, counter, teacher_model, split=False):
    """Log per-token diagnostics for one refinement step and save a heatmap.

    Writes, via tqdm, the decoded words of the sample at batch index 1
    (range(1, 2)) together with the student probabilities, teacher
    probabilities, their geometric mean, and the mask pattern; then renders
    the probability rows as a Blues heatmap saved to './{0|1}_{counter}.png'
    (prefix 1 when a teacher model is present).
    """
    for n in range(1,2):
        sent = []       # decoded words
        stu = []        # student (NA decoder) probabilities, formatted
        tea = []        # teacher probabilities, formatted
        overall = []    # sqrt(student * teacher) combined score
        select_id = n
        tmp = tgt_tokens[select_id].tolist()
        mask_id = [0] * len(tmp)
        for i, token in enumerate(tmp):
            # stop at the first PAD token
            if token == Constants.PAD:
                break
            # NOTE(review): vocab is keyed by *string* token ids here — confirm
            word = tgt_vocab[str(token)]
            tmp1 = token_probs[select_id, i]
            tmp2 = corresponding_probs[select_id, i]
            #jud = 'True' if tgt_tokens[select_id, i].item() == Constants.MASK else 'False'
            #tqdm.write('%s\t%.4f\t%.4f\t%.8f\t%s' % (word, tmp1, tmp2, tmp1 * tmp2, jud))
            sent.append('%s' % word)
            stu.append("%.2f" %tmp1)
            tea.append("%.2f" %tmp2)
            overall.append('%.2f' % (math.sqrt(tmp1 * tmp2)))
            # mark the positions that were re-masked at this step
            if i < num_mask[select_id].item():
                mask_id[mask_ind[select_id, i].item()] = 1.0
        sent.append(str(num_mask[select_id].item()))
        tqdm.write(("Step %d: " % (counter)) + ' '.join(sent))
        tqdm.write(("Step %d Stu: " % (counter)) + ','.join(stu))
        tqdm.write(("Step %d Tea: " % (counter)) + ','.join(tea))
        tqdm.write(("Step %d All: " % (counter)) + ','.join(overall))
        mask_id = ['%.2f' % item for item in mask_id]
        tqdm.write(("Step %d Mas: " % (counter)) + ','.join(mask_id))
        # convert formatted strings back to floats for the heatmap rows
        stu = [float(item) for item in stu]
        tea = [float(item) for item in tea]
        overall = [float(item) for item in overall]
        # only show teacher/combined rows when a teacher model exists;
        # [:-1] drops the appended num_mask entry (sent got it, probs did not
        # — the slice keeps row lengths consistent with each other)
        if teacher_model is not None:
            a = np.array([stu[:-1], tea[:-1], overall[:-1]])
        else:
            a = np.array([stu[:-1]])
        myplot = plt.imshow(a, cmap=cm.Blues, vmin=0, vmax=1)
        cbar = plt.colorbar(myplot, shrink=.92, orientation='horizontal')
        plt.xticks(())
        plt.yticks(())
        plt.savefig('./%d_%d.png' % (1 if teacher_model is not None else 0, counter))
        plt.show()
    if split:
        tqdm.write('-----------------------')
'''
class MaskPredict(object):
def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False):
super().__init__()
self.iterations = iterations
self.random = np.random.RandomState(seed)
self.dict_mapping = dict_mapping
self.plot = plot
self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
collect_results = []
iterations = seq_len if self.iterations is None else self.iterations
tgt_tokens, token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens)
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)#, no_masking_desicion=True)
tgt_tokens[pad_mask] = Constants.PAD
token_probs[pad_mask] = 1.0
corresponding_probs[pad_mask] = 1.0
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
#tqdm.write("Initialization: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
for counter in range(1, iterations):
ratio = (1.0 - (counter / iterations))
ratio = max(ratio, 0.4)
# Mask
num_mask = (seq_lens.float() * ratio).long()
mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
if self.plot: plot(tgt_tokens, tgt_vocab, token_probs, corresponding_probs, num_mask, mask_ind, counter, teacher_model, split=True)
tgt_tokens[mask_ind] = Constants.MASK
# Predict
new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens)
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
# Interact
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)
corresponding_probs[pad_mask] = 1.0
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
if self.plot:
plot(tgt_tokens, tgt_vocab, token_probs, corresponding_probs, num_mask, mask_ind, counter+1, teacher_model, split=True)
#lprobs = token_probs.log()
lprobs = (token_probs * corresponding_probs).log()
#eos_mask = tgt_tokens.eq(Constants.EOS)
#non_pad_eos_mask = 1 - (eos_mask + pad_mask).gt(0)
#lengths = non_pad_eos_mask.sum(-1)
return tgt_tokens, lprobs, collect_results
def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens):
#print(enc_output[0])
decoder_out, *_ = model.decoder(tgt_tokens, enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
tgt_tokens, token_probs, all_probs = generate_step_with_prob(model.tgt_word_prj(decoder_out))
return tgt_tokens, token_probs, all_probs
def mapping(self, tgt_tokens):
tokens = tgt_tokens.clone().flatten()
for i, token in enumerate(tokens):
tokens[i] = self.dict_mapping[token.item()]
return tokens.view(*tgt_tokens.shape)
def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
if teacher_model is None or no_masking_desicion:
return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
if self.dict_mapping != {}:
tokens = self.mapping(tgt_tokens)
else:
tokens = tgt_tokens
tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
#print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
#def select_worst(self, token_probs, num_mask):
# bsz, seq_len = token_probs.size()
# masks = [token_probs[batch, :].topk(max(1, num_mask[batch]), largest=False, sorted=False)[1] for batch in range(bsz)]
# masks = [torch.cat([mask, mask.new(seq_len - mask.size(0)).fill_(mask[0])], dim=0) for mask in masks]
# return torch.stack(masks, dim=0)
def select_worst(self, token_probs, num_mask):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
def select_random(self, token_probs, num_mask, seq_lens):
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
def select_multinomial(self, token_probs, num_mask, seq_lens):
probs = torch.exp(-token_probs)
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
def generate(model, teacher_model, encoder_outputs, teacher_encoder_outputs, category, tgt_tokens, tgt_vocab, opt, dict_mapping, length_bias):
strategy = MaskPredict(opt['iterations'], opt['seed'], dict_mapping=dict_mapping)
length_beam_size = opt['length_beam_size']
#gold_target_len = tgt_tokens.ne(Constants.PAD).sum(-1)
gold_target_len = None
#gold_target_len = tgt_tokens.ne(Constants.PAD).sum(-1) if opt['use_gold_target_len'] else None
beam_alpha = opt.get('beam_alpha', 1.0)
#print(beam_alpha)
enc_output, pred_length = encoder_outputs['enc_output'], encoder_outputs['pred_length']
if teacher_encoder_outputs is not None:
teacher_enc_output = teacher_encoder_outputs['enc_output']
if isinstance(teacher_enc_output, list):
teacher_enc_output = teacher_enc_output[0]
else:
teacher_enc_output = None
if isinstance(enc_output, list):
assert len(enc_output) == 1
enc_output = enc_output[0]
bsz = enc_output.size(0)
beam = predict_length_beam(gold_target_len, pred_length, length_beam_size, length_bias)
max_len = beam.max().item()
length_mask = torch.triu(enc_output.new(max_len, max_len).fill_(1).long(), 1)
length_mask = torch.stack([length_mask[beam[batch] - 1] for batch in range(bsz)], dim=0)
tgt_tokens = enc_output.new(bsz, length_beam_size, max_len).fill_(Constants.MASK).long()
tgt_tokens = (1 - length_mask) * tgt_tokens + length_mask * Constants.PAD
tgt_tokens = tgt_tokens.view(bsz * length_beam_size, max_len)
enc_output = enlarge(enc_output, length_beam_size)
category = enlarge(category, length_beam_size)
if teacher_enc_output is not None:
teacher_enc_output = enlarge(teacher_enc_output, length_beam_size)
hypotheses, lprobs, collect_results = strategy.generate(model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab)
tgt_lengths = (1 - length_mask).sum(-1) -1
hypotheses = hypotheses.view(bsz, length_beam_size, max_len)
lprobs = lprobs.view(bsz, length_beam_size, max_len)
tgt_lengths = tgt_lengths.view(bsz, length_beam_size)
#tgt_lengths = (1 - length_mask).sum(-1)-1
avg_log_prob = lprobs.sum(-1) / (tgt_lengths.float() ** beam_alpha)
best_lengths = avg_log_prob.max(-1)[1] # [batch_size]
best_lengths = best_lengths.unsqueeze(1).unsqueeze(2).repeat(1, 1, max_len) # [batch_size, 1, max_len]
hypotheses = hypotheses.gather(1, best_lengths).squeeze(1) # [batch_size, max_len]
#lprobs = lprobs.gather(1, best_lengths).squeeze(1) = [batch_size, max_len]
lprobs = None # For speedup
if collect_results:
collect_results = [item.view(bsz, length_beam_size, max_len) for item in collect_results]
#print(collect_results[0][0])
#print(collect_results[1][0])
#print(collect_results[2][0])
collect_results = [item.gather(1, best_lengths).squeeze(1) for item in collect_results]
lprobs = torch.stack(collect_results, dim=1)
return hypotheses, lprobs
hypotheses = torch.stack([hypotheses[b, l, :] for b, l in enumerate(best_lengths)], dim=0)
lprobs = torch.stack([lprobs[b, l, :] for b, l in enumerate(best_lengths)], dim=0)
return hypotheses, lprobs
def predict_length_beam(gold_target_len, predicted_lengths, length_beam_size, length_bias):
if gold_target_len is not None:
beam_starts = gold_target_len - (length_beam_size - 1) // 2
beam_ends = gold_target_len + length_beam_size // 2 + 1
#beam = torch.stack([torch.arange(7, 12, device=beam_starts.device) for batch in range(gold_target_len.size(0))], dim=0)
beam = torch.stack([torch.arange(beam_starts[batch], beam_ends[batch], device=beam_starts.device) for batch in range(gold_target_len.size(0))], dim=0)
else:
beam = predicted_lengths.topk(length_beam_size, dim=1)[1] + length_bias + 1
beam[beam < 4] = 4
beam[beam > 19] = 19
#print(beam)
return beam
'''
class MaskPredict(object):
    """Iterative mask-predict decoding strategy.

    Starting from a (fully or partially) masked target sequence, repeatedly
    re-masks the least-confident tokens and re-predicts them with a
    non-autoregressive decoder; an optional autoregressive teacher model
    rescores candidates to guide masking and final scoring.
    """
    def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False, **kwargs):
        # iterations: number of refinement passes (None => one per position).
        # dict_mapping: student-vocab id -> teacher-vocab id ({} disables it).
        # kwargs must contain 'opt'; note the explicit
        # collect_best_candidate_iterative_results parameter is ignored in
        # favor of the value read from opt.
        super().__init__()
        self.iterations = iterations
        self.random = np.random.RandomState(seed)
        self.dict_mapping = dict_mapping
        self.plot = plot
        self.collect_best_candidate_iterative_results = kwargs['opt'].get('collect_best_candidate_iterative_results', False)
        opt = kwargs['opt']
        self.paradigm = opt.get('paradigm', 'mp') # 'mp', 'l2r', 'r2l', 'lr2m'
        self.masking_decision = opt.get('masking_decision', False)
        self.no_candidate_decision = opt.get('no_candidate_decision', False)
    def generate_mp(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
        """Mask-predict refinement loop.

        Returns (tgt_tokens, lprobs, (collected_tokens, collected_scores), None);
        the trailing None is a placeholder for a visual mask used by other
        strategies in this file.
        """
        bsz, seq_len = tgt_tokens.size()
        pad_mask = tgt_tokens.eq(Constants.PAD)
        seq_lens = seq_len - pad_mask.sum(dim=1)
        collect_results = []
        collect_scores = []
        iterations = seq_len if self.iterations is None else self.iterations
        # initial full non-autoregressive prediction
        tgt_tokens, token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags)
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
        #tqdm.write("Iteration 0: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
        for counter in range(1, iterations):
            # teacher rescoring used only for choosing what to re-mask
            corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
            corresponding_probs[pad_mask] = 1.0
            # linearly decaying fraction of tokens to re-mask
            ratio = (1.0 - (counter / iterations))
            #ratio = max(ratio, 0.4)
            # Mask
            num_mask = (seq_lens.float() * ratio).long()
            mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
            if self.plot: plot(tgt_tokens, tgt_vocab, token_probs, corresponding_probs, num_mask, mask_ind, counter, teacher_model, split=True)
            tgt_tokens[mask_ind] = Constants.MASK
            # Predict
            new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags)
            token_probs[mask_ind] = new_token_probs[mask_ind]
            tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
            # Interact
            if self.collect_best_candidate_iterative_results:
                collect_results.append(tgt_tokens.clone())
                collect_scores.append(token_probs.clone())
            #tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
        # final teacher rescoring used for candidate (length-beam) selection
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
        corresponding_probs[pad_mask] = 1.0
        if self.plot:
            plot(tgt_tokens, tgt_vocab, token_probs, corresponding_probs, num_mask, mask_ind, counter+1, teacher_model, split=True)
        #lprobs = token_probs.log()
        lprobs = (token_probs * corresponding_probs).log()
        #eos_mask = tgt_tokens.eq(Constants.EOS)
        #non_pad_eos_mask = 1 - (eos_mask + pad_mask).gt(0)
        #lengths = non_pad_eos_mask.sum(-1)
        return tgt_tokens, lprobs, (collect_results, collect_scores), None
    def generate_sequential(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags, direction, step=1):
        """Left-to-right (direction=0) or right-to-left (direction=1)
        sequential decoding: reveal `step` positions per iteration instead of
        confidence-based re-masking.
        """
        bsz, seq_len = tgt_tokens.size()
        pad_mask = tgt_tokens.eq(Constants.PAD)
        non_pad_mask = tgt_tokens.ne(Constants.PAD)
        seq_lens = seq_len - pad_mask.sum(dim=1)
        collect_results = []
        collect_scores = []
        token_probs = tgt_tokens.new(*tgt_tokens.shape).fill_(0).float()
        token_probs[pad_mask] = 1.0
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
        # iteration order over positions, by direction
        itrs = [i for i in range(0, seq_len, step)] if direction == 0 else [i for i in range(seq_len-1, -1, -step)]
        for counter in itrs:
            corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
            corresponding_probs[pad_mask] = 1.0
            # mask out only the current window of `step` positions
            masks = torch.zeros(*token_probs.shape, device=token_probs.device)
            if direction == 0:
                masks[:, counter:min(counter+step,seq_len)] = 1
            else:
                masks[:, max(counter-step, 0):counter] = 1
            mask_ind = masks.byte() & non_pad_mask
            #print(mask_ind[1].tolist())
            tgt_tokens[mask_ind] = Constants.MASK
            tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
            new_tgt_tokens, new_token_probs, _ = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags)
            # Predict
            token_probs[mask_ind] = new_token_probs[mask_ind]
            tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
            tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
            if self.collect_best_candidate_iterative_results:
                collect_results.append(tgt_tokens.clone())
                collect_scores.append(token_probs.clone())
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
        corresponding_probs[pad_mask] = 1.0
        lprobs = (token_probs * corresponding_probs).log()
        return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
        # NOTE(review): everything below is unreachable (duplicated legacy tail
        # left after the return above).
        #lprobs = token_probs.log()
        lprobs = (token_probs * corresponding_probs).log()
        #eos_mask = tgt_tokens.eq(Constants.EOS)
        #non_pad_eos_mask = 1 - (eos_mask + pad_mask).gt(0)
        #lengths = non_pad_eos_mask.sum(-1)
        return tgt_tokens, lprobs, (collect_results, collect_scores), None
    def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
        """Dispatch to the paradigm-specific routine.

        NOTE(review): implicitly returns None for any paradigm other than
        'mp'/'l2r'/'r2l' (e.g. the 'lr2m' listed in __init__).
        """
        if self.paradigm == 'mp':
            return self.generate_mp(model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags)
        elif self.paradigm == 'l2r':
            return self.generate_sequential(model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags, direction=0)
        elif self.paradigm == 'r2l':
            return self.generate_sequential(model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags, direction=1)
    def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens, pad_mask, tags):
        """One full non-autoregressive decoder pass.

        Returns (argmax tokens, their probabilities, full distribution);
        PAD positions are restored with probability 1.
        """
        #print(enc_output[0])
        decoder_out, *_ = model.decoder(tgt_tokens, enc_output, category, tags=tags)
        if isinstance(decoder_out, list):
            decoder_out = decoder_out[-1]
        tgt_tokens, token_probs, all_probs = generate_step_with_prob(model.tgt_word_prj(decoder_out))
        tgt_tokens[pad_mask] = Constants.PAD
        token_probs[pad_mask] = 1.0
        return tgt_tokens, token_probs, all_probs
    def mapping(self, tgt_tokens):
        """Map each token id through dict_mapping (student -> teacher vocab)."""
        tokens = tgt_tokens.clone().flatten()
        for i, token in enumerate(tokens):
            tokens[i] = self.dict_mapping[token.item()]
        return tokens.view(*tgt_tokens.shape)
    def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, decision=True):
        """Score current tokens with the autoregressive teacher.

        Returns a (bsz, seq_len) tensor of the probabilities the teacher
        assigns to the chosen tokens; all-ones when no teacher is available
        or decision=False.
        """
        if teacher_model is None or not decision:
            return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
        if self.dict_mapping != {}:
            tokens = self.mapping(tgt_tokens)
        else:
            tokens = tgt_tokens
        # teacher consumes BOS-shifted input (teacher-forcing alignment)
        tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
        #print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
        decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
        if isinstance(decoder_out, list):
            decoder_out = decoder_out[-1]
        probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
        # pick out the probability of each actually-chosen token
        return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
    def select_worst(self, token_probs, num_mask):
        """Byte mask marking, per row, the num_mask[i] lowest-probability
        positions (at least one per row)."""
        masks = torch.zeros(*token_probs.shape, device=token_probs.device)
        for i in range(masks.size(0)):
            ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
            masks[i, ind] = 1
        return masks.byte()
    def select_random(self, token_probs, num_mask, seq_lens):
        """Index tensor of num_mask[i] uniformly-random positions per row;
        rows are padded by repeating the first index."""
        bsz, seq_len = token_probs.size()
        masks = []
        for i in range(bsz):
            ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
            ind = list(ind)
            ind += [ind[0]] * (seq_len - len(ind))
            masks.append(torch.LongTensor(ind))
        return torch.stack(masks, dim=0).to(token_probs.device)
    def select_multinomial(self, token_probs, num_mask, seq_lens):
        """Like select_random, but positions are sampled proportionally to
        exp(-token_probs), i.e. low-confidence positions are more likely."""
        probs = torch.exp(-token_probs)
        bsz, seq_len = token_probs.size()
        masks = []
        for i in range(bsz):
            ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
            ind = list(ind)
            ind += [ind[0]] * (seq_len - len(ind))
            masks.append(torch.LongTensor(ind))
        return torch.stack(masks, dim=0).to(token_probs.device)
def generate(model, teacher_model, encoder_outputs, teacher_encoder_outputs, category, tgt_tokens, tgt_vocab, opt, dict_mapping, length_bias, tags):
    """Length-beam non-autoregressive decoding entry point.

    Builds a decoding strategy from opt['method'], expands the encoder
    outputs over a beam of candidate target lengths, runs iterative
    refinement, and returns the hypotheses of the best-scoring length.

    Returns:
        (hypotheses, lprobs): hypotheses is [batch_size, max_len]; lprobs is
        None unless iterative candidates were collected, in which case it is
        a (stacked_tokens, stacked_scores) tuple.
    """
    if opt['method'] == 'mp':
        func = MaskPredict
    elif opt['method'] == 'direct':
        func = NVA
    elif opt['method'] == 'ap':
        func = AllPredict
    elif opt['method'] == 'signal' or opt['method'] == 'signal2':
        func = Signal
    elif opt['method'] == 'signal3':
        func = Signal3
    elif opt['method'] == 'nv':
        func = NV
    elif opt['method'] == 'ms':
        func = MS
    else:
        # Bug fix: an unknown method previously fell through and crashed
        # with NameError on the unbound `func`; fail with a clear message.
        raise ValueError("unknown decoding method: %r" % (opt['method'],))
    strategy = func(opt['iterations'], opt['seed'], dict_mapping=dict_mapping, masking_ratio=opt['masking_ratio'], opt=opt)
    length_beam_size = opt['length_beam_size']
    if opt.get('load_generated_captions', False):
        gold_target_len = tgt_tokens.ne(Constants.PAD).sum(-1)
    else:
        gold_target_len = None
    #gold_target_len = tgt_tokens.ne(Constants.PAD).sum(-1) if opt['use_gold_target_len'] else None
    beam_alpha = opt.get('beam_alpha', 1.0)
    enc_output, pred_length = encoder_outputs['enc_output'], encoder_outputs['pred_length']
    if teacher_encoder_outputs is not None:
        teacher_enc_output = teacher_encoder_outputs['enc_output']
        if isinstance(teacher_enc_output, list):
            teacher_enc_output = teacher_enc_output[0]
    else:
        teacher_enc_output = None
    if isinstance(enc_output, list):
        assert len(enc_output) == 1
        enc_output = enc_output[0]
    bsz = enc_output.size(0)
    beam = predict_length_beam(gold_target_len, pred_length, length_beam_size, length_bias)
    max_len = beam.max().item()
    # length_mask[b, k] row: 1 marks positions beyond candidate length beam[b][k]
    length_mask = torch.triu(enc_output.new(max_len, max_len).fill_(1).long(), 1)
    length_mask = torch.stack([length_mask[beam[batch] - 1] for batch in range(bsz)], dim=0)
    if gold_target_len is not None:
        # start from the provided captions, masking only their PAD positions
        tgt_tokens = tgt_tokens[:, :max_len]
        tgt_tokens[tgt_tokens==Constants.PAD] = Constants.MASK
        tgt_tokens = tgt_tokens.unsqueeze(1).repeat(1, length_beam_size, 1)
    else:
        # start from a fully masked canvas (or EOS-filled, if configured)
        tgt_tokens = enc_output.new(bsz, length_beam_size, max_len).fill_(Constants.MASK if not opt.get('use_eos',False) else Constants.EOS).long()
    tgt_tokens = (1 - length_mask) * tgt_tokens + length_mask * Constants.PAD
    tgt_tokens = tgt_tokens.view(bsz * length_beam_size, max_len)
    # replicate conditioning inputs across the length beam
    enc_output = enlarge(enc_output, length_beam_size)
    category = enlarge(category, length_beam_size)
    if tags is not None:
        tags = enlarge(tags, length_beam_size)
    if teacher_enc_output is not None:
        teacher_enc_output = enlarge(teacher_enc_output, length_beam_size)
    hypotheses, lprobs, collect_results, visual_mask = strategy.generate(model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags)
    if visual_mask is not None:
        visual_mask = visual_mask.view(bsz, length_beam_size).long()
        tgt_lengths = (1 - length_mask).sum(-1) - visual_mask
    else:
        tgt_lengths = (1 - length_mask).sum(-1)
    hypotheses = hypotheses.view(bsz, length_beam_size, max_len)
    lprobs = lprobs.view(bsz, length_beam_size, max_len)
    tgt_lengths = tgt_lengths.view(bsz, length_beam_size)
    # length-normalized log-prob selects the best candidate length per sample
    avg_log_prob = lprobs.sum(-1) / (tgt_lengths.float() ** beam_alpha)
    best_lengths = avg_log_prob.max(-1)[1]  # [batch_size]
    best_lengths = best_lengths.unsqueeze(1).unsqueeze(2).repeat(1, 1, max_len)  # [batch_size, 1, max_len]
    hypotheses = hypotheses.gather(1, best_lengths).squeeze(1)  # [batch_size, max_len]
    lprobs = None  # For speedup
    assert isinstance(collect_results, tuple)
    if collect_results[0]:
        sents, scores = collect_results
        if not opt.get('not_only_best_candidate', False) and not opt.get('collect_last', False):
            sents = [item.view(bsz, length_beam_size, max_len) for item in sents]
            sents = [item.gather(1, best_lengths).squeeze(1) for item in sents]
            scores = [item.view(bsz, length_beam_size, max_len) for item in scores]
            scores = [item.gather(1, best_lengths).squeeze(1) for item in scores]
        lprobs = (torch.stack(sents, dim=1), torch.stack(scores, dim=1))
    # Bug fix: the legacy fallback stacked `lprobs[b, l, :]` here, but lprobs
    # was already set to None and hypotheses already reduced to 2-D, so the
    # path crashed whenever candidate collection was disabled. The best
    # hypotheses are already selected above; just return them.
    return hypotheses, lprobs
def predict_length_beam(gold_target_len, predicted_lengths, length_beam_size, length_bias):
    """Build a beam of candidate target lengths per sample.

    With gold lengths, the beam is a contiguous window centered on each gold
    length; otherwise it is the top-k of the predicted length distribution
    shifted by `length_bias`. Lengths are clamped to [4, 19].
    """
    if gold_target_len is None:
        beam = predicted_lengths.topk(length_beam_size, dim=1)[1] + length_bias
    else:
        starts = gold_target_len - (length_beam_size - 1) // 2
        ends = gold_target_len + length_beam_size // 2 + 1
        rows = [torch.arange(starts[b], ends[b], device=starts.device)
                for b in range(gold_target_len.size(0))]
        beam = torch.stack(rows, dim=0)
    beam.clamp_(4, 19)
    return beam
'''
class NVA(object):
def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False):
super().__init__()
self.iterations = iterations
self.random = np.random.RandomState(seed)
self.dict_mapping = dict_mapping
self.plot = plot
self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
collect_results = []
iterations = seq_len if self.iterations is None else self.iterations
tgt_tokens, token_probs, all_probs = self.generate_non_autoregressive(model.decoder.decoder1, enc_output, category, tgt_tokens, model.tgt_word_prj)
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)#, no_masking_desicion=True)
tgt_tokens[pad_mask] = Constants.PAD
token_probs[pad_mask] = 1.0
corresponding_probs[pad_mask] = 1.0
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
tqdm.write("Iteration 0: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
for counter in range(1, iterations):
ratio = (1.0 - (counter / iterations))
ratio = max(ratio, 0.4)
# Mask
if counter == 1:
mask_ind = tgt_tokens.eq(Constants.MASK)
else:
num_mask = (seq_lens.float() * ratio).long()
mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
if self.plot: plot(tgt_tokens, tgt_vocab, token_probs, corresponding_probs, num_mask, mask_ind, counter, teacher_model, split=True)
tgt_tokens[mask_ind] = Constants.MASK
# Predict
new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model.decoder.decoder2, enc_output, category, tgt_tokens, model.tgt_word_prj)
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
# Interact
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)
corresponding_probs[pad_mask] = 1.0
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
if self.plot:
plot(tgt_tokens, tgt_vocab, token_probs, corresponding_probs, num_mask, mask_ind, counter+1, teacher_model, split=True)
#lprobs = token_probs.log()
lprobs = (token_probs * corresponding_probs).log()
#eos_mask = tgt_tokens.eq(Constants.EOS)
#non_pad_eos_mask = 1 - (eos_mask + pad_mask).gt(0)
#lengths = non_pad_eos_mask.sum(-1)
return tgt_tokens, lprobs, collect_results
def generate_non_autoregressive(self, decoder, enc_output, category, tgt_tokens, tgt_word_prj):
#print(enc_output[0])
decoder_out, *_ = decoder(tgt_tokens, enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
tgt_tokens, token_probs, all_probs = generate_step_with_prob(tgt_word_prj(decoder_out))
return tgt_tokens, token_probs, all_probs
def mapping(self, tgt_tokens):
tokens = tgt_tokens.clone().flatten()
for i, token in enumerate(tokens):
tokens[i] = self.dict_mapping[token.item()]
return tokens.view(*tgt_tokens.shape)
def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
if teacher_model is None or no_masking_desicion:
return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
if self.dict_mapping != {}:
tokens = self.mapping(tgt_tokens)
else:
tokens = tgt_tokens
tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
#print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
def select_worst(self, token_probs, num_mask):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
def select_random(self, token_probs, num_mask, seq_lens):
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
def select_multinomial(self, token_probs, num_mask, seq_lens):
    """Sample positions with weight proportional to ``exp(-token_probs)``.

    Low-confidence positions are therefore more likely to be chosen.
    Row i draws ``max(1, num_mask[i])`` positions (without replacement)
    from its first ``seq_lens[i]`` entries, then right-pads with the
    first drawn index up to seq_len. Returns a (batch, seq_len)
    LongTensor on the same device as *token_probs*.
    """
    weights = torch.exp(-token_probs)
    batch_size, seq_len = token_probs.size()
    rows = []
    for row in range(batch_size):
        drawn = weights[row, :int(seq_lens[row])].multinomial(max(1, num_mask[row].item())).tolist()
        drawn += [drawn[0]] * (seq_len - len(drawn))
        rows.append(torch.LongTensor(drawn))
    return torch.stack(rows, dim=0).to(token_probs.device)
'''
class NVA(object):
def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False, **kwargs):
super().__init__()
self.iterations = iterations
self.random = np.random.RandomState(seed)
self.dict_mapping = dict_mapping
self.plot = plot
self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
self.masking_ratio = kwargs['masking_ratio']
def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
collect_results = []
iterations = self.iterations
tgt_tokens, token_probs, all_probs = self.generate_non_autoregressive(model.decoder.decoder1, enc_output, category, tgt_tokens, model.tgt_word_prj)
tgt_tokens[pad_mask] = Constants.PAD
#tqdm.write("Iteration 0: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
if iterations > 1:
tgt_tokens, token_probs, all_probs = self.generate_non_autoregressive(model.decoder.decoder2, enc_output, category, tgt_tokens, model.tgt_word_prj)
tgt_tokens[pad_mask] = Constants.PAD
#tqdm.write("Iteration 1: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
token_probs[pad_mask] = 1.0
for counter in range(2, iterations):
ratio = (1.0 - (counter / iterations))
# Mask
num_mask = (seq_lens.float() * ratio).long()
mask_ind = self.select_worst(token_probs, num_mask)
tgt_tokens[mask_ind] = Constants.MASK
# Predict
new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model.decoder.decoder2, enc_output, category, tgt_tokens, model.tgt_word_prj)
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
#tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
lprobs = (token_probs).log()
return tgt_tokens, lprobs, collect_results
def generate_non_autoregressive(self, decoder, enc_output, category, tgt_tokens, tgt_word_prj, zeros=[]):
#print(enc_output[0])
decoder_out, *_ = decoder(tgt_tokens, enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
tgt_tokens, token_probs, all_probs = generate_step_with_prob(tgt_word_prj(decoder_out), zeros=zeros)
return tgt_tokens, token_probs, all_probs
def mapping(self, tgt_tokens):
tokens = tgt_tokens.clone().flatten()
for i, token in enumerate(tokens):
tokens[i] = self.dict_mapping[token.item()]
return tokens.view(*tgt_tokens.shape)
def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
if teacher_model is None or no_masking_desicion:
return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
if self.dict_mapping != {}:
tokens = self.mapping(tgt_tokens)
else:
tokens = tgt_tokens
tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
#print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
def select_worst(self, token_probs, num_mask):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
def select_random(self, token_probs, num_mask, seq_lens):
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
def select_multinomial(self, token_probs, num_mask, seq_lens):
probs = torch.exp(-token_probs)
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
class AllPredict(object):
def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False):
super().__init__()
self.iterations = iterations
self.random = np.random.RandomState(seed)
self.dict_mapping = dict_mapping
self.plot = plot
self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
non_pad_mask = tgt_tokens.ne(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
collect_results = []
iterations = seq_len if self.iterations is None else self.iterations
tgt_tokens, token_probs, all_probs = self.generate_non_autoregressive(model.decoder.decoder1, enc_output, category, tgt_tokens, model.tgt_word_prj, zeros=[Constants.MASK])
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)#, no_masking_desicion=True)
tgt_tokens[pad_mask] = Constants.PAD
token_probs[pad_mask] = 1.0
corresponding_probs[pad_mask] = 1.0
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
tqdm.write("Iteration 0: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
for counter in range(1, iterations):
# Predict
new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model.decoder.decoder2, enc_output, category, tgt_tokens, model.tgt_word_prj)
token_probs[non_pad_mask] = new_token_probs[non_pad_mask]
tgt_tokens[non_pad_mask] = new_tgt_tokens[non_pad_mask]
# Interact
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)
corresponding_probs[pad_mask] = 1.0
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
if self.plot:
plot(tgt_tokens, tgt_vocab, token_probs, corresponding_probs, num_mask, mask_ind, counter+1, teacher_model, split=True)
#lprobs = token_probs.log()
lprobs = (token_probs * corresponding_probs).log()
#eos_mask = tgt_tokens.eq(Constants.EOS)
#non_pad_eos_mask = 1 - (eos_mask + pad_mask).gt(0)
#lengths = non_pad_eos_mask.sum(-1)
return tgt_tokens, lprobs, collect_results
def generate_non_autoregressive(self, decoder, enc_output, category, tgt_tokens, tgt_word_prj, zeros=[]):
#print(enc_output[0])
decoder_out, *_ = decoder(tgt_tokens, enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
tgt_tokens, token_probs, all_probs = generate_step_with_prob(tgt_word_prj(decoder_out), zeros=zeros)
return tgt_tokens, token_probs, all_probs
def mapping(self, tgt_tokens):
tokens = tgt_tokens.clone().flatten()
for i, token in enumerate(tokens):
tokens[i] = self.dict_mapping[token.item()]
return tokens.view(*tgt_tokens.shape)
def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
if teacher_model is None or no_masking_desicion:
return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
if self.dict_mapping != {}:
tokens = self.mapping(tgt_tokens)
else:
tokens = tgt_tokens
tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
#print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
def select_worst(self, token_probs, num_mask):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
def select_random(self, token_probs, num_mask, seq_lens):
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
def select_multinomial(self, token_probs, num_mask, seq_lens):
probs = torch.exp(-token_probs)
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
class Signal(object):
def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False, **kwargs):
super().__init__()
self.iterations = iterations
self.random = np.random.RandomState(seed)
self.dict_mapping = dict_mapping
self.plot = plot
self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
self.masking_ratio = kwargs['masking_ratio']
def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
collect_results = []
iterations = self.iterations
#tqdm.write("Initilazation: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
tgt_tokens, token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, signal=0)
tgt_tokens[pad_mask] = Constants.PAD
tqdm.write("Iteration 0: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
if iterations > 1:
tgt_tokens, token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, signal=1)
tgt_tokens[pad_mask] = Constants.PAD
tqdm.write("Iteration 1: " + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
token_probs[pad_mask] = 1.0
for counter in range(2, iterations):
ratio = (1.0 - (counter / iterations))
#ratio = max(ratio, 0.4)
# Mask
num_mask = (seq_lens.float() * ratio).long()
mask_ind = self.select_worst(token_probs, num_mask)
tgt_tokens[mask_ind] = Constants.MASK
tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
# Predict
new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, signal=1)
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)
corresponding_probs[pad_mask] = 1.0
lprobs = (token_probs * corresponding_probs).log()
#lprobs = (token_probs).log()
return tgt_tokens, lprobs, collect_results
def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens, signal, zeros=[]):
decoder_out = model.decoder.forward_(tgt_tokens, enc_output, category, signal=signal)
tgt_tokens, token_probs, all_probs = generate_step_with_prob(model.tgt_word_prj(decoder_out), zeros=zeros)
return tgt_tokens, token_probs, all_probs
def mapping(self, tgt_tokens):
tokens = tgt_tokens.clone().flatten()
for i, token in enumerate(tokens):
tokens[i] = self.dict_mapping[token.item()]
return tokens.view(*tgt_tokens.shape)
def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
if teacher_model is None or no_masking_desicion:
return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
if self.dict_mapping != {}:
tokens = self.mapping(tgt_tokens)
else:
tokens = tgt_tokens
tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
#print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
def select_worst(self, token_probs, num_mask):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
def select_random(self, token_probs, num_mask, seq_lens):
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
def select_multinomial(self, token_probs, num_mask, seq_lens):
probs = torch.exp(-token_probs)
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
'''
class Signal3(object):
    """Two-stream ("visual" / "non-visual") mask-predict decoding strategy.

    NOTE(review): this class is immediately re-defined below (a second
    `class Signal3` at the same scope), so this first definition is dead
    code at runtime — the later definition shadows it. Kept as-is; decide
    which variant is intended and delete the other.
    """
    def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False, **kwargs):
        super().__init__()
        self.iterations = iterations
        self.random = np.random.RandomState(seed)
        self.dict_mapping = dict_mapping  # student->teacher vocab id mapping ({} = identity)
        self.plot = plot
        self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
        opt = kwargs['opt']
        # Special token ids used to tag masked positions per stream.
        self.visual_tag = opt['visual_tag']
        self.nonvisual_tag = opt['nonvisual_tag']
        self.revision_tag = opt['revision_tag']
    def separation_integration(self, model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab):
        """Decode masked positions twice (visual / non-visual streams) and fuse.

        Each stream re-tags the MASK positions with its own tag, decodes with
        its own signal, then the per-position winner (higher probability) is
        kept. Positions where both streams kept their tag ("blank") fall back
        to the pre-replacement probabilities.
        """
        mask_ind = tgt_tokens.eq(Constants.MASK)
        t1, t2 = tgt_tokens.clone(), tgt_tokens.clone()
        t1[mask_ind], t2[mask_ind] = self.visual_tag, self.nonvisual_tag
        t1, t1_probs, copy1 = self.generate_non_autoregressive(model, enc_output, category, t1, pad_mask, signal=0, tag_replace=[self.visual_tag, self.revision_tag])
        tqdm.write(" Visual : " + to_sentence(t1[0].tolist(), tgt_vocab))
        t2, t2_probs, copy2 = self.generate_non_autoregressive(model, enc_output, category, t2, pad_mask, signal=1, tag_replace=[self.nonvisual_tag, self.revision_tag])
        tqdm.write(" Non Visual : " + to_sentence(t2[0].tolist(), tgt_vocab))
        # Positions where both streams still emit their own tag.
        ind_blank = t1.eq(self.visual_tag) & t2.eq(self.nonvisual_tag)
        # Keep, per position, the token of the more confident stream.
        ind = t2_probs > t1_probs
        t1[ind] = t2[ind]
        t1_probs[ind] = t2_probs[ind]
        t1_probs[ind_blank] = torch.max(copy1[ind_blank], copy2[ind_blank])
        tqdm.write(" Fusion : " + to_sentence(t1[0].tolist(), tgt_vocab))
        return t1, t1_probs
    def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
        """Iteratively refine tokens: fuse two streams, then mask-and-repredict.

        Returns (tokens, log-probabilities, collected intermediate results).
        NOTE(review): unlike the shadowing definition below, the re-prediction
        here replaces ALL tokens/probs each iteration instead of only the
        masked positions — confirm which behavior is intended.
        """
        bsz, seq_len = tgt_tokens.size()
        pad_mask = tgt_tokens.eq(Constants.PAD)
        seq_lens = seq_len - pad_mask.sum(dim=1)
        collect_results = []
        iterations = self.iterations
        tgt_tokens, token_probs = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab)
        for counter in range(1, iterations):
            # Linearly decaying fraction of positions to re-mask.
            ratio = (1.0 - (counter / iterations))
            num_mask = (seq_lens.float() * ratio).long()
            mask_ind = self.select_worst(token_probs, num_mask)
            tgt_tokens[mask_ind] = Constants.MASK
            # Predict
            tgt_tokens, token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, signal=2)
            tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
        lprobs = (token_probs).log()
        return tgt_tokens, lprobs, collect_results
    def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens, pad_mask, signal, zeros=[], tag_replace=None):
        """One decoding pass; optionally replace a tag token and zero its prob.

        Returns (tokens, probs) normally, or (tokens, probs, pre-replacement
        probs) when `tag_replace=[source, target]` is given.
        NOTE(review): `zeros=[]` is a mutable default argument — harmless here
        since it is only forwarded, but worth changing to None.
        """
        decoder_out = model.decoder.forward_(tgt_tokens, enc_output, category, signal=signal)
        tgt_tokens, token_probs, all_probs = generate_step_with_prob(model.tgt_word_prj(decoder_out), zeros=zeros)
        tgt_tokens[pad_mask] = Constants.PAD
        token_probs[pad_mask] = 1.0
        if tag_replace is not None:
            source, target = tag_replace
            ind = tgt_tokens.eq(source)
            tgt_tokens[ind] = target
            copy_ = token_probs.clone()
            # Replaced tags score 0 so they are re-masked first later.
            token_probs[ind] = 0.0
            return tgt_tokens, token_probs, copy_
        return tgt_tokens, token_probs
    def mapping(self, tgt_tokens):
        """Translate token ids through self.dict_mapping, preserving shape."""
        tokens = tgt_tokens.clone().flatten()
        for i, token in enumerate(tokens):
            tokens[i] = self.dict_mapping[token.item()]
        return tokens.view(*tgt_tokens.shape)
    def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
        """Probability of each token under the teacher (1.0 when disabled)."""
        if teacher_model is None or no_masking_desicion:
            return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
        if self.dict_mapping != {}:
            tokens = self.mapping(tgt_tokens)
        else:
            tokens = tgt_tokens
        # Teacher-force: prepend BOS and drop the last token (right shift).
        tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
        decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
        if isinstance(decoder_out, list):
            decoder_out = decoder_out[-1]
        probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
        return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
    def select_worst(self, token_probs, num_mask):
        """uint8 mask of the num_mask[i] lowest-probability positions per row."""
        masks = torch.zeros(*token_probs.shape, device=token_probs.device)
        for i in range(masks.size(0)):
            ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
            masks[i, ind] = 1
        return masks.byte()
    def select_random(self, token_probs, num_mask, seq_lens):
        """Random position indices per row, padded with the first index."""
        bsz, seq_len = token_probs.size()
        masks = []
        for i in range(bsz):
            ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
            ind = list(ind)
            ind += [ind[0]] * (seq_len - len(ind))
            masks.append(torch.LongTensor(ind))
        return torch.stack(masks, dim=0).to(token_probs.device)
    def select_multinomial(self, token_probs, num_mask, seq_lens):
        """Positions sampled proportionally to exp(-prob), low-confidence first."""
        probs = torch.exp(-token_probs)
        bsz, seq_len = token_probs.size()
        masks = []
        for i in range(bsz):
            ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
            ind = list(ind)
            ind += [ind[0]] * (seq_len - len(ind))
            masks.append(torch.LongTensor(ind))
        return torch.stack(masks, dim=0).to(token_probs.device)
class Signal3(object):
    """Two-stream mask-predict decoding strategy (revision-tag variant).

    NOTE(review): this re-definition shadows the identically named class
    defined immediately above; this is the `Signal3` visible at runtime.
    It differs from the first in the fusion rule (blanks are zeroed, not
    rescued) and in re-predicting only the masked positions per iteration.
    """
    def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False, **kwargs):
        super().__init__()
        self.iterations = iterations
        self.random = np.random.RandomState(seed)
        self.dict_mapping = dict_mapping  # student->teacher vocab id mapping ({} = identity)
        self.plot = plot
        self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
        opt = kwargs['opt']
        # Special token ids used to tag masked positions per stream.
        self.visual_tag = opt['visual_tag']
        self.nonvisual_tag = opt['nonvisual_tag']
        self.revision_tag = opt['revision_tag']
    def separation_integration(self, model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab):
        """Decode masked positions twice (visual / non-visual streams) and fuse.

        Both streams map their surviving tag to `revision_tag` (probability 0),
        and positions where both streams produced `revision_tag` keep
        probability 0 so they are re-masked first in later iterations.
        """
        mask_ind = tgt_tokens.eq(Constants.MASK)
        t1, t2 = tgt_tokens.clone(), tgt_tokens.clone()
        t1[mask_ind], t2[mask_ind] = self.visual_tag, self.nonvisual_tag
        t1, t1_probs, copy1 = self.generate_non_autoregressive(model, enc_output, category, t1, pad_mask, signal=0, tag_replace=[self.revision_tag, self.revision_tag])
        tqdm.write(" Visual : " + to_sentence(t1[0].tolist(), tgt_vocab))
        t2, t2_probs, copy2 = self.generate_non_autoregressive(model, enc_output, category, t2, pad_mask, signal=1, tag_replace=[self.revision_tag, self.revision_tag])
        tqdm.write(" Non Visual : " + to_sentence(t2[0].tolist(), tgt_vocab))
        # Positions where both streams emitted the revision tag.
        ind_blank = t1.eq(self.revision_tag) & t2.eq(self.revision_tag)
        # Keep, per position, the token of the more confident stream.
        ind = t2_probs > t1_probs
        t1[ind] = t2[ind]
        t1_probs[ind] = t2_probs[ind]
        t1_probs[ind_blank] = 0.0
        tqdm.write(" Fusion : " + to_sentence(t1[0].tolist(), tgt_vocab))
        return t1, t1_probs
    def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
        """Iteratively refine: fuse two streams, then mask-and-repredict.

        Unlike the shadowed definition above, only the masked positions are
        updated with the new predictions each iteration.
        Returns (tokens, log-probabilities, collected intermediate results).
        """
        bsz, seq_len = tgt_tokens.size()
        pad_mask = tgt_tokens.eq(Constants.PAD)
        seq_lens = seq_len - pad_mask.sum(dim=1)
        collect_results = []
        iterations = self.iterations
        tgt_tokens, token_probs = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab)
        for counter in range(1, iterations):
            # Linearly decaying fraction of positions to re-mask.
            ratio = (1.0 - (counter / iterations))
            num_mask = (seq_lens.float() * ratio).long()
            mask_ind = self.select_worst(token_probs, num_mask)
            tgt_tokens[mask_ind] = Constants.MASK
            tqdm.write(("Iteration %d_0: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
            # Re-predict, then update only the masked positions.
            new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, signal=2)
            token_probs[mask_ind] = new_token_probs[mask_ind]
            tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
            tqdm.write(("Iteration %d_1: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
        lprobs = (token_probs).log()
        return tgt_tokens, lprobs, collect_results
    def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens, pad_mask, signal, zeros=[], tag_replace=None):
        """One decoding pass; optionally replace a tag token and zero its prob.

        Returns (tokens, probs) normally, or (tokens, probs, pre-replacement
        probs) when `tag_replace=[source, target]` is given.
        NOTE(review): `zeros=[]` is a mutable default argument — harmless here
        since it is only forwarded, but worth changing to None.
        """
        decoder_out = model.decoder.forward_(tgt_tokens, enc_output, category, signal=signal)
        tgt_tokens, token_probs, all_probs = generate_step_with_prob(model.tgt_word_prj(decoder_out), zeros=zeros)
        tgt_tokens[pad_mask] = Constants.PAD
        token_probs[pad_mask] = 1.0
        if tag_replace is not None:
            source, target = tag_replace
            ind = tgt_tokens.eq(source)
            tgt_tokens[ind] = target
            copy_ = token_probs.clone()
            # Replaced tags score 0 so they are re-masked first later.
            token_probs[ind] = 0.0
            return tgt_tokens, token_probs, copy_
        return tgt_tokens, token_probs
    def mapping(self, tgt_tokens):
        """Translate token ids through self.dict_mapping, preserving shape."""
        tokens = tgt_tokens.clone().flatten()
        for i, token in enumerate(tokens):
            tokens[i] = self.dict_mapping[token.item()]
        return tokens.view(*tgt_tokens.shape)
    def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
        """Probability of each token under the teacher (1.0 when disabled)."""
        if teacher_model is None or no_masking_desicion:
            return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
        if self.dict_mapping != {}:
            tokens = self.mapping(tgt_tokens)
        else:
            tokens = tgt_tokens
        # Teacher-force: prepend BOS and drop the last token (right shift).
        tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
        decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
        if isinstance(decoder_out, list):
            decoder_out = decoder_out[-1]
        probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
        return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
    def select_worst(self, token_probs, num_mask):
        """uint8 mask of the num_mask[i] lowest-probability positions per row."""
        masks = torch.zeros(*token_probs.shape, device=token_probs.device)
        for i in range(masks.size(0)):
            ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
            masks[i, ind] = 1
        return masks.byte()
    def select_random(self, token_probs, num_mask, seq_lens):
        """Random position indices per row, padded with the first index."""
        bsz, seq_len = token_probs.size()
        masks = []
        for i in range(bsz):
            ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
            ind = list(ind)
            ind += [ind[0]] * (seq_len - len(ind))
            masks.append(torch.LongTensor(ind))
        return torch.stack(masks, dim=0).to(token_probs.device)
    def select_multinomial(self, token_probs, num_mask, seq_lens):
        """Positions sampled proportionally to exp(-prob), low-confidence first."""
        probs = torch.exp(-token_probs)
        bsz, seq_len = token_probs.size()
        masks = []
        for i in range(bsz):
            ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
            ind = list(ind)
            ind += [ind[0]] * (seq_len - len(ind))
            masks.append(torch.LongTensor(ind))
        return torch.stack(masks, dim=0).to(token_probs.device)
'''
class Signal3(object):
def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False, **kwargs):
super().__init__()
self.iterations = iterations
self.random = np.random.RandomState(seed)
self.dict_mapping = dict_mapping
self.plot = plot
self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
opt = kwargs['opt']
self.visual_tag = opt['visual_tag']
self.nonvisual_tag = opt['nonvisual_tag']
self.revision_tag = opt['revision_tag']
def separation_integration(self, model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab):
mask_ind = tgt_tokens.eq(Constants.MASK)
t1, t2 = tgt_tokens.clone(), tgt_tokens.clone()
t1[mask_ind], t2[mask_ind] = self.visual_tag, self.nonvisual_tag
t1, t1_probs, copy1 = self.generate_non_autoregressive(model, enc_output, category, t1, pad_mask, signal=0, tag_replace=[self.visual_tag, self.revision_tag])
tqdm.write(" Visual : " + to_sentence(t1[0].tolist(), tgt_vocab))
t2, t2_probs, copy2 = self.generate_non_autoregressive(model, enc_output, category, t2, pad_mask, signal=0, tag_replace=[self.nonvisual_tag, self.revision_tag])
tqdm.write(" Non Visual : " + to_sentence(t2[0].tolist(), tgt_vocab))
ind_blank = t1.eq(self.visual_tag) & t2.eq(self.nonvisual_tag)
ind = t2_probs > t1_probs
t1[ind] = t2[ind]
t1_probs[ind] = t2_probs[ind]
t1_probs[ind_blank] = 0.0 #torch.max(copy1[ind_blank], copy2[ind_blank])
tqdm.write(" Fusion : " + to_sentence(t1[0].tolist(), tgt_vocab))
return t1, t1_probs
def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
collect_results = []
iterations = self.iterations
tgt_tokens, token_probs = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab)
for counter in range(1, iterations):
ratio = (1.0 - (counter / iterations))
num_mask = (seq_lens.float() * ratio).long()
mask_ind = self.select_worst(token_probs, num_mask)
tgt_tokens[mask_ind] = Constants.MASK
# Predict
new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, signal=1)
# Predict
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[0].tolist(), tgt_vocab))
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)
corresponding_probs[pad_mask] = 1.0
lprobs = (token_probs * corresponding_probs).log()
#lprobs = (token_probs).log()
return tgt_tokens, lprobs, collect_results
def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens, pad_mask, signal, zeros=[], tag_replace=None):
decoder_out = model.decoder.forward_(tgt_tokens, enc_output, category, signal=signal)
tgt_tokens, token_probs, all_probs = generate_step_with_prob(model.tgt_word_prj(decoder_out), zeros=zeros)
tgt_tokens[pad_mask] = Constants.PAD
token_probs[pad_mask] = 1.0
if tag_replace is not None:
source, target = tag_replace
ind = tgt_tokens.eq(source)
tgt_tokens[ind] = target
copy_ = token_probs.clone()
token_probs[ind] = 0.0
return tgt_tokens, token_probs, copy_
return tgt_tokens, token_probs
def mapping(self, tgt_tokens):
tokens = tgt_tokens.clone().flatten()
for i, token in enumerate(tokens):
tokens[i] = self.dict_mapping[token.item()]
return tokens.view(*tgt_tokens.shape)
def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
if teacher_model is None or no_masking_desicion:
return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
if self.dict_mapping != {}:
tokens = self.mapping(tgt_tokens)
else:
tokens = tgt_tokens
tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
#print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
def select_worst(self, token_probs, num_mask):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
def select_random(self, token_probs, num_mask, seq_lens):
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
def select_multinomial(self, token_probs, num_mask, seq_lens):
probs = torch.exp(-token_probs)
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
class NV(object):
def __init__(self, iterations, seed, dict_mapping, plot=False, **kwargs):
super().__init__()
self.iterations = iterations
self.random = np.random.RandomState(seed)
self.dict_mapping = dict_mapping
self.plot = plot
opt = kwargs['opt']
self.visual_tag = opt['visual_tag']
self.nonvisual_tag = opt['nonvisual_tag']
self.revision_tag = opt['revision_tag']
self.masking_decision = opt.get('masking_decision', False)
self.no_candidate_decision = opt.get('no_candidate_decision', False)
self.collect_best_candidate_iterative_results = opt.get('collect_best_candidate_iterative_results', False)
self.collect_last = opt.get('collect_last', False)
self.scale = opt.get('nv_scale', 0.0)
self.fixed_iterations = opt.get('fixed_iterations', -1)
self.load_generated_captions = opt.get('load_generated_captions', False)
if self.fixed_iterations != -1: assert self.scale > 0
#assert self.fixed_iterations <= self.iterations - 2
self.paradigm = opt.get('paradigm', 'mp') # 'mp', 'l2r', 'r2l', 'lr2m'
self.q = opt.get('q', 1)
def separation_integration(self, model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags):
    """Build the initial caption hypothesis by fusing two decoding passes.

    Pass 1 ("visual"): masked slots are replaced by ``self.visual_tag`` and
    decoded with ``signal=0``.  Pass 2 ("mask"): the original masked sequence
    is decoded with ``signal=1``.  How the passes are merged is selected by
    the sentinel values of ``self.scale`` (0 / 100 / 1000 / 10000 / other).

    Returns ``(tokens, per-token probabilities, fused-position mask or None)``.

    NOTE(review): ``t2`` aliases ``tgt_tokens``, so the caller's tensor is
    mutated in place — presumably intentional; confirm at call sites.
    """
    if self.load_generated_captions:
        # Score externally generated captions instead of decoding anew.
        new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags, signal=1, return_all_probs=True)
        # Probability of each *given* token under the model (scaled by 1/3).
        token_probs = all_probs.gather(2, tgt_tokens.unsqueeze(2)).squeeze(2) / 3
        token_probs[pad_mask] = 1.0
        return tgt_tokens, token_probs, None
    mask_ind = tgt_tokens.eq(Constants.MASK)
    # t1 is a copy for the visual pass; t2 ALIASES tgt_tokens (in-place).
    t1, t2 = tgt_tokens.clone(), tgt_tokens
    t1[mask_ind] = self.visual_tag
    t1, t1_probs = self.generate_non_autoregressive(model, enc_output, category, t1, pad_mask, tags, signal=0)
    if self.scale == 100:
        #token_probs = tgt_tokens.new(*tgt_tokens.shape).fill_(0).float()
        #token_probs[pad_mask] = 1.0
        #return tgt_tokens, token_probs, None
        # Visual pass only: still-masked slots get zero confidence.
        t1_probs[t1.eq(Constants.MASK)] = 0.0
        return t1, t1_probs, None
    #tqdm.write("  Visual :   " + to_sentence(t1[1].tolist(), tgt_vocab))
    #tqdm.write("  Visual :   " + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in t1_probs[1].tolist()]))
    t2, t2_probs = self.generate_non_autoregressive(model, enc_output, category, t2, pad_mask, tags, signal=1)
    #tqdm.write("  Mask   :   " + to_sentence(t2[1].tolist(), tgt_vocab))
    #tqdm.write("  Mask   :   " + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in t2_probs[1].tolist()]))
    if self.scale == 0:
        # No fusion: keep the mask-pass result only.
        ind = None
    elif self.scale == 100:
        # NOTE(review): unreachable — scale == 100 already returned above.
        t1_probs[t1.eq(Constants.MASK)] = 0.0
        t2 = t1
        t2_probs = t1_probs
        ind = None
    elif self.scale == 1000:
        t1_probs[t1.eq(Constants.MASK)] = 0.0
        t1[pad_mask] = Constants.MASK
        # Positions the visual pass committed to (non-mask, non-pad).
        ind = t1.ne(Constants.MASK)
        not_equal = (t2[ind] != t1[ind])
        tmp_t = t2[ind].clone()
        tmp_t[not_equal] = t1[ind][not_equal]
        tmp_probs = t2_probs[ind].clone()
        # On disagreement, take the visual token with doubled confidence.
        tmp_probs[not_equal] = 2 * t1_probs[ind][not_equal]
        print(not_equal.sum().item())  # NOTE(review): debug output left in
        t2[ind] = tmp_t
        t2_probs[ind] = tmp_probs
        t2_probs[t2_probs>1.0] = 1.0
        '''
        equal = (t2[ind] = t1[ind])
        tmp_probs = t2_probs[ind].clone()
        tmp_probs[equal] += t1_probs[ind][equal]
        t2[ind] = t1[ind]
        t2_probs[ind] = tmp_probs #(t2_probs[ind]+t1_probs[ind])/2 #torch.sqrt(t2_probs[ind]*t1_probs[ind])
        t2_probs[t2_probs>1.0] = 1.0
        '''
    elif self.scale == 10000:
        # Visual pass wins outright.
        t1_probs[t1.eq(Constants.MASK)] = 0.0
        t2 = t1
        t2_probs = t1_probs
        ind = None
    else:
        # Generic fusion: overwrite with visual tokens, confidence rescaled
        # by self.scale and clipped to 1.0.
        t1_probs[t1.eq(Constants.MASK)] = 0.0
        #ind = t1_probs > t2_probs
        t1[pad_mask] = Constants.MASK
        ind = t1.ne(Constants.MASK)
        t2[ind] = t1[ind]
        #t2_probs[ind] = t1_probs[ind]
        t2_probs[ind] = self.scale*t1_probs[ind]
        t2_probs[t2_probs>1.0] = 1.0
    #tqdm.write("  Fusion :   " + to_sentence(t2[1].tolist(), tgt_vocab))
    return t2, t2_probs, ind
    #return t1, t1_probs
def generate_mp(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
    """Mask-Predict decoding.

    Each iteration re-masks the least confident tokens (confidence =
    model probability * teacher score) and re-predicts only those slots.

    Returns ``(tokens, log-probs, (per-iteration tokens, per-iteration
    scores), None)``.
    """
    collect_results = []
    collect_scores = []
    bsz, seq_len = tgt_tokens.size()
    pad_mask = tgt_tokens.eq(Constants.PAD)
    seq_lens1 = seq_len - pad_mask.sum(dim=1)  # true (unpadded) lengths
    # scale == 100 spends its first round on the visual pass, so add one.
    iterations = self.iterations if self.scale != 100 else self.iterations + 1
    #tqdm.write(("Iteration 0  :   ") + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
    tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
    # Confidences of the fused (visual) positions, saved for later restore.
    visual_probs = token_probs[visual_mask]
    seq_lens2 = seq_lens1 - visual_mask.sum(-1) if visual_mask is not None else seq_lens1
    #if visual_mask is not None:
    #    seq_lens = seq_lens - visual_mask.sum(-1)
    #print(visual_mask.long().sum(-1).float())
    if self.collect_best_candidate_iterative_results and not self.collect_last:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    for counter in range(1, iterations):
        # Teacher confidence for the current hypothesis.
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
        corresponding_probs[pad_mask] = 1.0
        #tqdm.write(("Iteration %dte: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in corresponding_probs[1].tolist()]))
        #tqdm.write(("Iteration %dst: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in token_probs[1].tolist()]))
        if self.fixed_iterations != -1:
            if counter - 1 != self.fixed_iterations:
                # Freeze fused positions except at the chosen iteration.
                token_probs[visual_mask] = 1.0
                #seq_lens = seq_lens2
                seq_lens = seq_lens1
            else:
                token_probs[visual_mask] = visual_probs
                seq_lens = seq_lens1
        else:
            seq_lens = seq_lens1
        if self.scale == 100:
            if counter == 1:
                # First round: refill exactly the still-masked slots.
                mask_ind = (tgt_tokens == Constants.MASK)
            else:
                # Linearly decaying re-mask ratio.
                #ratio = max((1.0 - (counter / iterations)), 0.3)
                ratio = (1.0 - (counter / iterations))
                #ratio = (1.0 - ((counter-1) / (iterations-1)))
                #ratio = 0.4
                #ratio = min((1.0 - (counter / iterations)), 0.7)
                num_mask = (seq_lens.float() * ratio).long()
                mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
                #mask_ind = self.select_worst(token_probs, num_mask)
        else:
            #ratio = max((1.0 - (counter / iterations)), 0.2)
            ratio = (1.0 - (counter / iterations))
            #ratio = min((1.0 - (counter / iterations)), 0.7)
            if self.load_generated_captions:
                # Barely perturb pre-generated captions.
                ratio *= 0.01
            num_mask = (seq_lens.float() * ratio).long()
            mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
            #mask_ind = self.select_worst(token_probs, num_mask)
        tgt_tokens[mask_ind] = Constants.MASK
        #tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        # Predict
        new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags, signal=1)
        #tqdm.write(("Iteration %d0 : " % counter) + to_sentence(new_tgt_tokens[1].tolist(), tgt_vocab))
        # Predict
        # Accept new predictions ONLY at the re-masked positions.
        token_probs[mask_ind] = new_token_probs[mask_ind]
        tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
        #tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        if self.collect_best_candidate_iterative_results and not self.collect_last:
            collect_results.append(tgt_tokens.clone())
            #if counter == iterations - 1:
            #    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
            #    corresponding_probs[pad_mask] = 1.0
            #    collect_scores.append((token_probs * corresponding_probs).clone())
            #else:
            collect_scores.append(token_probs.clone())
    if self.collect_last:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    # Final rescoring by the teacher for the returned log-probabilities.
    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
    corresponding_probs[pad_mask] = 1.0
    lprobs = (token_probs * corresponding_probs).log()
    #lprobs = (token_probs).log()
    return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
def generate_ap(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
    """All-Predict variant of ``generate_mp``.

    The masking schedule is identical to ``generate_mp``, but after each
    prediction the model's new output is accepted for EVERY position
    (wholesale replacement), not only the re-masked slots.

    Returns ``(tokens, log-probs, (per-iteration tokens, per-iteration
    scores), None)``.
    """
    collect_results = []
    collect_scores = []
    bsz, seq_len = tgt_tokens.size()
    pad_mask = tgt_tokens.eq(Constants.PAD)
    seq_lens1 = seq_len - pad_mask.sum(dim=1)  # true (unpadded) lengths
    # scale == 100 spends its first round on the visual pass, so add one.
    iterations = self.iterations if self.scale != 100 else self.iterations + 1
    #tqdm.write(("Iteration 0  :   ") + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
    tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
    visual_probs = token_probs[visual_mask]
    seq_lens2 = seq_lens1 - visual_mask.sum(-1) if visual_mask is not None else seq_lens1
    #if visual_mask is not None:
    #    seq_lens = seq_lens - visual_mask.sum(-1)
    #print(visual_mask.long().sum(-1).float())
    if self.collect_best_candidate_iterative_results and not self.collect_last:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    for counter in range(1, iterations):
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
        corresponding_probs[pad_mask] = 1.0
        #tqdm.write(("Iteration %dte: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in corresponding_probs[1].tolist()]))
        #tqdm.write(("Iteration %dst: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in token_probs[1].tolist()]))
        if self.fixed_iterations != -1:
            if counter - 1 != self.fixed_iterations:
                # Freeze fused positions except at the chosen iteration.
                token_probs[visual_mask] = 1.0
                #seq_lens = seq_lens2
                seq_lens = seq_lens1
            else:
                token_probs[visual_mask] = visual_probs
                seq_lens = seq_lens1
        else:
            seq_lens = seq_lens1
        if self.scale == 100:
            if counter == 1:
                # First round: refill exactly the still-masked slots.
                mask_ind = (tgt_tokens == Constants.MASK)
            else:
                #ratio = max((1.0 - (counter / iterations)), 0.3)
                ratio = (1.0 - (counter / iterations))
                #ratio = (1.0 - ((counter-1) / (iterations-1)))
                #ratio = 0.4
                #ratio = min((1.0 - (counter / iterations)), 0.7)
                num_mask = (seq_lens.float() * ratio).long()
                mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
                #mask_ind = self.select_worst(token_probs, num_mask)
        else:
            #ratio = max((1.0 - (counter / iterations)), 0.2)
            ratio = (1.0 - (counter / iterations))
            #ratio = min((1.0 - (counter / iterations)), 0.7)
            if self.load_generated_captions:
                ratio *= 0.01
            num_mask = (seq_lens.float() * ratio).long()
            mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
            #mask_ind = self.select_worst(token_probs, num_mask)
        tgt_tokens[mask_ind] = Constants.MASK
        #tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        # Predict
        new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags, signal=1)
        #tqdm.write(("Iteration %d0 : " % counter) + to_sentence(new_tgt_tokens[1].tolist(), tgt_vocab))
        # Predict
        # Unlike generate_mp, accept the new prediction for ALL positions.
        token_probs, tgt_tokens = new_token_probs, new_tgt_tokens
        #tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        if self.collect_best_candidate_iterative_results and not self.collect_last:
            collect_results.append(tgt_tokens.clone())
            if counter == iterations - 1:
                # Last candidate is rescored by the teacher before saving.
                corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
                corresponding_probs[pad_mask] = 1.0
                collect_scores.append((token_probs * corresponding_probs).clone())
            else:
                collect_scores.append(token_probs.clone())
    if self.collect_last:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
    corresponding_probs[pad_mask] = 1.0
    lprobs = (token_probs * corresponding_probs).log()
    #lprobs = (token_probs).log()
    return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
def generate_sequential(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags, direction, step=1):
    """Sequential decoding: fill masked slots ``step`` at a time, left-to-right
    (``direction == 0``) or right-to-left, then run ``self.iterations``
    Mask-Predict style refinement rounds.

    Returns ``(tokens, log-probs, (per-step tokens, per-step scores), None)``.
    """
    collect_results = []
    collect_scores = []
    bsz, seq_len = tgt_tokens.size()
    pad_mask = tgt_tokens.eq(Constants.PAD)
    non_pad_mask = tgt_tokens.ne(Constants.PAD)
    seq_lens = seq_len - pad_mask.sum(dim=1)
    if self.scale == 100:
        tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
        # Positions already committed by the visual pass.
        visual_mask = tgt_tokens.ne(Constants.MASK) & non_pad_mask
    else:
        token_probs = tgt_tokens.new(*tgt_tokens.shape).fill_(0).float()
        token_probs[pad_mask] = 1.0
        # BUGFIX: visual_mask was left undefined on this path, but the
        # refinement loop below always reads it -> NameError.  The sibling
        # generate_easy_first sets it to None here; do the same.
        visual_mask = None
    if self.collect_best_candidate_iterative_results:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())

    def get_mask_ind(tgt_tokens, seq_lens):
        # Per sample: list of positions that still hold <mask>.
        all_mask_ind = []
        for i in range(tgt_tokens.size(0)):
            item = [j for j in range(seq_lens[i]) if tgt_tokens[i, j] == Constants.MASK]
            all_mask_ind.append(item)
        return all_mask_ind

    def select_left(all_mask_ind, current, step):
        # Byte mask selecting the next `step` masked positions per sample.
        masks = torch.zeros(*token_probs.shape, device=token_probs.device)
        for i in range(masks.size(0)):
            ind = all_mask_ind[i][current:min(current+step,len(all_mask_ind[i]))] if current < len(all_mask_ind[i]) else []
            masks[i, ind] = 1
        return masks.byte()

    all_mask_ind = get_mask_ind(tgt_tokens, seq_lens)
    itrs = [i for i in range(0, seq_len, step)] if direction == 0 else [i for i in range(seq_len-1, -1, -step)]
    for counter in itrs:
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
        corresponding_probs[pad_mask] = 1.0
        '''
        masks = torch.zeros(*token_probs.shape, device=token_probs.device)
        if direction == 0:
            masks[:, counter:min(counter+step,seq_len)] = 1
        else:
            masks[:, max(counter-step, 0):counter] = 1
        mask_ind = masks.byte() & non_pad_mask
        '''
        mask_ind = select_left(all_mask_ind, counter, step)
        if mask_ind.sum() == 0:
            # No masked slots left in any sample.
            break
        #print(mask_ind[1].tolist())
        tgt_tokens[mask_ind] = Constants.MASK
        #tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags, signal=1)
        # Predict
        token_probs[mask_ind] = new_token_probs[mask_ind]
        tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
        #tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
    # Refinement rounds with a linearly decaying re-mask ratio.
    for i in range(self.iterations):
        if i == 0 and visual_mask is not None:
            # First round re-predicts the visually committed slots.
            mask_ind = visual_mask
        else:
            refine_ratio = 0.4 * (1.0 - (i / self.iterations))
            num_mask = (seq_lens.float() * refine_ratio).long()
            mask_ind = self.select_worst(token_probs, num_mask)
        tgt_tokens[mask_ind] = Constants.MASK
        new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(
            model,
            enc_output,
            category,
            tgt_tokens,
            pad_mask,
            tags)
        token_probs[mask_ind] = new_token_probs[mask_ind]
        tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
    # Final teacher rescoring for the returned log-probabilities.
    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
    corresponding_probs[pad_mask] = 1.0
    lprobs = (token_probs * corresponding_probs).log()
    return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
'''
def get_array_split(self, seq_lens, iterations):
res = []
for i in range(seq_lens.size(0)):
tmp = np.array_split(np.arange(seq_lens[i].cpu()), iterations)
#print(tmp)
res.append(tmp)
return res
def get_mask_ind_from_array_split(self, tgt_tokens, array_split_info, index):
masks = torch.zeros(*tgt_tokens.shape, device=tgt_tokens.device)
for i in range(tgt_tokens.size(0)):
masks[i, array_split_info[i][index]] = 1
return masks.byte()
def generate_sequential(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags, direction, step=1):
collect_results = []
collect_scores = []
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
non_pad_mask = tgt_tokens.ne(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
if self.scale == 100:
tgt_tokens, token_probs, _ = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
#visual_mask = tgt_tokens.ne(Constants.MASK)
#seq_lens = tgt_tokens.eq(Constants.MASK).sum(dim=1)
else:
token_probs = tgt_tokens.new(*tgt_tokens.shape).fill_(0).float()
token_probs[pad_mask] = 1.0
array_split_info = self.get_array_split(seq_lens, step)
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
for counter in range(step):
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
corresponding_probs[pad_mask] = 1.0
mask_ind = self.get_mask_ind_from_array_split(tgt_tokens, array_split_info, counter if direction == 0 else (-(counter+1)))
#print(mask_ind.sum(1))
#print(mask_ind[1].tolist())
tgt_tokens[mask_ind] = Constants.MASK
#tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags, signal=1)
# Predict
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
#tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
for i in range(self.iterations):
refine_ratio = 0.4 * (1.0 - (i / self.iterations))
num_mask = (seq_lens.float() * refine_ratio).long()
mask_ind = self.select_worst(token_probs, num_mask)
tgt_tokens[mask_ind] = Constants.MASK
new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(
model,
enc_output,
category,
tgt_tokens,
pad_mask,
tags)
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
corresponding_probs[pad_mask] = 1.0
lprobs = (token_probs * corresponding_probs).log()
return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
'''
def generate_easy_first(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags, step=1, refine_ratio=0.2):
    """Easy-first decoding: repeatedly predict all masked slots and commit
    only the ``step`` most confident predictions per sample, until no mask
    remains (or no progress is made).  Then run ``self.iterations``
    refinement rounds that re-mask and re-predict low-confidence tokens.

    Returns ``(tokens, log-probs, (per-step tokens, per-step scores), None)``.
    """
    collect_results = []
    collect_scores = []
    bsz, seq_len = tgt_tokens.size()
    pad_mask = tgt_tokens.eq(Constants.PAD)
    non_pad_mask = tgt_tokens.ne(Constants.PAD)
    seq_lens = seq_len - pad_mask.sum(dim=1)
    if self.scale == 100:
        tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
        #visual_mask = tgt_tokens.ne(Constants.MASK)
        # Positions already committed by the visual pass.
        visual_mask = tgt_tokens.ne(Constants.MASK) & tgt_tokens.ne(Constants.PAD)
    else:
        token_probs = tgt_tokens.new(*tgt_tokens.shape).fill_(0).float()
        token_probs[pad_mask] = 1.0
        visual_mask = None
    if self.collect_best_candidate_iterative_results:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    # (removed: dead `iterations` computation and its debug print — the
    # while-loop below terminates on its own mask-count criterion)

    def select_most_confidence(token_probs, mask_ind, step):
        # Byte mask of the (up to) `step` most confident masked slots per
        # sample.  NOTE: zeroes `token_probs` outside the mask in place.
        masks = torch.zeros(*token_probs.shape, device=token_probs.device)
        token_probs[~mask_ind] = 0
        remain_length = mask_ind.sum(-1)
        for i in range(masks.size(0)):
            ind = token_probs[i, :].topk(min(step, remain_length[i]), largest=True, sorted=False)[1]
            masks[i, ind] = 1
        return masks.byte()

    counter = 0
    pre = 0
    while True:
        counter += 1
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
        corresponding_probs[pad_mask] = 1.0
        #tqdm.write(("Iteration %dst: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in token_probs[1].tolist()]))
        mask_ind = tgt_tokens.eq(Constants.MASK)
        remain = mask_ind.sum()
        # Stop when every slot is filled or no progress was made.
        if remain == 0 or pre == remain:
            break
        pre = remain
        #tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(
            model,
            enc_output,
            category,
            tgt_tokens,
            pad_mask,
            tags)
        most_confidence_ind = select_most_confidence(new_token_probs, mask_ind, step)
        #tqdm.write(("Iteration %dind: " % counter) + ' '.join([('%d'%item) for item in most_confidence_ind[1].tolist()]))
        token_probs[most_confidence_ind] = new_token_probs[most_confidence_ind]
        tgt_tokens[most_confidence_ind] = new_tgt_tokens[most_confidence_ind]
        #tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
    # Refinement rounds with a fixed re-mask ratio.
    for i in range(self.iterations):
        if i == 0 and visual_mask is not None:
            mask_ind = visual_mask
        else:
            num_mask = (seq_lens.float() * refine_ratio).long()
            mask_ind = self.select_worst(token_probs, num_mask)
        tgt_tokens[mask_ind] = Constants.MASK
        new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(
            model,
            enc_output,
            category,
            tgt_tokens,
            pad_mask,
            tags)
        token_probs[mask_ind] = new_token_probs[mask_ind]
        tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
    corresponding_probs[pad_mask] = 1.0
    lprobs = (token_probs * corresponding_probs).log()
    return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
'''
def easy_first_decode_step(self, tgt_tokens, token_probs, model, enc_output, category, pad_mask, tags, active_inst_idx_list, q):
def prepare_partial_input(data, active_inst_idx_list):
assert type(data) == list
new_data = []
for item in data:
if item is None:
new_data.append(None)
else:
new_data.append(item.index_select(0, active_inst_idx_list))
return new_data
def select_most_confidence(token_probs, mask_ind, q):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
token_probs[~mask_ind] = 0
remain_length = mask_ind.sum(-1)
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(min(q, remain_length[i]), largest=True, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
def collect_active_inst_idx_list(tgt_tokens, ori_active_inst_idx_list):
active_inst_idx_list = []
assert tgt_tokens.size(0) == len(ori_active_inst_idx_list)
for i in range(tgt_tokens.size(0)):
is_inst_complete = (tgt_tokens[i].eq(Constants.MASK).gt(0).sum() == 0)
if not is_inst_complete:
active_inst_idx_list.append(ori_active_inst_idx_list[i])
return torch.LongTensor(active_inst_idx_list).to(ori_active_inst_idx_list.device)
enc_output, category, tgt_tokens, token_probs, pad_mask, tags = prepare_partial_input(
[enc_output, category, tgt_tokens, token_probs, pad_mask, tags], active_inst_idx_list
)
new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(
model,
enc_output,
category,
tgt_tokens,
pad_mask,
tags)
# update the most confident q tokens among the unkonwn tokens
mask_ind = tgt_tokens.eq(Constants.MASK)
most_confidence_ind = select_most_confidence(new_token_probs, mask_ind, q)
token_probs[most_confidence_ind] = new_token_probs[most_confidence_ind]
tgt_tokens[most_confidence_ind] = new_tgt_tokens[most_confidence_ind]
# update the imcompleted instance
active_inst_idx_list = collect_active_inst_idx_list(tgt_tokens, active_inst_idx_list)
return active_inst_idx_list, tgt_tokens, token_probs
def generate_easy_first(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags, step=1, refine_ratio=0.2):
collect_results = []
collect_scores = []
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
non_pad_mask = tgt_tokens.ne(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
if self.scale == 100:
tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
#visual_mask = tgt_tokens.ne(Constants.MASK)
else:
token_probs = tgt_tokens.new(*tgt_tokens.shape).fill_(0).float()
token_probs[pad_mask] = 1.0
visual_mask = tgt_tokens.ne(Constants.MASK) & tgt_tokens.ne(Constants.PAD)
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
active_inst_idx_list = torch.LongTensor(list(range(tgt_tokens.size(0)))).to(tgt_tokens.device)
while True:
# all instances have finished, i.e., there is no more <mask> token
if active_inst_idx_list.size(0) == 0:
break
new_active_inst_idx_list, new_tgt_tokens, new_token_probs = self.easy_first_decode_step(
tgt_tokens,
token_probs,
model,
enc_output,
category,
pad_mask,
tags,
active_inst_idx_list,
step
)
# update
tgt_tokens[active_inst_idx_list] = new_tgt_tokens
token_probs[active_inst_idx_list] = new_token_probs
# save results if we need
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
# go to next round until all the instances are done
active_inst_idx_list = new_active_inst_idx_list
for i in range(self.iterations):
if i == 0:
mask_ind = visual_mask
else:
num_mask = (seq_lens.float() * refine_ratio).long()
mask_ind = self.select_worst(token_probs, num_mask)
tgt_tokens[mask_ind] = Constants.MASK
new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(
model,
enc_output,
category,
tgt_tokens,
pad_mask,
tags)
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
corresponding_probs[pad_mask] = 1.0
lprobs = (token_probs * corresponding_probs).log()
return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
def generate_easy_first(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags, step=1, refine_ratio=0.2):
collect_results = []
collect_scores = []
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
non_pad_mask = tgt_tokens.ne(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
if self.scale == 100:
tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
#visual_mask = tgt_tokens.ne(Constants.MASK)
seq_lens = tgt_tokens.eq(Constants.MASK).sum(dim=1)
else:
token_probs = tgt_tokens.new(*tgt_tokens.shape).fill_(0).float()
token_probs[pad_mask] = 1.0
visual_mask = tgt_tokens.ne(Constants.MASK) & tgt_tokens.ne(Constants.PAD)
array_split_info = self.get_array_split(seq_lens, step)
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
def select_most_confidence(token_probs, mask_ind, array_split_info, index):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
token_probs[~mask_ind] = 0
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(len(array_split_info[i][index]), largest=True, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
for counter in range(step):
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
corresponding_probs[pad_mask] = 1.0
#tqdm.write(("Iteration %dst: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '') for item in token_probs[1].tolist()]))
mask_ind = tgt_tokens.eq(Constants.MASK)
if mask_ind.sum(-1).gt(0).sum() == 0:
break
#tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(
model,
enc_output,
category,
tgt_tokens,
pad_mask,
tags,
return_all_probs=True
)
most_confidence_ind = select_most_confidence(new_token_probs, mask_ind, array_split_info, counter)
#tqdm.write(("Iteration %dind: " % counter) + ' '.join([('%d'%item) for item in most_confidence_ind[1].tolist()]))
token_probs[most_confidence_ind] = new_token_probs[most_confidence_ind]
tgt_tokens[most_confidence_ind] = new_tgt_tokens[most_confidence_ind]
#tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
for i in range(self.iterations):
if i == 0:
mask_ind = visual_mask
else:
num_mask = (seq_lens.float() * refine_ratio).long()
mask_ind = self.select_worst(token_probs, num_mask)
tgt_tokens[mask_ind] = Constants.MASK
new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(
model,
enc_output,
category,
tgt_tokens,
pad_mask,
tags)
token_probs[mask_ind] = new_token_probs[mask_ind]
tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
if self.collect_best_candidate_iterative_results:
collect_results.append(tgt_tokens.clone())
collect_scores.append(token_probs.clone())
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
corresponding_probs[pad_mask] = 1.0
lprobs = (token_probs * corresponding_probs).log()
return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
'''
def generate_merge(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
    """Outside-in ("merge") decoding: at step i, re-predict the i-th
    position from the left and from the right of every sample, converging
    toward the middle.

    Returns ``(tokens, log-probs, (per-step tokens, per-step scores), None)``.
    """
    collect_results = []
    collect_scores = []
    bsz, seq_len = tgt_tokens.size()
    pad_mask = tgt_tokens.eq(Constants.PAD)
    non_pad_mask = tgt_tokens.ne(Constants.PAD)
    seq_lens = seq_len - pad_mask.sum(dim=1)
    if self.scale == 100:
        tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
        #visual_mask = tgt_tokens.ne(Constants.MASK)
    else:
        token_probs = tgt_tokens.new(*tgt_tokens.shape).fill_(0).float()
        token_probs[pad_mask] = 1.0
    if self.collect_best_candidate_iterative_results:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())

    def select_merge(lens, tgt, idx):
        # Select the idx-th position from the left AND from the right of
        # each sample (just one middle position when they coincide).
        masks = torch.zeros(*tgt.shape, device=tgt.device)
        left = idx
        right = lens -1 - idx
        for j in range(right.size(0)):
            if left > right[j]:
                # This sample is already fully generated.
                continue
            elif left < right[j]:
                masks[j, right[j]] = 1
            masks[j, left] = 1
        return masks.byte()

    total_iteration = (seq_len+1)//2
    for i in range(total_iteration):
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
        corresponding_probs[pad_mask] = 1.0
        mask_ind = select_merge(seq_lens, tgt_tokens, i)
        tgt_tokens[mask_ind] = Constants.MASK
        # BUGFIX/consistency: debug writes disabled — they index sample 1
        # unconditionally (IndexError for batch size < 2); the sibling
        # decoding methods keep these same lines commented out.
        #tqdm.write(("Iteration %d1 : " % i) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags, signal=1)
        # Predict
        token_probs[mask_ind] = new_token_probs[mask_ind]
        tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
        #tqdm.write(("Iteration %d2 : " % i) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
    corresponding_probs[pad_mask] = 1.0
    lprobs = (token_probs * corresponding_probs).log()
    return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
def generate_parallel_easy_first(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
    """Parallel easy-first decoding.

    Each iteration builds, for every sample, seq_len candidate maskings via
    ``select_parallel_easy_first`` (the j-th candidate masks the j easiest
    positions), decodes all of them in one enlarged batch, and reads back
    the diagonal prediction for each position.

    Returns ``(tokens, log-probs, (per-iteration tokens, per-iteration
    scores), None)``.
    """
    collect_results = []
    collect_scores = []
    bsz, seq_len = tgt_tokens.size()
    pad_mask = tgt_tokens.eq(Constants.PAD)
    non_pad_mask = tgt_tokens.ne(Constants.PAD)
    seq_lens = seq_len - pad_mask.sum(dim=1)
    tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
    # scale == 100 spends its first round on the visual pass, so add one.
    iterations = self.iterations if self.scale != 100 else self.iterations + 1
    if self.collect_best_candidate_iterative_results:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    for counter in range(1, iterations):
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
        corresponding_probs[pad_mask] = 1.0
        # BUGFIX/consistency: debug writes disabled — they index sample 1
        # unconditionally (IndexError for batch size < 2); the sibling
        # decoding methods keep these same lines commented out.
        #tqdm.write(("Iteration %dte: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in corresponding_probs[1].tolist()]))
        #tqdm.write(("Iteration %dst: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '     ') for item in token_probs[1].tolist()]))
        '''
        if self.scale == 100:
            if counter == 1:
                mask_ind = (tgt_tokens == Constants.MASK)
            else:
                #ratio = max((1.0 - (counter / iterations)), 0.2)
                ratio = (1.0 - (counter / iterations))
                #ratio = min((1.0 - (counter / iterations)), 0.7)
                num_mask = (seq_lens.float() * ratio).long()
                mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
        else:
        '''
        # [bsz, seq_len, seq_len]: one masking pattern per target position.
        mask_ind = self.select_parallel_easy_first(token_probs * corresponding_probs)
        new_input = enlarge(tgt_tokens, seq_len, return_view=False)
        new_input[mask_ind] = Constants.MASK
        new_input = new_input.view(bsz * seq_len, seq_len)
        new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(
            model,
            enlarge(enc_output, seq_len),
            enlarge(category, seq_len),
            new_input,
            enlarge(pad_mask, seq_len),
            enlarge(tags, seq_len)
        )
        # Take position j's prediction from the j-th candidate (diagonal).
        idx = torch.arange(0, seq_len, device=tgt_tokens.device).unsqueeze(0).repeat(bsz, 1).unsqueeze(2)
        new_tgt_tokens = new_tgt_tokens.view(bsz, seq_len, seq_len).gather(2, idx).squeeze(-1)
        new_token_probs = new_token_probs.view(bsz, seq_len, seq_len).gather(2, idx).squeeze(-1)
        tgt_tokens = new_tgt_tokens
        tgt_tokens[pad_mask] = Constants.PAD
        token_probs = new_token_probs
        token_probs[pad_mask] = 1.0
        #tqdm.write(("Iteration %d  : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
    corresponding_probs[pad_mask] = 1.0
    lprobs = (token_probs * corresponding_probs).log()
    #lprobs = (token_probs).log()
    return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
def select_parallel_easy_first(self, token_probs):
    """Build one masking pattern per target position, easy-first style.

    For each row, positions are ranked by confidence (ascending). The
    pattern for position ``p`` masks every position whose confidence rank
    is <= the rank of ``p`` (including ``p`` itself), so the least
    confident position masks only itself while the most confident one
    masks the whole sequence.

    Args:
        token_probs: [batch_size * B, seq_len] per-token confidences.

    Returns:
        uint8 tensor of shape [bsz, seq_len, seq_len]; entry (i, p, q) is 1
        iff position q must be masked when re-predicting position p.

    Replaces the original O(bsz * seq_len) Python double loop with a single
    vectorized rank comparison — identical output, no per-element writes.
    """
    bsz, seq_len = token_probs.shape
    _, sorted_idx = token_probs.sort(-1)
    # rank[i, p] = j  <=>  position p has the j-th smallest confidence in row i.
    # sorted_idx is a permutation, so argsort inverts it unambiguously.
    rank = sorted_idx.argsort(-1)
    # (i, p, q) -> rank[i, q] <= rank[i, p]
    return (rank.unsqueeze(2) >= rank.unsqueeze(1)).byte()
def generate_mp_refresh(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
    """Iterative mask-predict decoding with confidence "refreshing".

    Starting from a separation/integration first pass, each iteration:
    (1) optionally weights confidences with an autoregressive teacher,
    (2) re-masks the least confident tokens (linearly decaying ratio),
    (3) re-predicts the masked positions in parallel, and
    (4) refreshes every kept token's confidence via ``torch.max`` against
        the model's current full distribution, so a surviving token's
        score can only grow across iterations.

    Returns:
        (tgt_tokens, lprobs, (collect_results, collect_scores), None)
        where lprobs are log(student_prob * teacher_prob) per token.
    """
    collect_results = []  # candidate-sentence snapshots per iteration
    collect_scores = []   # matching per-token confidence snapshots
    bsz, seq_len = tgt_tokens.size()
    pad_mask = tgt_tokens.eq(Constants.PAD)
    seq_lens = seq_len - pad_mask.sum(dim=1)  # true (unpadded) lengths
    # When scale == 100 the first iteration only fills the initial MASK
    # placeholders, so one extra refinement iteration is granted.
    iterations = self.iterations if self.scale != 100 else self.iterations + 1
    # First pass: fused visual-tag / mask-tag prediction.
    tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
    # NOTE(review): visual_probs is computed but never used below.
    visual_probs = token_probs[visual_mask]
    tmp_token_probs = token_probs.clone()
    # self.collect_last is set elsewhere on this class; when True, only the
    # final candidate is collected (see the post-loop branch).
    if self.collect_best_candidate_iterative_results and not self.collect_last:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    for counter in range(1, iterations):
        # Teacher-weighted confidences decide what gets re-masked
        # (identity scores of 1.0 when masking_decision is off).
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
        corresponding_probs[pad_mask] = 1.0
        #tqdm.write(("Iteration %dte: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '') for item in corresponding_probs[1].tolist()]))
        #tqdm.write(("Iteration %dst: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '') for item in token_probs[1].tolist()]))
        if self.scale == 100:
            if counter == 1:
                # First iteration: re-predict exactly the initial MASK slots.
                mask_ind = (tgt_tokens == Constants.MASK)
            else:
                #ratio = max((1.0 - (counter / iterations)), 0.3)
                # Linearly decaying fraction of tokens to re-mask.
                ratio = (1.0 - (counter / iterations))
                #ratio = 0.4
                #ratio = min((1.0 - (counter / iterations)), 0.7)
                num_mask = (seq_lens.float() * ratio).long()
                mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
                #mask_ind = self.select_worst(token_probs, num_mask)
        else:
            #ratio = max((1.0 - (counter / iterations)), 0.2)
            ratio = (1.0 - (counter / iterations))
            #ratio = min((1.0 - (counter / iterations)), 0.7)
            num_mask = (seq_lens.float() * ratio).long()
            mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
            #mask_ind = self.select_worst(token_probs, num_mask)
        tgt_tokens[mask_ind] = Constants.MASK
        #tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        # Predict
        new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags, signal=1, return_all_probs=True)
        #print(all_probs.shape, tgt_tokens.shape)
        #non_mask_ind = ~mask_ind
        #token_probs[non_mask_ind] = all_probs.gather(2, tgt_tokens[non_mask_ind].unsqueeze(2)).squeeze(2)
        #tqdm.write(("Iteration %d0 : " % counter) + to_sentence(new_tgt_tokens[1].tolist(), tgt_vocab))
        # Predict
        token_probs[mask_ind] = new_token_probs[mask_ind]
        tmp_token_probs[mask_ind] = new_token_probs[mask_ind]
        tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
        # "Refresh": zero the re-predicted slots, then take the max of the
        # stored confidence and the model's current probability of the kept
        # token — kept tokens' scores are monotonically non-decreasing.
        token_probs[mask_ind] = 0
        token_probs = torch.max(token_probs, all_probs.gather(2, tgt_tokens.unsqueeze(2)).squeeze(2))
        #token_probs = (token_probs + all_probs.gather(2, tgt_tokens.unsqueeze(2)).squeeze(2))/2
        token_probs[pad_mask] = 1.0
        #tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        if self.collect_best_candidate_iterative_results and not self.collect_last:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
    if self.collect_last:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    #token_probs = tmp_token_probs
    # Final teacher rescoring of the finished candidate (unless disabled).
    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
    corresponding_probs[pad_mask] = 1.0
    lprobs = (token_probs * corresponding_probs).log()
    #lprobs = (token_probs).log()
    return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
def generate_fix_tokens(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
    """Mask-predict decoding with a hand-fixed re-masking schedule.

    Variant of the mask-predict loop where the fraction of tokens to
    re-mask per iteration comes from the hard-coded ``all_ratio`` table
    (2/3, 1/2, then 0.3 thereafter) instead of the usual linear decay
    — hence "fix(ed) tokens".

    Returns:
        (tgt_tokens, lprobs, (collect_results, collect_scores), None)
        where lprobs are log(student_prob * teacher_prob) per token.
    """
    collect_results = []  # candidate-sentence snapshots per iteration
    collect_scores = []   # matching per-token confidence snapshots
    bsz, seq_len = tgt_tokens.size()
    pad_mask = tgt_tokens.eq(Constants.PAD)
    seq_lens = seq_len - pad_mask.sum(dim=1)  # true (unpadded) lengths
    # When scale == 100 the first iteration only fills the initial MASK
    # placeholders, so one extra refinement iteration is granted.
    iterations = self.iterations if self.scale != 100 else self.iterations + 1
    tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab, tags)
    # NOTE(review): visual_probs and tmp_token_probs are unused below.
    visual_probs = token_probs[visual_mask]
    tmp_token_probs = token_probs.clone()
    # Fixed masking-ratio schedule, consumed one entry per iteration.
    all_ratio = [0.666666666666666, 0.5, 0.3, 0.3] + [0.3] * 10
    #all_ratio = [0.1] * 10
    index = 0
    # self.collect_last is set elsewhere on this class.
    if self.collect_best_candidate_iterative_results and not self.collect_last:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    for counter in range(1, iterations):
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
        corresponding_probs[pad_mask] = 1.0
        #tqdm.write(("Iteration %dte: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '') for item in corresponding_probs[1].tolist()]))
        #tqdm.write(("Iteration %dst: " % counter) + ' '.join([('%.3f'%item if item!=1.0 else '') for item in token_probs[1].tolist()]))
        if self.scale == 100:
            if counter == 1:
                # First iteration: re-predict exactly the initial MASK slots.
                mask_ind = (tgt_tokens == Constants.MASK)
            else:
                #ratio = max((1.0 - (counter / iterations)), 0.3)
                #ratio = (1.0 - (counter / iterations))
                # Use the fixed schedule; note selection here ignores the
                # teacher weighting (plain token_probs).
                ratio = all_ratio[index]
                index += 1
                #tqdm.write("%s"%str(ratio))
                #ratio = 0.4
                #ratio = min((1.0 - (counter / iterations)), 0.7)
                num_mask = (seq_lens.float() * ratio).long()
                #num_mask[num_mask < 1] = 1
                #mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask, limit=0.35, seq_lens=seq_lens)
                mask_ind = self.select_worst(token_probs, num_mask)
        else:
            #ratio = max((1.0 - (counter / iterations)), 0.2)
            # Non-scale-100 path: constant 30% re-mask ratio.
            ratio = 0.3
            #ratio = min((1.0 - (counter / iterations)), 0.7)
            num_mask = (seq_lens.float() * ratio).long()
            mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
            #mask_ind = self.select_worst(token_probs, num_mask)
        tgt_tokens[mask_ind] = Constants.MASK
        #tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        # Predict
        new_tgt_tokens, new_token_probs, all_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, tags, signal=1, return_all_probs=True)
        #print(all_probs.shape, tgt_tokens.shape)
        #non_mask_ind = ~mask_ind
        #token_probs[non_mask_ind] = all_probs.gather(2, tgt_tokens[non_mask_ind].unsqueeze(2)).squeeze(2)
        #tqdm.write(("Iteration %d0 : " % counter) + to_sentence(new_tgt_tokens[1].tolist(), tgt_vocab))
        # Predict
        # Only the re-masked slots are updated; kept tokens retain their
        # previous confidences (no "refresh" step in this variant).
        token_probs[mask_ind] = new_token_probs[mask_ind]
        tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
        #tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
        if self.collect_best_candidate_iterative_results and not self.collect_last:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
    if self.collect_last:
        collect_results.append(tgt_tokens.clone())
        collect_scores.append(token_probs.clone())
    #token_probs = tmp_token_probs
    # Final teacher rescoring of the finished candidate (unless disabled).
    corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
    corresponding_probs[pad_mask] = 1.0
    lprobs = (token_probs * corresponding_probs).log()
    #lprobs = (token_probs).log()
    return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)
def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab, tags):
    """Dispatch to the decoding strategy selected by ``self.paradigm``.

    Known paradigms: 'mp' (mask-predict), 'ap', 'l2r'/'r2l' (sequential),
    'ef' (easy-first), 'pef' (parallel easy-first), 'merge',
    'mpr' (mask-predict with refresh), 'ft' (fixed-ratio schedule).

    Returns whatever the selected generate_* method returns
    (tgt_tokens, lprobs, collected candidates, ...).

    Raises:
        ValueError: for an unrecognized paradigm. The original fell off
            the end and silently returned None, which crashed later in the
            caller with an unrelated error.
    """
    args = (model, teacher_model, enc_output, teacher_enc_output,
            category, tgt_tokens, tgt_vocab, tags)
    if self.paradigm == 'mp':
        return self.generate_mp(*args)
    elif self.paradigm == 'ap':
        return self.generate_ap(*args)
    elif self.paradigm == 'l2r':
        return self.generate_sequential(*args, direction=0, step=self.q)
    elif self.paradigm == 'r2l':
        return self.generate_sequential(*args, direction=1)
    elif self.paradigm == 'ef':
        return self.generate_easy_first(*args, step=self.q)
    elif self.paradigm == 'pef':
        return self.generate_parallel_easy_first(*args)
    elif self.paradigm == 'merge':
        return self.generate_merge(*args)
    elif self.paradigm == 'mpr':
        return self.generate_mp_refresh(*args)
    elif self.paradigm == 'ft':
        return self.generate_fix_tokens(*args)
    raise ValueError("unknown decoding paradigm: %r" % (self.paradigm,))
def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens, pad_mask, tags, signal=0, zeros=[], tag_replace=None, return_all_probs=False):
    """Run one parallel decoding pass over all target positions.

    Feeds the current tokens through the decoder, greedily picks a token
    and its probability per position, and neutralizes padded slots
    (PAD token, confidence 1.0).

    Returns:
        (tokens, probs) by default;
        (tokens, probs, all_probs) when return_all_probs is True;
        (tokens, probs, pre_replace_probs) when tag_replace=(source, target)
        is given — occurrences of ``source`` become ``target`` with their
        confidence zeroed, and the pre-zeroing confidences are returned.
    """
    hidden, _ = model.decoder.forward_(tgt_tokens, enc_output, category, signal=signal, tags=tags)
    logits = model.tgt_word_prj(hidden)
    tgt_tokens, token_probs, all_probs = generate_step_with_prob(logits, zeros=zeros)
    # Padded positions must stay PAD with a neutral confidence.
    tgt_tokens[pad_mask] = Constants.PAD
    token_probs[pad_mask] = 1.0
    if return_all_probs:
        return tgt_tokens, token_probs, all_probs
    if tag_replace is None:
        return tgt_tokens, token_probs
    source, target = tag_replace
    replaced = tgt_tokens.eq(source)
    tgt_tokens[replaced] = target
    # Keep a copy of the confidences before the replaced slots are zeroed.
    pre_replace_probs = token_probs.clone()
    token_probs[replaced] = 0.0
    return tgt_tokens, token_probs, pre_replace_probs
def mapping(self, tgt_tokens):
    """Translate every token id through ``self.dict_mapping``.

    Used to convert student-vocabulary ids into the teacher's vocabulary
    before teacher rescoring. Shape, dtype and device of the result match
    ``tgt_tokens``; the input tensor is not modified.

    Replaces the original per-element ``.item()`` loop with per-element
    tensor writes by a single Python-list lookup plus one ``new_tensor``
    construction — same result, far fewer tensor ops.
    """
    mapped = [self.dict_mapping[t] for t in tgt_tokens.flatten().tolist()]
    # new_tensor inherits dtype and device from tgt_tokens.
    return tgt_tokens.new_tensor(mapped).view(*tgt_tokens.shape)
def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, decision=True):
    """Score each candidate token with an autoregressive teacher.

    Shifts the candidate right (BOS-prefixed, last token dropped), runs the
    teacher decoder, and gathers the teacher's probability of each candidate
    token at its own position.

    Returns:
        Float tensor shaped like ``tgt_tokens``; all-ones when there is no
        teacher or ``decision`` is False (i.e. scoring disabled).
    """
    if teacher_model is None or not decision:
        # Neutral scores: multiplying by 1.0 leaves student confidences intact.
        return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
    # Map into the teacher's vocabulary when a mapping is configured.
    tokens = self.mapping(tgt_tokens) if self.dict_mapping != {} else tgt_tokens
    bos_col = tokens.new(tokens.size(0), 1).fill_(Constants.BOS)
    shifted = torch.cat([bos_col, tokens], dim=1)[:, :-1]
    decoder_out, *_ = teacher_model.decoder(shifted, teacher_enc_output, category)
    if isinstance(decoder_out, list):
        # Some decoders return per-layer outputs; score with the last layer.
        decoder_out = decoder_out[-1]
    teacher_probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
    return teacher_probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
def select_worst(self, token_probs, num_mask, limit=None, seq_lens=None):
    """Pick the least-confident positions of each row for re-masking.

    Default mode: per row, mark the ``num_mask[i]`` (at least 1) positions
    with the smallest confidence.

    When ``limit`` is given (its value only toggles the mode; it is not
    used numerically), positions whose log-confidence falls below the
    row's average log-confidence (sum / seq_lens[i]) are marked instead —
    but only if there is at least one such position and fewer than
    ``num_mask[i]``; otherwise the top-k fallback above applies (top-k on
    log-probs selects the same indices, log being monotonic).

    Returns:
        uint8 mask tensor shaped like ``token_probs`` (1 = re-mask).
    """
    out = torch.zeros(*token_probs.shape, device=token_probs.device)
    if limit is None:
        for row, (probs, k) in enumerate(zip(token_probs, num_mask)):
            worst = probs.topk(max(1, k), largest=False, sorted=False)[1]
            out[row, worst] = 1
    else:
        log_probs = token_probs.log()
        for row in range(out.size(0)):
            mean_lp = log_probs[row, :].sum() / seq_lens[row]
            below_avg = log_probs[row, :] < mean_lp
            if below_avg.sum() < num_mask[row] and below_avg.sum() != 0:
                out[row] = below_avg
            else:
                worst = log_probs[row, :].topk(max(1, num_mask[row]), largest=False, sorted=False)[1]
                out[row, worst] = 1
    return out.byte()
def select_random(self, token_probs, num_mask, seq_lens):
    """Pick ``num_mask[i]`` distinct random positions per row (uniform).

    Positions are drawn without replacement from the first ``seq_lens[i]``
    slots using ``self.random`` (a numpy RandomState). Each row of the
    result is padded to ``seq_len`` by repeating the first pick, so the
    output is an index tensor of shape [bsz, seq_len], not a 0/1 mask.
    """
    bsz, seq_len = token_probs.size()
    rows = []
    for b in range(bsz):
        picked = list(self.random.choice(seq_lens[b].item(), size=max(1, num_mask[b].item()), replace=False))
        # Repeat the first index so every row has exactly seq_len entries.
        picked.extend([picked[0]] * (seq_len - len(picked)))
        rows.append(torch.LongTensor(picked))
    return torch.stack(rows, dim=0).to(token_probs.device)
def select_multinomial(self, token_probs, num_mask, seq_lens):
    """Sample ``num_mask[i]`` positions per row, favouring low confidence.

    Sampling weights are exp(-p), so the smaller a token's probability the
    likelier its position is drawn. Draws come from the first
    ``seq_lens[i]`` slots via ``torch.multinomial``; each row is padded to
    ``seq_len`` by repeating the first pick, so the output is an index
    tensor of shape [bsz, seq_len], not a 0/1 mask.
    """
    weights = torch.exp(-token_probs)  # low confidence -> high weight
    bsz, seq_len = token_probs.size()
    rows = []
    for b in range(bsz):
        chosen = list(weights[b, :int(seq_lens[b])].multinomial(max(1, num_mask[b].item())))
        chosen.extend([chosen[0]] * (seq_len - len(chosen)))
        rows.append(torch.LongTensor(chosen))
    return torch.stack(rows, dim=0).to(token_probs.device)
'''
class NV(object):
def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False, **kwargs):
super().__init__()
self.iterations = iterations
self.random = np.random.RandomState(seed)
self.dict_mapping = dict_mapping
self.plot = plot
self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
opt = kwargs['opt']
self.visual_tag = opt['visual_tag']
self.nonvisual_tag = opt['nonvisual_tag']
self.revision_tag = opt['revision_tag']
def separation_integration(self, model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab):
mask_ind = tgt_tokens.eq(Constants.MASK)
t1, t2 = tgt_tokens.clone(), tgt_tokens
t1[mask_ind] = self.visual_tag
t1, t1_probs = self.generate_non_autoregressive(model, enc_output, category, t1, pad_mask, signal=0)
tqdm.write(" Visual : " + to_sentence(t1[1].tolist(), tgt_vocab))
t2, t2_probs = self.generate_non_autoregressive(model, enc_output, category, t2, pad_mask, signal=1)
tqdm.write(" Mask : " + to_sentence(t2[1].tolist(), tgt_vocab))
t1_probs[t1.eq(Constants.MASK)] = 0.0
ind = t1_probs > t2_probs
t2[ind] = t1[ind]
t2_probs[ind] = t1_probs[ind]
tqdm.write(" Fusion : " + to_sentence(t2[1].tolist(), tgt_vocab))
return t2, t2_probs
def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
bsz, seq_len = tgt_tokens.size()
pad_mask = tgt_tokens.eq(Constants.PAD)
seq_lens = seq_len - pad_mask.sum(dim=1)
collect_results = []
iterations = self.iterations
tgt_tokens, token_probs = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab)
for counter in range(1, iterations):
#corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)
#corresponding_probs[pad_mask] = 1.0
ratio = (1.0 - (counter / iterations))
num_mask = (seq_lens.float() * ratio).long()
#mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
mask_ind = self.select_worst(token_probs, num_mask)
tgt_tokens[mask_ind] = Constants.MASK
# Predict
tgt_tokens, token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, signal=1)
tqdm.write(("Iteration %d: " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens)
corresponding_probs[pad_mask] = 1.0
lprobs = (token_probs * corresponding_probs).log()
#lprobs = (token_probs).log()
return tgt_tokens, lprobs, collect_results
def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens, pad_mask, signal, zeros=[], tag_replace=None):
decoder_out = model.decoder.forward_(tgt_tokens, enc_output, category, signal=signal)
tgt_tokens, token_probs, all_probs = generate_step_with_prob(model.tgt_word_prj(decoder_out), zeros=zeros)
tgt_tokens[pad_mask] = Constants.PAD
token_probs[pad_mask] = 1.0
if tag_replace is not None:
source, target = tag_replace
ind = tgt_tokens.eq(source)
tgt_tokens[ind] = target
copy_ = token_probs.clone()
token_probs[ind] = 0.0
return tgt_tokens, token_probs, copy_
return tgt_tokens, token_probs
def mapping(self, tgt_tokens):
tokens = tgt_tokens.clone().flatten()
for i, token in enumerate(tokens):
tokens[i] = self.dict_mapping[token.item()]
return tokens.view(*tgt_tokens.shape)
def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, no_masking_desicion=False):
if teacher_model is None or no_masking_desicion:
return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
if self.dict_mapping != {}:
tokens = self.mapping(tgt_tokens)
else:
tokens = tgt_tokens
tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
#print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
if isinstance(decoder_out, list):
decoder_out = decoder_out[-1]
probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)
def select_worst(self, token_probs, num_mask):
masks = torch.zeros(*token_probs.shape, device=token_probs.device)
for i in range(masks.size(0)):
ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
masks[i, ind] = 1
return masks.byte()
def select_random(self, token_probs, num_mask, seq_lens):
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
def select_multinomial(self, token_probs, num_mask, seq_lens):
probs = torch.exp(-token_probs)
bsz, seq_len = token_probs.size()
masks = []
for i in range(bsz):
ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
ind = list(ind)
ind += [ind[0]] * (seq_len - len(ind))
masks.append(torch.LongTensor(ind))
return torch.stack(masks, dim=0).to(token_probs.device)
'''
class MS(object):
    """Multi-Scale (MS) iterative mask-predict decoder.

    Same mask-predict machinery as the sibling decoders in this file, but
    the first (separation) pass runs the decoder in multiscale mode: the
    visual-tagged input is enlarged ``self.multiscale`` times and decoded
    per scale. Teacher rescoring, worst-token re-masking and candidate
    collection mirror the other classes.
    """

    def __init__(self, iterations, seed, dict_mapping, plot=False, collect_best_candidate_iterative_results=False, **kwargs):
        """Configure the decoder.

        Args:
            iterations: number of refinement iterations.
            seed: seed for the numpy RandomState used by select_random.
            dict_mapping: student-id -> teacher-id vocabulary map ({} = none).
            plot: stored flag (not read in this class).
            collect_best_candidate_iterative_results: collect per-iteration
                candidates; see NOTE below about being overwritten.
            **kwargs: must contain 'opt', a dict of decoding options.
        """
        super().__init__()
        self.iterations = iterations
        self.random = np.random.RandomState(seed)
        self.dict_mapping = dict_mapping
        self.plot = plot
        self.collect_best_candidate_iterative_results = collect_best_candidate_iterative_results
        opt = kwargs['opt']
        self.visual_tag = opt['visual_tag']
        self.nonvisual_tag = opt['nonvisual_tag']
        self.revision_tag = opt['revision_tag']
        self.masking_decision = opt.get('masking_decision', False)
        self.no_candidate_decision = opt.get('no_candidate_decision', False)
        # NOTE(review): this overwrites the constructor argument of the same
        # name assigned a few lines above — the opt value always wins.
        self.collect_best_candidate_iterative_results = opt.get('collect_best_candidate_iterative_results', False)
        self.scale = opt.get('nv_scale', 0.0)
        self.fixed_iterations = opt.get('fixed_iterations', -1)
        self.multiscale = opt['multiscale']
        # A fixed iteration budget only makes sense together with a scale.
        if self.fixed_iterations != -1: assert self.scale > 0
        assert self.fixed_iterations <= self.iterations - 2

    def separation_integration(self, model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab):
        """First decoding pass: multiscale visual pass + plain mask pass.

        t1 (MASKs replaced by the visual tag) is decoded in multiscale mode
        and only logged per scale; t2 (original MASKed input) is decoded
        normally and returned. No fusion of t1 into t2 happens here — the
        "Fusion" log line repeats t2 (compare the sibling classes, which do
        fuse). The third return value is always None.
        """
        mask_ind = tgt_tokens.eq(Constants.MASK)
        t1, t2 = tgt_tokens.clone(), tgt_tokens
        t1[mask_ind] = self.visual_tag
        t1, t1_probs = self.generate_non_autoregressive(model, enc_output, category, t1, enlarge(pad_mask, self.multiscale), multiscale=True)
        # Log the visual prediction of sample 1 at every scale.
        tmp = t1.view(-1, self.multiscale, t1.size(-1))
        res = tmp.chunk(self.multiscale, dim=1)
        for i in range(len(res)):
            tqdm.write((" Visual%d : " % i) + to_sentence(res[i][1][0].tolist(), tgt_vocab))
        t2, t2_probs = self.generate_non_autoregressive(model, enc_output, category, t2, pad_mask, multiscale=False)
        tqdm.write(" Mask : " + to_sentence(t2[1].tolist(), tgt_vocab))
        tqdm.write(" Fusion : " + to_sentence(t2[1].tolist(), tgt_vocab))
        return t2, t2_probs, None
        #return t1, t1_probs

    def generate(self, model, teacher_model, enc_output, teacher_enc_output, category, tgt_tokens, tgt_vocab):
        """Iterative mask-predict decoding (multiscale first pass).

        Per iteration: teacher-weight the confidences, re-mask a linearly
        decaying fraction of the least confident tokens, and re-predict
        them in parallel. Returns
        (tgt_tokens, lprobs, (collect_results, collect_scores), None).
        """
        collect_results = []  # candidate snapshots per iteration
        collect_scores = []   # matching per-token confidences
        bsz, seq_len = tgt_tokens.size()
        pad_mask = tgt_tokens.eq(Constants.PAD)
        seq_lens1 = seq_len - pad_mask.sum(dim=1)  # true (unpadded) lengths
        iterations = self.iterations
        tgt_tokens, token_probs, visual_mask = self.separation_integration(model, enc_output, category, tgt_tokens, pad_mask, tgt_vocab)
        # NOTE(review): visual_mask is always None here (see above), so
        # seq_lens2 == seq_lens1 and visual_probs is token_probs[None].
        visual_probs = token_probs[visual_mask]
        seq_lens2 = seq_lens1 - visual_mask.sum(-1) if visual_mask is not None else seq_lens1
        #if visual_mask is not None:
        # seq_lens = seq_lens - visual_mask.sum(-1)
        #print(visual_mask.long().sum(-1).float())
        if self.collect_best_candidate_iterative_results:
            collect_results.append(tgt_tokens.clone())
            collect_scores.append(token_probs.clone())
        for counter in range(1, iterations):
            corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=self.masking_decision)
            corresponding_probs[pad_mask] = 1.0
            seq_lens = seq_lens1
            #ratio = max((1.0 - (counter / iterations)), 0.2)
            # Linearly decaying fraction of tokens to re-mask.
            ratio = (1.0 - (counter / iterations))
            #ratio = min((1.0 - (counter / iterations)), 0.7)
            num_mask = (seq_lens.float() * ratio).long()
            mask_ind = self.select_worst(token_probs * corresponding_probs, num_mask)
            #mask_ind = self.select_worst(token_probs, num_mask)
            tgt_tokens[mask_ind] = Constants.MASK
            tqdm.write(("Iteration %d1 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
            # Predict
            new_tgt_tokens, new_token_probs = self.generate_non_autoregressive(model, enc_output, category, tgt_tokens, pad_mask, multiscale=False)
            # Predict
            # Only the re-masked slots are updated with fresh predictions.
            token_probs[mask_ind] = new_token_probs[mask_ind]
            tgt_tokens[mask_ind] = new_tgt_tokens[mask_ind]
            tqdm.write(("Iteration %d2 : " % counter) + to_sentence(tgt_tokens[1].tolist(), tgt_vocab))
            if self.collect_best_candidate_iterative_results:
                collect_results.append(tgt_tokens.clone())
                collect_scores.append(token_probs.clone())
        # Final teacher rescoring of the finished candidate (unless disabled).
        corresponding_probs = self.scoring_by_teacher(teacher_model, teacher_enc_output, category, tgt_tokens, decision=(not self.no_candidate_decision))
        corresponding_probs[pad_mask] = 1.0
        lprobs = (token_probs * corresponding_probs).log()
        #lprobs = (token_probs).log()
        return tgt_tokens, lprobs, (collect_results, collect_scores), None#visual_mask.sum(-1)

    def generate_non_autoregressive(self, model, enc_output, category, tgt_tokens, pad_mask, multiscale, zeros=[], tag_replace=None):
        """One parallel decoding pass; ``multiscale`` is forwarded to the decoder.

        Returns (tokens, probs), or (tokens, probs, pre_replace_probs) when
        tag_replace=(source, target) is given — occurrences of ``source``
        become ``target`` with confidence zeroed, and the pre-zeroing
        confidences are returned. Padded slots get PAD / confidence 1.0.
        """
        decoder_out = model.decoder.forward_(tgt_tokens, enc_output, category, multiscale=multiscale)
        tgt_tokens, token_probs, all_probs = generate_step_with_prob(model.tgt_word_prj(decoder_out), zeros=zeros)
        tgt_tokens[pad_mask] = Constants.PAD
        token_probs[pad_mask] = 1.0
        if tag_replace is not None:
            source, target = tag_replace
            ind = tgt_tokens.eq(source)
            tgt_tokens[ind] = target
            # Snapshot confidences before the replaced slots are zeroed.
            copy_ = token_probs.clone()
            token_probs[ind] = 0.0
            return tgt_tokens, token_probs, copy_
        return tgt_tokens, token_probs

    def mapping(self, tgt_tokens):
        """Translate every token id through ``self.dict_mapping``.

        Returns a new tensor shaped like ``tgt_tokens`` (input unmodified).
        """
        tokens = tgt_tokens.clone().flatten()
        for i, token in enumerate(tokens):
            tokens[i] = self.dict_mapping[token.item()]
        return tokens.view(*tgt_tokens.shape)

    def scoring_by_teacher(self, teacher_model, teacher_enc_output, category, tgt_tokens, decision=True):
        """Teacher probability of each candidate token at its own position.

        Returns all-ones (neutral scores) when there is no teacher or
        ``decision`` is False; otherwise runs the BOS-shifted candidate
        through the teacher decoder and gathers per-token probabilities.
        """
        if teacher_model is None or not decision:
            return tgt_tokens.new(*tgt_tokens.shape).fill_(1).float()
        # Map into the teacher's vocabulary when a mapping is configured.
        if self.dict_mapping != {}:
            tokens = self.mapping(tgt_tokens)
        else:
            tokens = tgt_tokens
        tgt_tokens_with_bos = torch.cat([tokens.new(tokens.size(0), 1).fill_(Constants.BOS), tokens], dim=1)
        #print(tgt_tokens_with_bos.shape, teacher_enc_output.shape, category.shape)
        decoder_out, *_ = teacher_model.decoder(tgt_tokens_with_bos[:, :-1], teacher_enc_output, category)
        if isinstance(decoder_out, list):
            # Some decoders return per-layer outputs; score with the last.
            decoder_out = decoder_out[-1]
        probs = F.softmax(teacher_model.tgt_word_prj(decoder_out), dim=-1)
        return probs.gather(2, tokens.unsqueeze(2)).squeeze(2)

    def select_worst(self, token_probs, num_mask):
        """Per row, mark the num_mask[i] (>=1) lowest-confidence positions.

        Returns a uint8 0/1 mask shaped like ``token_probs``.
        """
        masks = torch.zeros(*token_probs.shape, device=token_probs.device)
        for i in range(masks.size(0)):
            ind = token_probs[i, :].topk(max(1, num_mask[i]), largest=False, sorted=False)[1]
            masks[i, ind] = 1
        return masks.byte()

    def select_random(self, token_probs, num_mask, seq_lens):
        """Uniformly pick num_mask[i] distinct positions per row.

        Returns an index tensor [bsz, seq_len] (rows padded by repeating
        the first pick), not a 0/1 mask.
        """
        bsz, seq_len = token_probs.size()
        masks = []
        for i in range(bsz):
            ind = self.random.choice(seq_lens[i].item(), size=max(1, num_mask[i].item()), replace=False)
            ind = list(ind)
            ind += [ind[0]] * (seq_len - len(ind))
            masks.append(torch.LongTensor(ind))
        return torch.stack(masks, dim=0).to(token_probs.device)

    def select_multinomial(self, token_probs, num_mask, seq_lens):
        """Sample num_mask[i] positions per row, weighted by exp(-p).

        Lower-confidence positions are likelier to be drawn. Returns an
        index tensor [bsz, seq_len] (rows padded by repeating the first
        pick), not a 0/1 mask.
        """
        probs = torch.exp(-token_probs)  # low confidence -> high weight
        bsz, seq_len = token_probs.size()
        masks = []
        for i in range(bsz):
            ind = probs[i, :int(seq_lens[i])].multinomial(max(1, num_mask[i].item()))
            ind = list(ind)
            ind += [ind[0]] * (seq_len - len(ind))
            masks.append(torch.LongTensor(ind))
        return torch.stack(masks, dim=0).to(token_probs.device)
| 48.519934
| 179
| 0.621911
| 18,712
| 146,045
| 4.554404
| 0.019453
| 0.08269
| 0.039896
| 0.038018
| 0.936624
| 0.924409
| 0.918601
| 0.911807
| 0.904227
| 0.90276
| 0
| 0.01426
| 0.267733
| 146,045
| 3,010
| 180
| 48.519934
| 0.782625
| 0.066452
| 0
| 0.751471
| 1
| 0
| 0.01456
| 0.003269
| 0
| 0
| 0
| 0
| 0.003676
| 1
| 0.058088
| false
| 0
| 0.007353
| 0
| 0.141912
| 0.001471
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
06551c6f5d48043d6bc563dbf75641fc2e710dfe
| 55,265
|
py
|
Python
|
pyeccodes/defs/grib2/localConcepts/eswi/units_def.py
|
ecmwf/pyeccodes
|
dce2c72d3adcc0cb801731366be53327ce13a00b
|
[
"Apache-2.0"
] | 7
|
2020-04-14T09:41:17.000Z
|
2021-08-06T09:38:19.000Z
|
pyeccodes/defs/grib2/localConcepts/eswi/units_def.py
|
ecmwf/pyeccodes
|
dce2c72d3adcc0cb801731366be53327ce13a00b
|
[
"Apache-2.0"
] | null | null | null |
pyeccodes/defs/grib2/localConcepts/eswi/units_def.py
|
ecmwf/pyeccodes
|
dce2c72d3adcc0cb801731366be53327ce13a00b
|
[
"Apache-2.0"
] | 3
|
2020-04-30T12:44:48.000Z
|
2020-12-15T08:40:26.000Z
|
import pyeccodes.accessors as _
def load(h):
def wrapped(h):
discipline = h.get_l('discipline')
parameterCategory = h.get_l('parameterCategory')
parameterNumber = h.get_l('parameterNumber')
if discipline == 3 and parameterCategory == 1 and parameterNumber == 19:
return 'm/s'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 13:
return '1/s'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 12:
return '%'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 11:
return '%'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 10:
return '%'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 9:
return '%'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 8:
return 'Degree'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 7:
return 'Degree'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 6:
return 'Numeric'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 5:
return 'm/s'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 4:
return 'm/s'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 3:
return 'Code'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 2:
return 'm'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 1:
return 'kg/m2/s'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 0:
return 'kg/m2'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 9:
return 'Code'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 8:
return 'Code'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 7:
return 'Code'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 6:
return 'Numeric'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 5:
return 'Numeric'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 4:
return 'Numeric'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 3:
return 'Numeric'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 2:
return 'Numeric'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 1:
return 'Numeric'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 0:
return 'Numeric'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 17:
return 'kg/m3'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 16:
return 'kg/m3'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 15:
return 'm3/m3'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 14:
return 'kg/m3'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 13:
return 'm3/m3'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 12:
return 'kg/m3'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 11:
return 'm3/m3'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 10:
return 'm3/m3'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 6:
return 'Numeric'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 27:
return 'm3/m3'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 26:
return 'kg/m3'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 25:
return 'm3/m3'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 24:
return 'W/m2'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 23:
return 'kg/m2'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 22:
return 'kg/m3'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 21:
return 'Proportion'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 20:
return 'Proportion'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 19:
return 'Proportion'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 18:
return 'Proportion'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 16:
return 's/m'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 15:
return 'm/s'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 14:
return 'm'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 13:
return 'kg/m2'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 12:
return 'kg/m2/s'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 11:
return '%'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 9:
return 'Proportion'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 8:
return 'Code'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 7:
return 'm'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 6:
return '1/kg2/s'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 3:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 1:
return 'fraction'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 11:
return 'm'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 3:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 0:
return 'kg/kg'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 0:
return 'Pa'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 25:
return 'Numeric'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 24:
return 'Numeric'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 23:
return 'W/m2'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 22:
return 'Numeric'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 21:
return 'rad'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 20:
return 'm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 19:
return 'gpm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 18:
return 'm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 17:
return 'N/m2'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 16:
return 'N/m2'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 15:
return 'gpm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 14:
return 'm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 13:
return 'm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 12:
return 'm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 11:
return 'Pa'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 32:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 29:
return 'Numeric'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 28:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 27:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 26:
return 'N/m2'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 25:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 24:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 23:
return 'm/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 86:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 85:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 84:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 83:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 68:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 67:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 66:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 65:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 64:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 62:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 60:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 59:
return 'm/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 58:
return 'm/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 57:
return 'm/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 56:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 55:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 54:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 51:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 50:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 49:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 46:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 45:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 44:
return 'Numeric'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 43:
return 'Proportion'
if discipline == 3 and parameterCategory == 1 and parameterNumber == 23:
return 1
if discipline == 3 and parameterCategory == 1 and parameterNumber == 22:
return 1
if discipline == 3 and parameterCategory == 1 and parameterNumber == 21:
return 1
if discipline == 3 and parameterCategory == 1 and parameterNumber == 20:
return 1
if discipline == 0 and parameterCategory == 19 and parameterNumber == 4:
return 'Code'
atmosphericChemicalConsituentType = h.get_l('atmosphericChemicalConsituentType')
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10006:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10022:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10021:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10012:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10002:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10001:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 12:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 20:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 14:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 16:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 18:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 15:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 13:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63011:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 60013:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10004:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10011:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10017:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 7:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10023:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10015:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10009:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10016:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10008:
return '-'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 11:
return 'J/kg'
if discipline == 10 and parameterCategory == 191 and parameterNumber == 1:
return 'm3/s'
if discipline == 10 and parameterCategory == 191 and parameterNumber == 0:
return 's'
if discipline == 1 and parameterCategory == 1 and parameterNumber == 2:
return '%'
if discipline == 1 and parameterCategory == 1 and parameterNumber == 1:
return '%'
if discipline == 1 and parameterCategory == 1 and parameterNumber == 0:
return 'kg/m2'
if discipline == 1 and parameterCategory == 0 and parameterNumber == 6:
return 'kg/m2'
if discipline == 1 and parameterCategory == 0 and parameterNumber == 5:
return 'kg/m2'
if discipline == 1 and parameterCategory == 0 and parameterNumber == 4:
return '%'
if discipline == 1 and parameterCategory == 0 and parameterNumber == 3:
return 'Code'
if discipline == 1 and parameterCategory == 0 and parameterNumber == 2:
return 'Code'
if discipline == 1 and parameterCategory == 0 and parameterNumber == 1:
return 'kg/m2'
if discipline == 1 and parameterCategory == 0 and parameterNumber == 0:
return 'kg/m2'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 14:
return 'Deg. true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 15:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 13:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 12:
return 'Deg true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 11:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 10:
return 'Deg true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 9:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 8:
return 'm'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 7:
return 'Deg. true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 6:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 5:
return 'm'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 4:
return 'Deg. true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 3:
return 'm'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 8:
return '1/s'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 7:
return 'm/s'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 3:
return 'm/s'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 2:
return 'Deg true'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 1:
return 'm'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 0:
return 'Fraction'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 10:
return 'kg/m3'
if discipline == 10 and parameterCategory == 3 and parameterNumber == 0:
return 'K'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 1:
return 'Fraction'
if discipline == 10 and parameterCategory == 4 and parameterNumber == 1:
return 'm'
if discipline == 10 and parameterCategory == 4 and parameterNumber == 0:
return 'm'
if discipline == 10 and parameterCategory == 4 and parameterNumber == 2:
return 'm'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 3:
return 'm'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 11:
return 'm'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 0:
return 'g/kg'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 3:
return 'cm/s'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 2:
return 'cm/s'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 1:
return 'm/s'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 0:
return 'Deg true'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 6:
return 'm2/s2'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 5:
return 'm2/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 4:
return 'm2/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 1:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 0:
return 'Deg true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 2:
return '-'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 1:
return '-'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 0:
return '-'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 2:
return 'K'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 1:
return 'Pa'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 11:
return 'J/kg'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 3:
return 'm/s'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 2:
return 'm/s'
if discipline == 0 and parameterCategory == 191 and parameterNumber == 0:
return 's'
if discipline == 0 and parameterCategory == 190 and parameterNumber == 0:
return 'CCITTIA5'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 23:
return '%'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 22:
return '%'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 21:
return '%'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 20:
return '%'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 18:
return '%'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 16:
return 'm'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 15:
return 'm'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 14:
return 'Code'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 13:
return 'Code'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 12:
return 'Code'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 10:
return 'Code'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 9:
return 'm'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 8:
return 'm'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 7:
return 'Code'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 6:
return 'm'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 5:
return 'm'
if discipline == 0 and parameterCategory == 16 and parameterNumber == 5:
return 'dB'
if discipline == 0 and parameterCategory == 16 and parameterNumber == 4:
return 'dB'
if discipline == 0 and parameterCategory == 16 and parameterNumber == 3:
return 'm'
if discipline == 0 and parameterCategory == 16 and parameterNumber == 2:
return 'mm6/m3'
if discipline == 0 and parameterCategory == 16 and parameterNumber == 1:
return 'mm6/m3'
if discipline == 0 and parameterCategory == 16 and parameterNumber == 0:
return 'mm6/m3'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 5:
return 'kg/m'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 4:
return 'dB'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 3:
return 'kg/m'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 2:
return 'm/s'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 1:
return 'dB'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 0:
return 'm/s'
if discipline == 0 and parameterCategory == 14 and parameterNumber == 2:
return 'Dobson'
if discipline == 0 and parameterCategory == 14 and parameterNumber == 1:
return 'kg/kg'
if discipline == 0 and parameterCategory == 13 and parameterNumber == 0:
return 'Code'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 12:
return 'Numeric'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 11:
return 'K'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 10:
return 'K'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 9:
return 'Numeric'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 8:
return 'J/kg'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 5:
return 'Numeric'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 4:
return 'K'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 3:
return 'K'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 2:
return 'K'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 33:
return 's'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 32:
return 'Numeric'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 25:
return '%'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 24:
return 'Numeric'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 23:
return 'kg/kg'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 22:
return '%'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 21:
return 'Proportion'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 20:
return 'kg/m2'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 19:
return 'kg/m2'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 18:
return 'kg/m2'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 17:
return 'kg/kg'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 16:
return 'Proportion'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 15:
return 'J/kg'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 14:
return '%'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 13:
return 'm'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 12:
return 'm'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 11:
return 'm'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 10:
return 'Code'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 9:
return 'm'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 8:
return 'Code'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 7:
return '%'
if discipline == 0 and parameterCategory == 5 and parameterNumber == 6:
return 'W/m2'
if discipline == 0 and parameterCategory == 5 and parameterNumber == 5:
return 'W/m2'
if discipline == 0 and parameterCategory == 5 and parameterNumber == 4:
return 'W/m2'
if discipline == 0 and parameterCategory == 5 and parameterNumber == 3:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 51:
return 'Numeric'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 50:
return 'Numeric'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 12:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 11:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 10:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 9:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 8:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 7:
return 'W/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 53:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 52:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 22:
return 'M/S'
if discipline == 3 and parameterCategory == 0 and parameterNumber == 7:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 5:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 4:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 3:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 2:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 1:
return 'fraction'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 2:
return '%'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 1:
return '%'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 0:
return 'm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 1:
return 'Pa'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 42:
return '%'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 41:
return 'W/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 40:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 39:
return '%'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 38:
return 'kg/kg/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 37:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 36:
return 'code'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 35:
return 'code'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 34:
return 'code'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 33:
return 'code'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 32:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 31:
return 'm'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 30:
return 'code'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 26:
return 'kg/kg/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 25:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 24:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 23:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 22:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 21:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 20:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 19:
return 'code'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 18:
return 'kg/m3'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 17:
return 'day'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 17:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 16:
return 'W/m2'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 13:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 12:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 15:
return 'K'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 12:
return 'm'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 11:
return 'm'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 5:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 4:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 3:
return 'fraction'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 1:
return 'fraction'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 1:
return '%'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 22:
return 'm/s'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 0:
return 'm'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 5:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 4:
return 'K'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 1:
return 'Pa'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 0:
return ' m'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 62000:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 40009:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 62012:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63016:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63015:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63014:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 40008:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63018:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63017:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 62001:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 40008:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 40009:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 62008:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 23:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63012:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63013:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 2:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 3:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 4:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10000:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 19:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 0:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63004:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 9:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 60004:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 60003:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63001:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63009:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63007:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 17:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 5:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 11:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63005:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63008:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 63006:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 10500:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 22:
return '-'
if discipline == 0 and parameterCategory == 20 and atmosphericChemicalConsituentType == 8:
return '-'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 22:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 30:
return 'm/s'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 6:
return 'J/kg'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 7:
return 'J/kg'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 11:
return 'J/kg'
if discipline == 2 and parameterCategory == 3 and parameterNumber == 0:
return 'code'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 61:
return '?'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 19:
return 'Fraction'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 22:
return 'Fraction'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 11:
return 'm'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 20:
return 'kg/m2'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 21:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 19:
return 'J'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 18:
return 'N/m2'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 17:
return 'N/m2'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 20:
return 'W/m2'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 11:
return 'W/m2'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 10:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 6:
return 'W/m3/sr'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 5:
return 'W/m/sr'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 4:
return 'K'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 3:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 2:
return 'W/m2'
if discipline == 0 and parameterCategory == 5 and parameterNumber == 2:
return 'W/m2'
if discipline == 0 and parameterCategory == 5 and parameterNumber == 1:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 1:
return 'W/m2'
if discipline == 0 and parameterCategory == 5 and parameterNumber == 0:
return 'W/m2'
if discipline == 0 and parameterCategory == 4 and parameterNumber == 0:
return 'W/m2'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 13:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 12:
return 'deg. true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 11:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 10:
return 'deg. true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 9:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 8:
return 'm'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 7:
return 'deg. true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 6:
return 's'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 5:
return 'm'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 4:
return 'deg. true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 3:
return 'm'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 16:
return 'kg/m2'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 7:
return '1/s'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 6:
return 'm/s'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 5:
return 'm/s'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 4:
return 'm/s'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 3:
return 'm/s'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 2:
return 'deg. true'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 1:
return 'm'
if discipline == 10 and parameterCategory == 2 and parameterNumber == 0:
return 'Fraction'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 5:
return 'kg/m2'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 10:
return 'kg/m3'
if discipline == 10 and parameterCategory == 4 and parameterNumber == 3:
return 'kg/kg'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 4:
return '%'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 3:
return 'kg/m2'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 2:
return 'K'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 1:
return '%'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 1:
return 'm'
if discipline == 10 and parameterCategory == 3 and parameterNumber == 1:
return 'm'
if discipline == 2 and parameterCategory == 0 and parameterNumber == 0:
return 'Fraction'
if discipline == 10 and parameterCategory == 3 and parameterNumber == 0:
return 'K'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 15:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 14:
return 'kg/m2'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 1:
return 'K'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 6:
return 'kg/m2'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 5:
return '%'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 4:
return '%'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 3:
return '%'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 2:
return '%'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 1:
return '%'
if discipline == 10 and parameterCategory == 4 and parameterNumber == 1:
return 'm'
if discipline == 10 and parameterCategory == 4 and parameterNumber == 0:
return 'm'
if discipline == 10 and parameterCategory == 4 and parameterNumber == 2:
return 'm'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 3:
return 'm'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 11:
return 'm'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 13:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 12:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 10:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 9:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 8:
return 'kg/m2'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 2:
return '%'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 7:
return 'kg/m2/s'
if discipline == 0 and parameterCategory == 6 and parameterNumber == 0:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 6:
return 'm of water equivalent'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 5:
return 'Pa'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 4:
return 'Pa'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 3:
return 'kg/m2'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 2:
return 'kg/kg'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 1:
return '%'
if discipline == 0 and parameterCategory == 1 and parameterNumber == 0:
return 'kg/kg'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 3:
return 'm/s'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 2:
return 'm/s'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 1:
return 'm/s'
if discipline == 10 and parameterCategory == 1 and parameterNumber == 0:
return 'Deg. true'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 16:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 15:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 13:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 12:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 11:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 10:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 9:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 8:
return 'Pa/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 7:
return '1/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 6:
return 'm2/s2'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 5:
return 'm2/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 4:
return 'm2/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 3:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 2:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 1:
return 'm/s'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 0:
return 'Deg. true'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 2:
return '-'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 1:
return '-'
if discipline == 10 and parameterCategory == 0 and parameterNumber == 0:
return '-'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 9:
return 'Gpm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 8:
return 'Pa'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 9:
return 'K'
if discipline == 0 and parameterCategory == 7 and parameterNumber == 0:
return 'K'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 8:
return '-'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 7:
return '-'
if discipline == 0 and parameterCategory == 15 and parameterNumber == 6:
return '-'
if discipline == 0 and parameterCategory == 19 and parameterNumber == 0:
return 'm'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 8:
return 'K/m'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 7:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 6:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 5:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 4:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 3:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 2:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 1:
return 'K'
if discipline == 0 and parameterCategory == 0 and parameterNumber == 0:
return 'K'
if discipline == 0 and parameterCategory == 14 and parameterNumber == 0:
return 'Dobson'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 7:
return 'm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 6:
return 'm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 5:
return 'Gpm'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 4:
return 'm2/s2'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 3:
return 'm'
if discipline == 0 and parameterCategory == 2 and parameterNumber == 14:
return 'K*m2 / kg / s'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 2:
return 'Pa/s'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 1:
return 'Pa'
if discipline == 0 and parameterCategory == 3 and parameterNumber == 0:
return 'Pa'
return wrapped
| 36.478548
| 102
| 0.593974
| 5,657
| 55,265
| 5.801838
| 0.02846
| 0.18281
| 0.141799
| 0.174522
| 0.976296
| 0.974346
| 0.974041
| 0.963865
| 0.903294
| 0.71372
| 0
| 0.058396
| 0.318303
| 55,265
| 1,514
| 103
| 36.502642
| 0.812789
| 0
| 0
| 0.617063
| 0
| 0
| 0.032842
| 0.000597
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001984
| false
| 0
| 0.000992
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ebebc96614196359378784e7e760115dad44e136
| 10,203
|
py
|
Python
|
tinyauth/tests/test_resources_user.py
|
Jc2k/microauth
|
ff7c9a1aa493fe50f7f59f618f3317910551b99d
|
[
"Apache-2.0"
] | 2
|
2018-06-07T18:39:37.000Z
|
2020-05-16T11:08:29.000Z
|
tinyauth/tests/test_resources_user.py
|
Jc2k/microauth
|
ff7c9a1aa493fe50f7f59f618f3317910551b99d
|
[
"Apache-2.0"
] | 2
|
2017-11-19T16:52:01.000Z
|
2018-08-11T10:49:08.000Z
|
tinyauth/tests/test_resources_user.py
|
Jc2k/microauth
|
ff7c9a1aa493fe50f7f59f618f3317910551b99d
|
[
"Apache-2.0"
] | 1
|
2018-05-26T06:03:04.000Z
|
2018-05-26T06:03:04.000Z
|
import base64
import json
from tinyauth.app import db
from tinyauth.models import User
from .base import TestCase
class TestCase(TestCase):
    """Integration tests for the ``/api/v1/users`` resource.

    Each test drives the Flask test client and then checks both the HTTP
    response and the structured audit-log call recorded by the harness.

    NOTE(review): this class shadows the imported ``TestCase`` base from
    ``.base``.  It still subclasses the original (the base name is resolved
    before the ``class`` statement rebinds it), but a distinct name would be
    clearer — kept as-is to preserve test discovery compatibility.
    """

    # Fixed request id the test harness stamps on every request; asserted
    # in every audit-log 'extra' payload below.
    REQUEST_ID = 'a823a206-95a0-4666-b464-93b9f0606d7b'

    @staticmethod
    def _basic_auth(credential):
        """Return a Basic-auth ``headers`` dict for ``b'keyid:password'``."""
        return {
            'Authorization': 'Basic {}'.format(
                base64.b64encode(credential).decode('utf-8')
            ),
        }

    def test_list_users(self):
        """GET /users with valid credentials returns every user."""
        response = self.client.get(
            '/api/v1/users',
            headers=self._basic_auth(b'AKIDEXAMPLE:password'),
        )
        assert response.status_code == 200
        assert json.loads(response.get_data(as_text=True)) == [{
            'groups': [],
            'id': 'charles',
            'username': 'charles',
        }, {
            'groups': [],
            'id': 'freddy',
            'username': 'freddy',
        }]
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'ListUsers'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 200,
        }

    def test_create_user_noauth(self):
        """POST /users without credentials is rejected as unsigned."""
        response = self.client.post(
            '/api/v1/users',
            data=json.dumps({
                'username': 'freddy',
            }),
            content_type='application/json',
        )
        assert response.status_code == 401
        assert json.loads(response.get_data(as_text=True)) == {
            'errors': {
                'authorization': 'UnsignedRequest'
            }
        }
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'CreateUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 401,
            'request.username': 'freddy',
            'errors': {'authorization': 'UnsignedRequest'},
        }

    def test_create_user_with_auth(self):
        """POST /users with credentials creates the user; password is masked in the audit log."""
        response = self.client.post(
            '/api/v1/users',
            data=json.dumps({
                'username': 'mruser',
                'password': 'pAssword',
            }),
            headers=self._basic_auth(b'AKIDEXAMPLE:password'),
            content_type='application/json',
        )
        assert response.status_code == 200
        assert json.loads(response.get_data(as_text=True)) == {'id': 'mruser', 'username': 'mruser', 'groups': []}
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'CreateUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'request.username': 'mruser',
            'http.status': 200,
            'request.password': '********',
        }

    def test_delete_user_with_auth_but_no_perms(self):
        """DELETE /users/<name> with a key lacking permission returns 403."""
        response = self.client.delete(
            '/api/v1/users/charles',
            headers=self._basic_auth(b'AKIDEXAMPLE2:password'),
            content_type='application/json',
        )
        assert response.status_code == 403
        assert json.loads(response.get_data(as_text=True)) == {
            'errors': {
                'authorization': 'NotPermitted',
            }
        }
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'DeleteUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 403,
            'errors': {'authorization': 'NotPermitted'},
            'request.username': 'charles',
        }

    def test_delete_user_with_auth(self):
        """DELETE /users/<name> with permission removes the user (201, empty body)."""
        user = User(username='freddy')
        db.session.add(user)
        db.session.commit()

        response = self.client.delete(
            '/api/v1/users/freddy',
            headers=self._basic_auth(b'AKIDEXAMPLE:password'),
            content_type='application/json',
        )
        assert response.status_code == 201
        assert json.loads(response.get_data(as_text=True)) == {}
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'DeleteUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 201,
            'request.username': 'freddy',
        }

    def test_put_user_with_auth_but_no_perms(self):
        """PUT /users/<name> with a key lacking permission returns 403."""
        response = self.client.put(
            '/api/v1/users/charles',
            data=json.dumps({
                'username': 'freddy',
                'password': 'password',
            }),
            headers=self._basic_auth(b'AKIDEXAMPLE2:password'),
            content_type='application/json',
        )
        assert response.status_code == 403
        assert json.loads(response.get_data(as_text=True)) == {
            'errors': {
                'authorization': 'NotPermitted',
            }
        }
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'UpdateUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 403,
            'errors': {'authorization': 'NotPermitted'},
            'request.username': 'charles',
            # 'request.new-username': 'freddy',
            # 'request.password': '********',
        }

    def test_put_user_with_auth(self):
        """PUT /users/<name> with permission updates the user; password masked in log."""
        user = User(username='freddy')
        db.session.add(user)
        db.session.commit()

        response = self.client.put(
            '/api/v1/users/freddy',
            data=json.dumps({
                'username': 'freddy',
                'password': 'password',
            }),
            headers=self._basic_auth(b'AKIDEXAMPLE:password'),
            content_type='application/json',
        )
        assert response.status_code == 200
        assert json.loads(response.get_data(as_text=True)) == {
            'groups': [],
            'id': 'freddy',
            'username': 'freddy'
        }
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'UpdateUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 200,
            'request.username': 'freddy',
            'request.new-username': 'freddy',
            'request.password': '********',
        }

    def test_get_user_with_auth_but_no_perms(self):
        """GET /users/<name> with a key lacking permission returns 403."""
        response = self.client.get(
            '/api/v1/users/charles',
            headers=self._basic_auth(b'AKIDEXAMPLE2:password'),
            content_type='application/json',
        )
        assert response.status_code == 403
        assert json.loads(response.get_data(as_text=True)) == {
            'errors': {
                'authorization': 'NotPermitted',
            }
        }
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'GetUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 403,
            'errors': {'authorization': 'NotPermitted'},
            'request.username': 'charles',
        }

    def test_get_user_with_auth(self):
        """GET /users/<name> with permission returns the user record."""
        user = User(username='freddy')
        db.session.add(user)
        db.session.commit()

        response = self.client.get(
            '/api/v1/users/freddy',
            headers=self._basic_auth(b'AKIDEXAMPLE:password'),
            content_type='application/json',
        )
        assert response.status_code == 200
        assert json.loads(response.get_data(as_text=True)) == {
            'groups': [],
            'id': 'freddy',
            'username': 'freddy'
        }
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'GetUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 200,
            'request.username': 'freddy',
        }

    def test_get_user_with_auth_but_no_perms_404(self):
        """Missing user + missing permission still yields 403 (no existence leak)."""
        response = self.client.get(
            '/api/v1/users/james',
            headers=self._basic_auth(b'AKIDEXAMPLE2:password'),
            content_type='application/json',
        )
        assert response.status_code == 403
        assert json.loads(response.get_data(as_text=True)) == {
            'errors': {
                'authorization': 'NotPermitted',
            }
        }
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'GetUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 403,
            'errors': {'authorization': 'NotPermitted'},
            'request.username': 'james',
        }

    def test_get_user_with_auth_404(self):
        """GET of a nonexistent user with permission returns 404."""
        response = self.client.get(
            '/api/v1/users/james',
            headers=self._basic_auth(b'AKIDEXAMPLE:password'),
            content_type='application/json',
        )
        assert response.status_code == 404
        args, kwargs = self.audit_log.call_args_list[0]
        assert args[0] == 'GetUser'
        assert kwargs['extra'] == {
            'request-id': self.REQUEST_ID,
            'http.status': 404,
            'request.username': 'james',
        }
| 32.807074
| 114
| 0.505832
| 943
| 10,203
| 5.336161
| 0.099682
| 0.041733
| 0.039348
| 0.052464
| 0.916534
| 0.911169
| 0.899841
| 0.891693
| 0.857313
| 0.841017
| 0
| 0.064161
| 0.350779
| 10,203
| 310
| 115
| 32.912903
| 0.695501
| 0.006371
| 0
| 0.669091
| 0
| 0
| 0.244499
| 0.053577
| 0
| 0
| 0
| 0
| 0.156364
| 1
| 0.04
| false
| 0.054545
| 0.018182
| 0
| 0.061818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
88d9f2b606c6d6da57915ae07aebdbdd3b0f8dcd
| 10,096
|
py
|
Python
|
dawn/test/unit-test/dawn/Optimizer/samples/SetLocationType.py
|
muellch/dawn
|
4fd055df809ce920ca15ffc6137b2be2aed3a2dd
|
[
"MIT"
] | 20
|
2017-09-28T14:23:54.000Z
|
2021-08-23T09:58:26.000Z
|
dawn/test/unit-test/dawn/Optimizer/samples/SetLocationType.py
|
muellch/dawn
|
4fd055df809ce920ca15ffc6137b2be2aed3a2dd
|
[
"MIT"
] | 1,018
|
2017-10-09T13:55:47.000Z
|
2022-03-14T13:16:38.000Z
|
dawn/test/unit-test/dawn/Optimizer/samples/SetLocationType.py
|
muellch/dawn
|
4fd055df809ce920ca15ffc6137b2be2aed3a2dd
|
[
"MIT"
] | 20
|
2017-09-21T10:35:24.000Z
|
2021-01-18T09:24:58.000Z
|
# -*- coding: utf-8 -*-
##===-----------------------------------------------------------------------------*- Python -*-===##
## _
## | |
## __| | __ ___ ___ ___
## / _` |/ _` \ \ /\ / / '_ |
## | (_| | (_| |\ V V /| | | |
## \__,_|\__,_| \_/\_/ |_| |_| - Compiler Toolchain
##
##
## This file is distributed under the MIT License (MIT).
## See LICENSE.txt for details.
##
##===------------------------------------------------------------------------------------------===##
"""Generate input for SetLocationType tests"""
import os
import dawn4py
from dawn4py.serialization import SIR
from dawn4py.serialization import utils as serial_utils
from google.protobuf.json_format import MessageToJson, Parse
def copy_fields():
    """Generate the SIR input for the copy-fields SetLocationType test.

    Builds an unstructured stencil that copies a cell, an edge and a vertex
    field into matching output fields, and serializes it as JSON to
    ``outputfile``.
    """
    outputfile = "../input/test_set_stage_location_type_copy_fields.sir"
    interval = serial_utils.make_interval(
        SIR.Interval.Start, SIR.Interval.End, 0, 0)
    body_ast = serial_utils.make_ast(
        [
            serial_utils.make_assignment_stmt(
                serial_utils.make_field_access_expr("out_cell"),
                serial_utils.make_field_access_expr("in_cell"),
                "=",
            ),
            serial_utils.make_assignment_stmt(
                serial_utils.make_field_access_expr("out_edge"),
                serial_utils.make_field_access_expr("in_edge"),
                "=",
            ),
            serial_utils.make_assignment_stmt(
                serial_utils.make_field_access_expr("out_vertex"),
                serial_utils.make_field_access_expr("in_vertex"),
                "=",
            )
        ]
    )
    vertical_region_stmt = serial_utils.make_vertical_region_decl_stmt(
        body_ast, interval, SIR.VerticalRegion.Forward
    )
    sir = serial_utils.make_sir(
        outputfile,
        SIR.GridType.Value("Unstructured"),
        [
            serial_utils.make_stencil(
                "generated",
                serial_utils.make_ast([vertical_region_stmt]),
                [
                    serial_utils.make_field(
                        "in_cell",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Cell")], 1
                        ),
                    ),
                    serial_utils.make_field(
                        "out_cell",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Cell")], 1
                        ),
                    ),
                    serial_utils.make_field(
                        "in_edge",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Edge")], 1
                        ),
                    ),
                    serial_utils.make_field(
                        "out_edge",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Edge")], 1
                        ),
                    ),
                    serial_utils.make_field(
                        "in_vertex",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Vertex")], 1
                        ),
                    ),
                    serial_utils.make_field(
                        "out_vertex",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Vertex")], 1
                        ),
                    ),
                ],
            ),
        ],
    )
    # Context manager guarantees the handle is closed even if
    # MessageToJson raises (the original open/write/close could leak it).
    with open(outputfile, "w") as f:
        f.write(MessageToJson(sir))
def copy_vars():
    """Generate the SIR input for the copy-vars SetLocationType test.

    Builds an unstructured stencil that declares one local variable per
    location type (cell/edge/vertex) and assigns the corresponding input
    field into it, then serializes the SIR as JSON to ``outputfile``.
    """
    outputfile = "../input/test_set_stage_location_type_copy_vars.sir"
    interval = serial_utils.make_interval(
        SIR.Interval.Start, SIR.Interval.End, 0, 0)
    body_ast = serial_utils.make_ast(
        [
            serial_utils.make_var_decl_stmt(
                serial_utils.make_type(serial_utils.BuiltinType.Float),
                "out_var_cell"),
            serial_utils.make_var_decl_stmt(
                serial_utils.make_type(serial_utils.BuiltinType.Float),
                "out_var_edge"),
            serial_utils.make_var_decl_stmt(
                serial_utils.make_type(serial_utils.BuiltinType.Float),
                "out_var_vertex"),
            serial_utils.make_assignment_stmt(
                serial_utils.make_var_access_expr("out_var_cell"),
                serial_utils.make_field_access_expr("in_cell"),
                "=",
            ),
            serial_utils.make_assignment_stmt(
                serial_utils.make_var_access_expr("out_var_edge"),
                serial_utils.make_field_access_expr("in_edge"),
                "=",
            ),
            serial_utils.make_assignment_stmt(
                serial_utils.make_var_access_expr("out_var_vertex"),
                serial_utils.make_field_access_expr("in_vertex"),
                "=",
            )
        ]
    )
    vertical_region_stmt = serial_utils.make_vertical_region_decl_stmt(
        body_ast, interval, SIR.VerticalRegion.Forward
    )
    sir = serial_utils.make_sir(
        outputfile,
        SIR.GridType.Value("Unstructured"),
        [
            serial_utils.make_stencil(
                "generated",
                serial_utils.make_ast([vertical_region_stmt]),
                [
                    serial_utils.make_field(
                        "in_cell",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Cell")], 1
                        ),
                    ),
                    serial_utils.make_field(
                        "in_edge",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Edge")], 1
                        ),
                    ),
                    serial_utils.make_field(
                        "in_vertex",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Vertex")], 1
                        ),
                    ),
                ],
            ),
        ],
    )
    # Context manager guarantees the handle is closed even if
    # MessageToJson raises (the original open/write/close could leak it).
    with open(outputfile, "w") as f:
        f.write(MessageToJson(sir))
def if_stmt():
    """Generate the SIR input for the if-statement SetLocationType test.

    Builds an unstructured stencil whose body guards a field-to-variable
    copy behind an ``if`` on the local variable, then serializes the SIR
    as JSON to ``outputfile``.
    """
    outputfile = "../input/test_set_stage_location_type_if_stmt.sir"
    interval = serial_utils.make_interval(
        SIR.Interval.Start, SIR.Interval.End, 0, 0)
    # Split the deeply nested make_if_stmt call into named parts for
    # readability; the resulting AST is identical.
    condition = serial_utils.make_expr_stmt(
        serial_utils.make_var_access_expr("out_var_cell"))
    then_block = serial_utils.make_block_stmt(
        serial_utils.make_assignment_stmt(
            serial_utils.make_var_access_expr("out_var_cell"),
            serial_utils.make_field_access_expr("in_cell"),
            "=",
        ))
    body_ast = serial_utils.make_ast(
        [
            serial_utils.make_var_decl_stmt(
                serial_utils.make_type(serial_utils.BuiltinType.Float),
                "out_var_cell"),
            serial_utils.make_if_stmt(condition, then_block),
        ]
    )
    vertical_region_stmt = serial_utils.make_vertical_region_decl_stmt(
        body_ast, interval, SIR.VerticalRegion.Forward
    )
    sir = serial_utils.make_sir(
        outputfile,
        SIR.GridType.Value("Unstructured"),
        [
            serial_utils.make_stencil(
                "generated",
                serial_utils.make_ast([vertical_region_stmt]),
                [
                    serial_utils.make_field(
                        "in_cell",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Cell")], 1
                        ),
                    ),
                ],
            ),
        ],
    )
    # Context manager guarantees the handle is closed even if
    # MessageToJson raises (the original open/write/close could leak it).
    with open(outputfile, "w") as f:
        f.write(MessageToJson(sir))
def function_call():
    """Generate the SIR input for the function-call SetLocationType test.

    Defines a stencil function ``f`` that assigns the literal 2.0 to its
    ``out`` argument, calls it from the stencil body with the ``out_cell``
    field, and serializes the SIR as JSON to ``outputfile``.
    """
    outputfile = "../input/test_set_stage_location_type_function_call.sir"
    interval = serial_utils.make_interval(
        SIR.Interval.Start, SIR.Interval.End, 0, 0)
    fun_ast = serial_utils.make_ast(
        [
            serial_utils.make_assignment_stmt(
                serial_utils.make_field_access_expr("out"),
                serial_utils.make_literal_access_expr(
                    value="2.0", type=serial_utils.BuiltinType.Float),
                "=",
            ),
        ]
    )
    arg_field = serial_utils.make_field(
        "out",
        serial_utils.make_field_dimensions_unstructured(
            [SIR.LocationType.Value("Cell")], 1
        )
    )
    fun = serial_utils.make_stencil_function(
        name='f', asts=[fun_ast], intervals=[interval],
        arguments=[serial_utils.make_stencil_function_arg(arg_field)])
    body_ast = serial_utils.make_ast(
        [
            serial_utils.make_expr_stmt(expr=serial_utils.make_stencil_fun_call_expr(
                callee="f", arguments=[serial_utils.make_field_access_expr("out_cell")])),
        ]
    )
    vertical_region_stmt = serial_utils.make_vertical_region_decl_stmt(
        body_ast, interval, SIR.VerticalRegion.Forward
    )
    sir = serial_utils.make_sir(
        outputfile,
        SIR.GridType.Value("Unstructured"),
        [
            serial_utils.make_stencil(
                "generated",
                serial_utils.make_ast([vertical_region_stmt]),
                [
                    serial_utils.make_field(
                        "out_cell",
                        serial_utils.make_field_dimensions_unstructured(
                            [SIR.LocationType.Value("Cell")], 1
                        ),
                    ),
                ],
            ),
        ],
        functions=[fun]
    )
    # Context manager guarantees the handle is closed even if
    # MessageToJson raises (the original open/write/close could leak it).
    with open(outputfile, "w") as f:
        f.write(MessageToJson(sir))
if __name__ == "__main__":
    # Regenerate every SIR sample consumed by the SetLocationType unit tests.
    copy_fields()
    copy_vars()
    if_stmt()
    function_call()
| 33.879195
| 181
| 0.506438
| 911
| 10,096
| 5.149287
| 0.110867
| 0.225112
| 0.287785
| 0.153485
| 0.874654
| 0.844596
| 0.836922
| 0.820294
| 0.794926
| 0.786399
| 0
| 0.004159
| 0.380844
| 10,096
| 297
| 182
| 33.993266
| 0.746281
| 0.060519
| 0
| 0.697581
| 0
| 0
| 0.070166
| 0.022013
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016129
| false
| 0
| 0.020161
| 0
| 0.03629
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
00479d42a2de6c9ded1f2b9462b84388067e9b02
| 3,468
|
py
|
Python
|
tests/test_lexer/test_punctuation.py
|
vbondarevsky/ones_analyzer
|
ab8bff875192db238ed17c20d61c9fa5b55c3fa8
|
[
"MIT"
] | 12
|
2017-11-23T07:04:13.000Z
|
2022-03-01T21:06:56.000Z
|
tests/test_lexer/test_punctuation.py
|
vbondarevsky/analyzer_test
|
ab8bff875192db238ed17c20d61c9fa5b55c3fa8
|
[
"MIT"
] | 2
|
2017-06-25T21:32:32.000Z
|
2017-11-19T19:05:40.000Z
|
tests/test_lexer/test_punctuation.py
|
vbondarevsky/analyzer_test
|
ab8bff875192db238ed17c20d61c9fa5b55c3fa8
|
[
"MIT"
] | 5
|
2017-11-21T08:24:56.000Z
|
2021-08-17T23:21:18.000Z
|
from analyzer.syntax_kind import SyntaxKind
from tests.utils import TestCaseLexer
class TestLexerPunctuationTokens(TestCaseLexer):
    """Lexer tests: each punctuation character tokenizes to exactly one
    punctuation token followed by the end-of-file token."""

    def _assert_punctuation(self, source, kind):
        """Tokenize ``source`` and assert it yields ``kind`` then EOF,
        with no leading or trailing trivia on either token."""
        self.tokenize_source(source, 2)
        self.assertToken(0, kind, [], [])
        self.assertToken(1, SyntaxKind.EndOfFileToken, [], [])

    def test_tilde_token(self):
        self._assert_punctuation("~", SyntaxKind.TildeToken)

    def test_percent_token(self):
        self._assert_punctuation("%", SyntaxKind.PercentToken)

    def test_asterisk_token(self):
        self._assert_punctuation("*", SyntaxKind.AsteriskToken)

    def test_open_paren_token(self):
        self._assert_punctuation("(", SyntaxKind.OpenParenToken)

    def test_close_paren_token(self):
        self._assert_punctuation(")", SyntaxKind.CloseParenToken)

    def test_minus_token(self):
        self._assert_punctuation("-", SyntaxKind.MinusToken)

    def test_plus_token(self):
        self._assert_punctuation("+", SyntaxKind.PlusToken)

    def test_open_bracket_token(self):
        self._assert_punctuation("[", SyntaxKind.OpenBracketToken)

    def test_close_bracket_token(self):
        self._assert_punctuation("]", SyntaxKind.CloseBracketToken)

    def test_colon_token(self):
        self._assert_punctuation(":", SyntaxKind.ColonToken)

    def test_semicolon_token(self):
        self._assert_punctuation(";", SyntaxKind.SemicolonToken)

    def test_comma_token(self):
        self._assert_punctuation(",", SyntaxKind.CommaToken)

    def test_dot_token(self):
        self._assert_punctuation(".", SyntaxKind.DotToken)

    def test_question_token(self):
        self._assert_punctuation("?", SyntaxKind.QuestionToken)

    def test_slash_token(self):
        self._assert_punctuation("/", SyntaxKind.SlashToken)

    def test_hash_token(self):
        self._assert_punctuation("#", SyntaxKind.HashToken)

    def test_ampersand_token(self):
        self._assert_punctuation("&", SyntaxKind.AmpersandToken)
| 38.533333
| 65
| 0.637543
| 341
| 3,468
| 6.319648
| 0.164223
| 0.236659
| 0.102552
| 0.165661
| 0.812993
| 0.794432
| 0.794432
| 0.532715
| 0.437123
| 0.437123
| 0
| 0.018478
| 0.204152
| 3,468
| 89
| 66
| 38.966292
| 0.762319
| 0
| 0
| 0.239437
| 0
| 0
| 0.004902
| 0
| 0
| 0
| 0
| 0
| 0.478873
| 1
| 0.239437
| false
| 0
| 0.028169
| 0
| 0.28169
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc7a58e02dbacf70fe4cb3f94db71b096ee8a8ab
| 279
|
py
|
Python
|
Lista08/ex011.py
|
Guilherme-Schwann/Listas-de-Exercicios-UFV-CCF-110
|
f306c8dc6385ee8c9580e687afa16a49ace68f95
|
[
"MIT"
] | 2
|
2021-09-05T22:29:33.000Z
|
2021-09-09T00:13:16.000Z
|
Lista08/ex011.py
|
Guilherme-Schwann/Listas-de-Exercicios-UFV-CCF-110
|
f306c8dc6385ee8c9580e687afa16a49ace68f95
|
[
"MIT"
] | null | null | null |
Lista08/ex011.py
|
Guilherme-Schwann/Listas-de-Exercicios-UFV-CCF-110
|
f306c8dc6385ee8c9580e687afa16a49ace68f95
|
[
"MIT"
] | null | null | null |
a = [[int(input('1° matriz: ')) for i in range(5)] for j in range(5)]
b = [[int(input('2° matriz: ')) for i in range(5)] for j in range(5)]
dif = [[0 for i in range(5)] for j in range(5)]
for i in range(5):
for j in range(5):
dif[i][j] = a[i][j] - b[i][j]
print(dif)
| 34.875
| 69
| 0.541219
| 65
| 279
| 2.353846
| 0.261538
| 0.366013
| 0.418301
| 0.359477
| 0.75817
| 0.75817
| 0.75817
| 0.75817
| 0.75817
| 0.75817
| 0
| 0.050926
| 0.225806
| 279
| 7
| 70
| 39.857143
| 0.648148
| 0
| 0
| 0
| 0
| 0
| 0.078853
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
cc888188d8d11567b5fdc918151bf0a380a4060b
| 135
|
py
|
Python
|
ivy/array/gradients.py
|
saurbhc/ivy
|
20b327b4fab543b26ad5a18acf4deddd6e3c804b
|
[
"Apache-2.0"
] | 161
|
2021-01-20T22:11:13.000Z
|
2022-01-09T09:46:33.000Z
|
ivy/array/gradients.py
|
saurbhc/ivy
|
20b327b4fab543b26ad5a18acf4deddd6e3c804b
|
[
"Apache-2.0"
] | 4
|
2021-11-10T17:04:36.000Z
|
2021-11-26T06:40:43.000Z
|
ivy/array/gradients.py
|
saurbhc/ivy
|
20b327b4fab543b26ad5a18acf4deddd6e3c804b
|
[
"Apache-2.0"
] | 8
|
2021-02-17T20:56:33.000Z
|
2022-01-09T16:45:40.000Z
|
# global
import abc
# ToDo: implement all gradient methods here as public class methods
class ArrayWithGradients(abc.ABC):
pass
| 15
| 67
| 0.762963
| 18
| 135
| 5.722222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 135
| 8
| 68
| 16.875
| 0.936364
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ccac0d383dea3fab8788f992e3256484c10c5b00
| 7,657
|
py
|
Python
|
test/unit/test_decorators.py
|
dim-lumigo/aws-secretsmanager-caching-python
|
ec907d5de637724c35e4df108617638462c0ec81
|
[
"Apache-2.0"
] | 92
|
2019-05-07T02:04:50.000Z
|
2022-03-15T03:45:58.000Z
|
test/unit/test_decorators.py
|
dim-lumigo/aws-secretsmanager-caching-python
|
ec907d5de637724c35e4df108617638462c0ec81
|
[
"Apache-2.0"
] | 13
|
2019-05-11T16:04:48.000Z
|
2021-12-27T05:29:54.000Z
|
test/unit/test_decorators.py
|
dim-lumigo/aws-secretsmanager-caching-python
|
ec907d5de637724c35e4df108617638462c0ec81
|
[
"Apache-2.0"
] | 22
|
2019-05-09T05:40:11.000Z
|
2022-01-23T16:39:50.000Z
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
Unit test suite for decorators module
"""
import json
import unittest
import botocore
from aws_secretsmanager_caching.decorators import InjectKeywordedSecretString, InjectSecretString
from aws_secretsmanager_caching.secret_cache import SecretCache
from botocore.stub import Stubber
class TestAwsSecretsManagerCachingInjectKeywordedSecretStringDecorator(unittest.TestCase):
def get_client(self, response={}, versions=None, version_response=None):
client = botocore.session.get_session().create_client('secretsmanager', region_name='us-west-2')
stubber = Stubber(client)
expected_params = {'SecretId': 'test'}
if versions:
response['VersionIdsToStages'] = versions
stubber.add_response('describe_secret', response, expected_params)
if version_response is not None:
stubber.add_response('get_secret_value', version_response)
stubber.activate()
return client
def test_valid_json(self):
secret = {
'username': 'secret_username',
'password': 'secret_password'
}
secret_string = json.dumps(secret)
response = {}
versions = {
'01234567890123456789012345678901': ['AWSCURRENT']
}
version_response = {'SecretString': secret_string}
cache = SecretCache(client=self.get_client(response, versions, version_response))
@InjectKeywordedSecretString(secret_id='test', cache=cache, func_username='username', func_password='password')
def function_to_be_decorated(func_username, func_password, keyworded_argument='foo'):
self.assertEqual(secret['username'], func_username)
self.assertEqual(secret['password'], func_password)
self.assertEqual(keyworded_argument, 'foo')
return 'OK'
self.assertEqual(function_to_be_decorated(), 'OK')
def test_valid_json_with_mixed_args(self):
secret = {
'username': 'secret_username',
'password': 'secret_password'
}
secret_string = json.dumps(secret)
response = {}
versions = {
'01234567890123456789012345678901': ['AWSCURRENT']
}
version_response = {'SecretString': secret_string}
cache = SecretCache(client=self.get_client(response, versions, version_response))
@InjectKeywordedSecretString(secret_id='test', cache=cache, arg2='username', arg3='password')
def function_to_be_decorated(arg1, arg2, arg3, arg4='bar'):
self.assertEqual(arg1, 'foo')
self.assertEqual(secret['username'], arg2)
self.assertEqual(secret['password'], arg3)
self.assertEqual(arg4, 'bar')
function_to_be_decorated('foo')
def test_valid_json_with_no_secret_kwarg(self):
secret = {
'username': 'secret_username',
'password': 'secret_password'
}
secret_string = json.dumps(secret)
response = {}
versions = {
'01234567890123456789012345678901': ['AWSCURRENT']
}
version_response = {'SecretString': secret_string}
cache = SecretCache(client=self.get_client(response, versions, version_response))
@InjectKeywordedSecretString('test', cache=cache, func_username='username', func_password='password')
def function_to_be_decorated(func_username, func_password, keyworded_argument='foo'):
self.assertEqual(secret['username'], func_username)
self.assertEqual(secret['password'], func_password)
self.assertEqual(keyworded_argument, 'foo')
function_to_be_decorated()
def test_invalid_json(self):
secret = 'not json'
response = {}
versions = {
'01234567890123456789012345678901': ['AWSCURRENT']
}
version_response = {'SecretString': secret}
cache = SecretCache(client=self.get_client(response, versions, version_response))
with self.assertRaises((RuntimeError, json.decoder.JSONDecodeError)):
@InjectKeywordedSecretString(secret_id='test', cache=cache, func_username='username',
func_passsword='password')
def function_to_be_decorated(func_username, func_password, keyworded_argument='foo'):
return
function_to_be_decorated()
def test_missing_key(self):
secret = {'username': 'secret_username'}
secret_string = json.dumps(secret)
response = {}
versions = {
'01234567890123456789012345678901': ['AWSCURRENT']
}
version_response = {'SecretString': secret_string}
cache = SecretCache(client=self.get_client(response, versions, version_response))
with self.assertRaises((RuntimeError, ValueError)):
@InjectKeywordedSecretString(secret_id='test', cache=cache, func_username='username',
func_passsword='password')
def function_to_be_decorated(func_username, func_password, keyworded_argument='foo'):
return
function_to_be_decorated()
class TestAwsSecretsManagerCachingInjectSecretStringDecorator(unittest.TestCase):
def get_client(self, response={}, versions=None, version_response=None):
client = botocore.session.get_session().create_client('secretsmanager', region_name='us-west-2')
stubber = Stubber(client)
expected_params = {'SecretId': 'test'}
if versions:
response['VersionIdsToStages'] = versions
stubber.add_response('describe_secret', response, expected_params)
if version_response is not None:
stubber.add_response('get_secret_value', version_response)
stubber.activate()
return client
def test_string(self):
secret = 'not json'
response = {}
versions = {
'01234567890123456789012345678901': ['AWSCURRENT']
}
version_response = {'SecretString': secret}
cache = SecretCache(client=self.get_client(response, versions, version_response))
@InjectSecretString('test', cache)
def function_to_be_decorated(arg1, arg2, arg3):
self.assertEqual(arg1, secret)
self.assertEqual(arg2, 'foo')
self.assertEqual(arg3, 'bar')
return 'OK'
self.assertEqual(function_to_be_decorated('foo', 'bar'), 'OK')
def test_string_with_additional_kwargs(self):
secret = 'not json'
response = {}
versions = {
'01234567890123456789012345678901': ['AWSCURRENT']
}
version_response = {'SecretString': secret}
cache = SecretCache(client=self.get_client(response, versions, version_response))
@InjectSecretString('test', cache)
def function_to_be_decorated(arg1, arg2, arg3):
self.assertEqual(arg1, secret)
self.assertEqual(arg2, 'foo')
self.assertEqual(arg3, 'bar')
function_to_be_decorated(arg2='foo', arg3='bar')
| 39.469072
| 119
| 0.659788
| 750
| 7,657
| 6.518667
| 0.193333
| 0.061362
| 0.034363
| 0.060135
| 0.786459
| 0.754756
| 0.74596
| 0.74596
| 0.720597
| 0.720597
| 0
| 0.044239
| 0.238344
| 7,657
| 193
| 120
| 39.673575
| 0.794067
| 0.075095
| 0
| 0.722222
| 0
| 0
| 0.134362
| 0.031715
| 0
| 0
| 0
| 0
| 0.138889
| 1
| 0.111111
| false
| 0.104167
| 0.041667
| 0.013889
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
aec0b84bf3146ec05fe151d897c36932b0717de8
| 191
|
py
|
Python
|
kngetx/__init__.py
|
urain39/KngetPyX
|
f1b76fdab6339880ec004621c36618e6a1596167
|
[
"MIT"
] | 4
|
2018-08-07T06:04:19.000Z
|
2018-09-27T13:44:05.000Z
|
knget/__init__.py
|
urain39/KngetPy
|
00986bc16a497cee08aceb1c072f6187f152ee5d
|
[
"MIT"
] | 21
|
2018-06-07T12:47:05.000Z
|
2019-05-06T03:22:56.000Z
|
knget/__init__.py
|
urain39/KngetPy
|
00986bc16a497cee08aceb1c072f6187f152ee5d
|
[
"MIT"
] | 5
|
2018-06-04T09:32:05.000Z
|
2019-01-22T13:37:27.000Z
|
# -*- coding: utf-8 -*-
from .__version__ import __author__
from .__version__ import __email__
from .__version__ import __version__
from .__version__ import __license__
from .base import *
| 21.222222
| 36
| 0.780105
| 22
| 191
| 5.318182
| 0.454545
| 0.376068
| 0.581197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006098
| 0.141361
| 191
| 8
| 37
| 23.875
| 0.707317
| 0.109948
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9d6958e2aa02b4a98950aadc37dea0bd55e95515
| 101
|
py
|
Python
|
codigos/teste/teste.py
|
lucastheo/servidor-de-dados
|
0aa2df18a8202d32d6a4430090492fb0249e0904
|
[
"Apache-2.0"
] | null | null | null |
codigos/teste/teste.py
|
lucastheo/servidor-de-dados
|
0aa2df18a8202d32d6a4430090492fb0249e0904
|
[
"Apache-2.0"
] | null | null | null |
codigos/teste/teste.py
|
lucastheo/servidor-de-dados
|
0aa2df18a8202d32d6a4430090492fb0249e0904
|
[
"Apache-2.0"
] | null | null | null |
#execucao_direta 1 python3 {esse-arquivo}
#programado_batch 1 python3 {esse-arquivo}
print("INICIO")
| 25.25
| 42
| 0.792079
| 14
| 101
| 5.571429
| 0.714286
| 0.205128
| 0.307692
| 0.487179
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0.089109
| 101
| 4
| 43
| 25.25
| 0.804348
| 0.80198
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
9d7337b8b7b636473793ff8b700bc0e93eeb4c57
| 243
|
py
|
Python
|
com_blacktensor/ext/__init__.py
|
Jelly6489/Stock-Proj
|
3e7b1ad5cddc5b142f0069e024199fe969c7c7e8
|
[
"MIT"
] | null | null | null |
com_blacktensor/ext/__init__.py
|
Jelly6489/Stock-Proj
|
3e7b1ad5cddc5b142f0069e024199fe969c7c7e8
|
[
"MIT"
] | null | null | null |
com_blacktensor/ext/__init__.py
|
Jelly6489/Stock-Proj
|
3e7b1ad5cddc5b142f0069e024199fe969c7c7e8
|
[
"MIT"
] | 2
|
2020-11-13T08:11:04.000Z
|
2020-11-14T05:32:09.000Z
|
from datetime import datetime
print('=================================================================')
print(f'com_blackTensor_api.ext init. time : {datetime.now()}')
print('=================================================================')
| 60.75
| 74
| 0.341564
| 16
| 243
| 5.0625
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041152
| 243
| 4
| 75
| 60.75
| 0.347639
| 0
| 0
| 0.5
| 0
| 0
| 0.75
| 0.627049
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.75
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
9de15b50390b09b3ea615e001065d1cd8c550f90
| 121
|
py
|
Python
|
TDD - HVAC/hvac_Exercise/test/test_EnvironmentController.py
|
musen-rse/examples_python
|
75f2afb94c9ee2e1f1333638d186c2c3c3a96436
|
[
"MIT"
] | 1
|
2022-01-23T14:02:47.000Z
|
2022-01-23T14:02:47.000Z
|
TDD - HVAC/hvac_Exercise/test/test_EnvironmentController.py
|
musen-rse/examples_python
|
75f2afb94c9ee2e1f1333638d186c2c3c3a96436
|
[
"MIT"
] | 1
|
2021-09-12T07:00:30.000Z
|
2021-09-12T07:00:30.000Z
|
TDD - HVAC/hvac_Exercise/test/test_EnvironmentController.py
|
musen-rse/examples_python
|
75f2afb94c9ee2e1f1333638d186c2c3c3a96436
|
[
"MIT"
] | 1
|
2022-01-23T14:02:29.000Z
|
2022-01-23T14:02:29.000Z
|
import unittest
from src.EnvironmentController import EnvironmentController
from src.EnvironmentController import HVAC
| 20.166667
| 59
| 0.884298
| 12
| 121
| 8.916667
| 0.5
| 0.130841
| 0.523364
| 0.635514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099174
| 121
| 5
| 60
| 24.2
| 0.981651
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9dffdbb21df304fbca6eeddbed72bef98723603f
| 1,596
|
py
|
Python
|
teachers/migrations/0003_auto_20210109_1312.py
|
18praneeth/udayagiri-scl-maxo
|
67ac939265d7837e39329162d7dd935a52130978
|
[
"MIT"
] | 8
|
2021-01-01T17:04:45.000Z
|
2021-06-24T05:53:13.000Z
|
teachers/migrations/0003_auto_20210109_1312.py
|
18praneeth/udayagiri-scl-maxo
|
67ac939265d7837e39329162d7dd935a52130978
|
[
"MIT"
] | 11
|
2021-01-01T15:04:04.000Z
|
2021-01-10T07:47:12.000Z
|
teachers/migrations/0003_auto_20210109_1312.py
|
18praneeth/udayagiri-scl-maxo
|
67ac939265d7837e39329162d7dd935a52130978
|
[
"MIT"
] | 7
|
2020-12-14T12:44:17.000Z
|
2021-01-15T14:29:13.000Z
|
# Generated by Django 3.1.5 on 2021-01-09 07:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('teachers', '0002_remove_teacher_rate'),
]
operations = [
migrations.AddField(
model_name='teacher',
name='facebook_url',
field=models.CharField(blank=True, max_length=400),
),
migrations.AddField(
model_name='teacher',
name='instagram_url',
field=models.CharField(blank=True, max_length=400),
),
migrations.AddField(
model_name='teacher',
name='linkedin_url',
field=models.CharField(blank=True, max_length=400),
),
migrations.AddField(
model_name='teacher',
name='rating',
field=models.CharField(blank=True, max_length=300),
),
migrations.AddField(
model_name='teacher',
name='requests',
field=models.CharField(blank=True, max_length=300),
),
migrations.AddField(
model_name='teacher',
name='salary_annual',
field=models.CharField(blank=True, max_length=300),
),
migrations.AddField(
model_name='teacher',
name='salary_monthly',
field=models.CharField(blank=True, max_length=300),
),
migrations.AddField(
model_name='teacher',
name='twitter_url',
field=models.CharField(blank=True, max_length=400),
),
]
| 29.555556
| 63
| 0.558897
| 155
| 1,596
| 5.593548
| 0.303226
| 0.16609
| 0.212226
| 0.249135
| 0.756632
| 0.756632
| 0.712803
| 0.712803
| 0.712803
| 0.662053
| 0
| 0.039889
| 0.324561
| 1,596
| 53
| 64
| 30.113208
| 0.764378
| 0.028195
| 0
| 0.680851
| 1
| 0
| 0.114267
| 0.015494
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021277
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ae8ff1294d078c0f04e2e8f5abe65a299b8853c9
| 76,270
|
py
|
Python
|
tests/payment_manager_test.py
|
phillipgreenii/loan_payoff_tools
|
4ffb8a83f7fe6bf7eb37eb7165b3959422d3a515
|
[
"MIT"
] | null | null | null |
tests/payment_manager_test.py
|
phillipgreenii/loan_payoff_tools
|
4ffb8a83f7fe6bf7eb37eb7165b3959422d3a515
|
[
"MIT"
] | 3
|
2015-05-03T02:16:49.000Z
|
2015-05-08T21:25:01.000Z
|
tests/payment_manager_test.py
|
phillipgreenii/loan_payoff_tools
|
4ffb8a83f7fe6bf7eb37eb7165b3959422d3a515
|
[
"MIT"
] | null | null | null |
'''
loan_payoff_tools: Test module.
Meant for use with py.test.
Write each test as a function named test_<something>.
Read more here: http://pytest.org/
Copyright 2014, Phillip Green II
Licensed under MIT
'''
import unittest
from datetime import date
from loan_payoff_tools.payment_manager import Account
from loan_payoff_tools.payment_manager import MinimumPaymentManager
from loan_payoff_tools.payment_manager import PayMostInterestPaymentPaymentManager
from loan_payoff_tools.payment_manager import PayLeastInterestPaymentPaymentManager
from loan_payoff_tools.payment_manager import SmallestDebtPaymentManager
from loan_payoff_tools.payment_manager import BiggestDebtPaymentManager
from loan_payoff_tools.payment_manager import WeightedSplitPaymentManager
from loan_payoff_tools.payment_manager import EvenSplitPaymentManager
from loan_payoff_tools.payment_manager import SpecifiedSplitPaymentManager
from loan_payoff_tools.max_payment_determiner import ConstantMaxPaymentDeterminer
from loan_payoff_tools.money import Money
import loan_payoff_tools.money as money
class PaymentManagerMakePaymentsTestCase(unittest.TestCase):
def assertMaxTotalPaymentNotExceeded(self, payments):
fudge_factory = Money("0.01")
self.assertLessEqual(sum(payments.values(), Money(0)), self.max_total_payment + fudge_factory)
def assertTotalBalanceNotExceeded(self, payments, initial_account_to_balances):
fudge_factory = Money("0.01")
self.assertLessEqual(sum(payments.values(), Money(0)), sum(initial_account_to_balances.values(), Money(0)) + fudge_factory)
def assertMinimumPayments(self, payments, initial_account_to_balances):
def min_for(a):
return min(a.minimum_payment, initial_account_to_balances[a])
improper_payment_accounts = [a for a, p in payments.items() if p < min_for(a)]
if len(improper_payment_accounts) > 0:
messages = ["\tpayment ({}) for {} is less than minimum ({})".format(payments[a], a, min_for(a))
for a in improper_payment_accounts]
self.fail("Minimum Payments not met\n" + "\n".join(messages))
class TestMinimumPaymentManager(PaymentManagerMakePaymentsTestCase):
def setUp(self):
self.max_total_payment = Money(1000)
self.payment_manager = MinimumPaymentManager()
def test_make_payments_should_pay_minimum(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.02, 55.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 6000, 0.04, 70.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 7000, 0.03, 60.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(100.00), account1: Money(100.00), account2: Money(100.00)}
expected_payments = {account0: Money(55.00), account1: Money(70.00), account2: Money(60.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_pay_minimum(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.02, 55.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 6000, 0.04, 70.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 7000, 0.03, 60.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(100.00), account1: Money(100.00), account2: Money(100.00)}
expected_payments = {account0: Money(55.00), account1: Money(70.00), account2: Money(60.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_pay_no_more_than_current_balance(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.02, 55.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 6000, 0.04, 70.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 7000, 0.03, 60.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(100.00), account1: Money(25.00), account2: Money(45.00)}
expected_payments = {account0: Money(55.00), account1: Money(25.00), account2: Money(45.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_pay_no_more_than_current_balance(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.02, 55.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 6000, 0.04, 70.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 7000, 0.03, 60.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(100.00), account1: Money(25.00), account2: Money(45.00)}
expected_payments = {account0: Money(55.00), account1: Money(25.00), account2: Money(45.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
class TestPayMostInterestPaymentPaymentManager(PaymentManagerMakePaymentsTestCase):
def setUp(self):
self.max_total_payment = Money(1000)
self.payment_manager = PayMostInterestPaymentPaymentManager()
def test_make_payments_should_order_by_debtor_id_debtee_when_identical_accounts_and_balances(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(4500.00), account1: Money(4500.00), account2: Money(4500.00)}
expected_payments = {account0: Money(900.00), account1: Money(50.00), account2: Money(50.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_order_by_debtor_id_debtee_when_identical_accounts_and_balances(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(4500.00), account1: Money(4500.00), account2: Money(4500.00)}
expected_payments = {account0: Money(1000.00), account1: Money(0.00), account2: Money(0.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_order_by_highest_balance_when_identical_accounts(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(4500.00), account1: Money(4800.00), account2: Money(4500.00)}
expected_payments = {account0: Money(50.00), account1: Money(900.00), account2: Money(50.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_order_by_highest_balance_when_identical_accounts(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(4500.00), account1: Money(4800.00), account2: Money(4500.00)}
expected_payments = {account0: Money(0.00), account1: Money(1000.00), account2: Money(0.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_order_by_highest_interest_when_identical_balances(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.04, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(4500.00), account1: Money(4500.00), account2: Money(4500.00)}
expected_payments = {account0: Money(900.00), account1: Money(50.00), account2: Money(50.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_order_by_highest_interest_when_identical_balances(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.04, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(4500.00), account1: Money(4500.00), account2: Money(4500.00)}
expected_payments = {account0: Money(1000.00), account1: Money(0.00), account2: Money(0.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_order_by_weighted_interest_and_balance_when_different(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.04, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.02, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(500.00), account1: Money(4750.00), account2: Money(4900.00)}
expected_payments = {account0: Money(50.00), account1: Money(900.00), account2: Money(50.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_order_by_weighted_interest_and_balance_when_different(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.04, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.02, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(500.00), account1: Money(4750.00), account2: Money(4900.00)}
expected_payments = {account0: Money(0.00), account1: Money(1000.00), account2: Money(0.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_split_excess_if_account_becomes_paid_off(self):
account0 = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
account1 = Account("Bank0", "01", "Joe", 5000, 0.04, 50.00, date(2014, 5, 1))
account2 = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, date(2014, 5, 1))
accounts_to_balances = {account0: Money(300.00), account1: Money(200.00), account2: Money(200.00)}
expected_payments = {account0: Money(300.00), account1: Money(200.00), account2: Money(200.00)}
payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
self.assertEqual(payments, expected_payments)
self.assertMaxTotalPaymentNotExceeded(payments)
self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off(self):
    """Even with minimums ignored, accounts below the budget are paid off in full."""
    opened = date(2014, 5, 1)
    accounts = [
        Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened),
        Account("Bank0", "01", "Joe", 5000, 0.04, 50.00, opened),
        Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened),
    ]
    accounts_to_balances = dict(zip(accounts, (Money(300.00), Money(200.00), Money(200.00))))
    # Expected payments equal the balances: everything gets paid off.
    expected_payments = dict(zip(accounts, (Money(300.00), Money(200.00), Money(200.00))))
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_split_excess_if_account_becomes_paid_off_in_order(self):
    """Excess from paying off one account cascades to the next while minimums hold."""
    opened = date(2014, 5, 1)
    accounts = [
        Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened),
        Account("Bank0", "01", "Joe", 5000, 0.04, 50.00, opened),
        Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened),
    ]
    accounts_to_balances = dict(zip(accounts, (Money(500.00), Money(500.00), Money(100.00))))
    expected_payments = dict(zip(accounts, (Money(450.00), Money(500.00), Money(50.00))))
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
    self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off_in_order(self):
    """Ignoring minimums, the cascade pays off accounts 0 and 1 and skips account2."""
    opened = date(2014, 5, 1)
    accounts = [
        Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened),
        Account("Bank0", "01", "Joe", 5000, 0.04, 50.00, opened),
        Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened),
    ]
    accounts_to_balances = dict(zip(accounts, (Money(500.00), Money(500.00), Money(100.00))))
    expected_payments = dict(zip(accounts, (Money(500.00), Money(500.00), Money(0.00))))
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
class TestPayLeastInterestPaymentPaymentManager(PaymentManagerMakePaymentsTestCase):
    """Ordering and excess-splitting tests for PayLeastInterestPaymentPaymentManager.

    Each test builds three accounts that differ only where the scenario
    requires, runs the manager against the fixed 1000 budget, and checks
    the exact split plus the shared invariants from the base test case.
    The data is unchanged from the original tests; the repeated setup and
    assertion boilerplate is factored into two private helpers.
    """

    def setUp(self):
        self.max_total_payment = Money(1000)
        self.payment_manager = PayLeastInterestPaymentPaymentManager()

    def _make_accounts(self, interests=(0.03, 0.03, 0.03)):
        """Return the three canonical accounts, varying only the interest rates.

        The (debtor, id) pairs are fixed — ("Bank0","00"), ("Bank0","01"),
        ("Bank1","00") — so the debtor/id tie-breaking order is stable.
        """
        opened = date(2014, 5, 1)
        keys = (("Bank0", "00"), ("Bank0", "01"), ("Bank1", "00"))
        return [Account(debtor, number, "Joe", 5000, rate, 50.00, opened)
                for (debtor, number), rate in zip(keys, interests)]

    def _run_and_check(self, accounts, balances, expected, ignore_minimums):
        """Run the manager and assert the expected split plus shared invariants.

        balances and expected are plain amounts, positionally matching
        accounts. Minimum-payment compliance is only asserted when
        minimums are honored (ignore_minimums is False), mirroring the
        original per-test assertion pattern.
        """
        accounts_to_balances = {a: Money(b) for a, b in zip(accounts, balances)}
        expected_payments = {a: Money(e) for a, e in zip(accounts, expected)}
        payments = self.payment_manager(self.max_total_payment, accounts_to_balances, ignore_minimums)
        self.assertEqual(payments, expected_payments)
        self.assertMaxTotalPaymentNotExceeded(payments)
        self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
        if not ignore_minimums:
            self.assertMinimumPayments(payments, accounts_to_balances)

    def test_make_payments_should_order_by_debtor_id_debtee_when_identical_accounts_and_balances(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4500.00, 4500.00), (900.00, 50.00, 50.00), False)

    def test_make_payments_with_ignored_minimums_should_order_by_debtor_id_debtee_when_identical_accounts_and_balances(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4500.00, 4500.00), (1000.00, 0.00, 0.00), True)

    def test_make_payments_should_order_by_lowest_balance_when_identical_accounts(self):
        self._run_and_check(self._make_accounts(),
                            (4600.00, 4800.00, 4500.00), (50.00, 50.00, 900.00), False)

    def test_make_payments_with_ignored_minimums_should_order_by_lowest_balance_when_identical_accounts(self):
        self._run_and_check(self._make_accounts(),
                            (4600.00, 4800.00, 4500.00), (0.00, 0.00, 1000.00), True)

    def test_make_payments_should_order_by_lowest_interest_when_identical_balances(self):
        self._run_and_check(self._make_accounts(interests=(0.02, 0.03, 0.03)),
                            (4500.00, 4500.00, 4500.00), (900.00, 50.00, 50.00), False)

    def test_make_payments_with_ignored_minimums_should_order_by_lowest_interest_when_identical_balances(self):
        self._run_and_check(self._make_accounts(interests=(0.02, 0.03, 0.03)),
                            (4500.00, 4500.00, 4500.00), (1000.00, 0.00, 0.00), True)

    def test_make_payments_should_order_by_weighted_interest_and_balance_when_different(self):
        self._run_and_check(self._make_accounts(interests=(0.04, 0.03, 0.02)),
                            (2500.00, 3000.00, 4900.00), (50.00, 900.00, 50.00), False)

    def test_make_payments_with_ignored_minimums_should_order_by_weighted_interest_and_balance_when_different(self):
        self._run_and_check(self._make_accounts(interests=(0.04, 0.03, 0.02)),
                            (2500.00, 3000.00, 4900.00), (0.00, 1000.00, 0.00), True)

    def test_make_payments_should_split_excess_if_account_becomes_paid_off(self):
        self._run_and_check(self._make_accounts(interests=(0.03, 0.04, 0.03)),
                            (300.00, 200.00, 200.00), (300.00, 200.00, 200.00), False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off(self):
        self._run_and_check(self._make_accounts(interests=(0.03, 0.04, 0.03)),
                            (300.00, 200.00, 200.00), (300.00, 200.00, 200.00), True)

    def test_make_payments_should_split_excess_if_account_becomes_paid_off_in_order(self):
        self._run_and_check(self._make_accounts(interests=(0.03, 0.04, 0.03)),
                            (500.00, 500.00, 100.00), (500.00, 400.00, 100.00), False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off_in_order(self):
        self._run_and_check(self._make_accounts(interests=(0.03, 0.04, 0.03)),
                            (500.00, 500.00, 100.00), (500.00, 400.00, 100.00), True)
class TestSmallestDebtPaymentManager(PaymentManagerMakePaymentsTestCase):
    """Ordering and excess-splitting tests for SmallestDebtPaymentManager.

    All scenarios use three accounts identical except for their balances;
    only the balance/expected-payment data varies per test. The repeated
    setup and assertion boilerplate is factored into two private helpers;
    the test data is unchanged from the original tests.
    """

    def setUp(self):
        self.max_total_payment = Money(1000)
        self.payment_manager = SmallestDebtPaymentManager()

    def _make_accounts(self):
        """Return three identical accounts with fixed (debtor, id) pairs:
        ("Bank0","00"), ("Bank0","01"), ("Bank1","00") — stable tie-break order."""
        opened = date(2014, 5, 1)
        keys = (("Bank0", "00"), ("Bank0", "01"), ("Bank1", "00"))
        return [Account(debtor, number, "Joe", 5000, 0.03, 50.00, opened)
                for debtor, number in keys]

    def _run_and_check(self, accounts, balances, expected, ignore_minimums):
        """Run the manager and assert the expected split plus shared invariants.

        balances and expected are plain amounts, positionally matching
        accounts. Minimum-payment compliance is only asserted when
        minimums are honored (ignore_minimums is False).
        """
        accounts_to_balances = {a: Money(b) for a, b in zip(accounts, balances)}
        expected_payments = {a: Money(e) for a, e in zip(accounts, expected)}
        payments = self.payment_manager(self.max_total_payment, accounts_to_balances, ignore_minimums)
        self.assertEqual(payments, expected_payments)
        self.assertMaxTotalPaymentNotExceeded(payments)
        self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
        if not ignore_minimums:
            self.assertMinimumPayments(payments, accounts_to_balances)

    def test_make_payments_should_order_by_debtor_id_debtee_when_identical_balances(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4500.00, 4500.00), (900.00, 50.00, 50.00), False)

    def test_make_payments_with_ignored_minimums_should_order_by_debtor_id_debtee_when_identical_balances(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4500.00, 4500.00), (1000.00, 0.00, 0.00), True)

    def test_make_payments_should_order_by_lowest_balance(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4300.00, 4500.00), (50.00, 900.00, 50.00), False)

    def test_make_payments_with_ignored_minimums_should_order_by_lowest_balance(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4300.00, 4500.00), (0.00, 1000.00, 0.00), True)

    def test_make_payments_should_split_excess_if_account_becomes_paid_off(self):
        self._run_and_check(self._make_accounts(),
                            (300.00, 200.00, 100.00), (300.00, 200.00, 100.00), False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off(self):
        self._run_and_check(self._make_accounts(),
                            (300.00, 200.00, 100.00), (300.00, 200.00, 100.00), True)

    def test_make_payments_should_split_excess_if_account_becomes_paid_off_in_order(self):
        self._run_and_check(self._make_accounts(),
                            (600.00, 400.00, 100.00), (500.00, 400.00, 100.00), False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off_in_order(self):
        self._run_and_check(self._make_accounts(),
                            (600.00, 400.00, 100.00), (500.00, 400.00, 100.00), True)
class TestBiggestDebtPaymentManager(PaymentManagerMakePaymentsTestCase):
    """Ordering and excess-splitting tests for BiggestDebtPaymentManager.

    All scenarios use three accounts identical except for their balances;
    only the balance/expected-payment data varies per test. The repeated
    setup and assertion boilerplate is factored into two private helpers;
    the test data is unchanged from the original tests.
    """

    def setUp(self):
        self.max_total_payment = Money(1000)
        self.payment_manager = BiggestDebtPaymentManager()

    def _make_accounts(self):
        """Return three identical accounts with fixed (debtor, id) pairs:
        ("Bank0","00"), ("Bank0","01"), ("Bank1","00") — stable tie-break order."""
        opened = date(2014, 5, 1)
        keys = (("Bank0", "00"), ("Bank0", "01"), ("Bank1", "00"))
        return [Account(debtor, number, "Joe", 5000, 0.03, 50.00, opened)
                for debtor, number in keys]

    def _run_and_check(self, accounts, balances, expected, ignore_minimums):
        """Run the manager and assert the expected split plus shared invariants.

        balances and expected are plain amounts, positionally matching
        accounts. Minimum-payment compliance is only asserted when
        minimums are honored (ignore_minimums is False).
        """
        accounts_to_balances = {a: Money(b) for a, b in zip(accounts, balances)}
        expected_payments = {a: Money(e) for a, e in zip(accounts, expected)}
        payments = self.payment_manager(self.max_total_payment, accounts_to_balances, ignore_minimums)
        self.assertEqual(payments, expected_payments)
        self.assertMaxTotalPaymentNotExceeded(payments)
        self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
        if not ignore_minimums:
            self.assertMinimumPayments(payments, accounts_to_balances)

    def test_make_payments_should_order_by_debtor_id_debtee_when_identical_balances(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4500.00, 4500.00), (900.00, 50.00, 50.00), False)

    def test_make_payments_with_ignored_minimums_should_order_by_debtor_id_debtee_when_identical_balances(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4500.00, 4500.00), (1000.00, 0.00, 0.00), True)

    def test_make_payments_should_order_by_biggest_balance(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4800.00, 4500.00), (50.00, 900.00, 50.00), False)

    def test_make_payments_with_ignored_minimums_should_order_by_biggest_balance(self):
        self._run_and_check(self._make_accounts(),
                            (4500.00, 4800.00, 4500.00), (0.00, 1000.00, 0.00), True)

    def test_make_payments_should_split_excess_if_account_becomes_paid_off(self):
        self._run_and_check(self._make_accounts(),
                            (300.00, 200.00, 100.00), (300.00, 200.00, 100.00), False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off(self):
        self._run_and_check(self._make_accounts(),
                            (300.00, 200.00, 100.00), (300.00, 200.00, 100.00), True)

    def test_make_payments_should_split_excess_if_account_becomes_paid_off_in_order(self):
        self._run_and_check(self._make_accounts(),
                            (600.00, 400.00, 100.00), (600.00, 350.00, 50.00), False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off_in_order(self):
        self._run_and_check(self._make_accounts(),
                            (600.00, 400.00, 100.00), (600.00, 400.00, 0.00), True)
class TestWeightedSplitPaymentManager(PaymentManagerMakePaymentsTestCase):
def setUp(self):
    # Fresh manager under test plus a fixed budget cap for every scenario.
    self.payment_manager = WeightedSplitPaymentManager()
    self.max_total_payment = Money(1000)
def test_make_payments_should_have_same_payments_when_identical(self):
    """Identical accounts and balances get an even three-way split of 333.33 each."""
    opened = date(2014, 5, 1)
    accounts = [
        Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened),
        Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, opened),
        Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened),
    ]
    accounts_to_balances = {a: Money(4500.00) for a in accounts}
    expected_payments = {a: Money(333.33) for a in accounts}
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
    self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_have_same_payments_when_identical(self):
    """Ignoring minimums changes nothing for identical accounts: even 333.33 split."""
    opened = date(2014, 5, 1)
    accounts = [
        Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened),
        Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, opened),
        Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened),
    ]
    accounts_to_balances = {a: Money(4500.00) for a in accounts}
    expected_payments = {a: Money(333.33) for a in accounts}
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_have_same_payments_when_different_interests(self):
    """Interest rates do not affect the split: equal balances still split evenly."""
    opened = date(2014, 5, 1)
    keys = (("Bank0", "00"), ("Bank0", "01"), ("Bank1", "00"))
    rates = (0.03, 0.02, 0.04)
    accounts = [Account(debtor, number, "Joe", 5000, rate, 50.00, opened)
                for (debtor, number), rate in zip(keys, rates)]
    accounts_to_balances = {a: Money(4500.00) for a in accounts}
    expected_payments = {a: Money(333.33) for a in accounts}
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
    self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_have_same_payments_when_different_interests(self):
    """With minimums ignored, differing interest rates still yield an even split."""
    opened = date(2014, 5, 1)
    keys = (("Bank0", "00"), ("Bank0", "01"), ("Bank1", "00"))
    rates = (0.03, 0.02, 0.04)
    accounts = [Account(debtor, number, "Joe", 5000, rate, 50.00, opened)
                for (debtor, number), rate in zip(keys, rates)]
    accounts_to_balances = {a: Money(4500.00) for a in accounts}
    expected_payments = {a: Money(333.33) for a in accounts}
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_weigh_by_balance_when_different_balances(self):
    """With 100.00 minimums honored, larger balances receive proportionally larger payments."""
    opened = date(2014, 5, 1)
    accounts = [
        Account("Bank0", "00", "Joe", 5000, 0.03, 100.00, opened),
        Account("Bank0", "01", "Joe", 5000, 0.03, 100.00, opened),
        Account("Bank1", "00", "Joe", 5000, 0.03, 100.00, opened),
    ]
    accounts_to_balances = dict(zip(accounts, (Money(1000.00), Money(3000.00), Money(4000.00))))
    expected_payments = dict(zip(accounts, (Money(181.82), Money(363.64), Money(454.55))))
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
    self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_weigh_by_balance_when_different_balances(self):
    """Ignoring minimums, payments are exactly proportional to balance (1:3:4 of 1000)."""
    opened = date(2014, 5, 1)
    accounts = [
        Account("Bank0", "00", "Joe", 5000, 0.03, 100.00, opened),
        Account("Bank0", "01", "Joe", 5000, 0.03, 100.00, opened),
        Account("Bank1", "00", "Joe", 5000, 0.03, 100.00, opened),
    ]
    accounts_to_balances = dict(zip(accounts, (Money(1000.00), Money(3000.00), Money(4000.00))))
    expected_payments = dict(zip(accounts, (Money(125), Money(375), Money(500))))
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_have_min_plus_same_payments_when_different_min(self):
    """Different minimum payments shift the split even though balances are equal."""
    opened = date(2014, 5, 1)
    keys = (("Bank0", "00"), ("Bank0", "01"), ("Bank1", "00"))
    minimums = (50.00, 150.00, 200.00)
    accounts = [Account(debtor, number, "Joe", 5000, 0.03, minimum, opened)
                for (debtor, number), minimum in zip(keys, minimums)]
    accounts_to_balances = {a: Money(1000.00) for a in accounts}
    expected_payments = dict(zip(accounts, (Money(269.23), Money(346.15), Money(384.62))))
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, False)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
    self.assertMinimumPayments(payments, accounts_to_balances)
def test_make_payments_with_ignored_minimums_should_have_min_plus_same_payments_when_different_min(self):
    """When minimums are ignored, differing minimums no longer matter: even 333.33 split."""
    opened = date(2014, 5, 1)
    keys = (("Bank0", "00"), ("Bank0", "01"), ("Bank1", "00"))
    minimums = (50.00, 150.00, 200.00)
    accounts = [Account(debtor, number, "Joe", 5000, 0.03, minimum, opened)
                for (debtor, number), minimum in zip(keys, minimums)]
    accounts_to_balances = {a: Money(1000.00) for a in accounts}
    expected_payments = {a: Money(333.33) for a in accounts}
    payments = self.payment_manager(self.max_total_payment, accounts_to_balances, True)
    self.assertEqual(payments, expected_payments)
    self.assertMaxTotalPaymentNotExceeded(payments)
    self.assertTotalBalanceNotExceeded(payments, accounts_to_balances)
def test_make_payments_should_have_min_plus_weight_by_balance_when_different_balance_and_different_min(self):
    """Both balances and minimum payments differ: verify the allocation and
    the shared invariants, including honored minimums."""
    opened = date(2014, 5, 1)
    acct_a = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened)
    acct_b = Account("Bank0", "01", "Joe", 5000, 0.03, 150.00, opened)
    acct_c = Account("Bank1", "00", "Joe", 5000, 0.03, 200.00, opened)
    balances = {acct_a: Money(2000.00), acct_b: Money(1000.00), acct_c: Money(1500.00)}
    expected = {acct_a: Money(335.37), acct_b: Money(274.39), acct_c: Money(390.24)}
    actual = self.payment_manager(self.max_total_payment, balances, False)
    self.assertEqual(actual, expected)
    self.assertMaxTotalPaymentNotExceeded(actual)
    self.assertTotalBalanceNotExceeded(actual, balances)
    self.assertMinimumPayments(actual, balances)
def test_make_payments_with_ignored_minimums_should_have_min_plus_weight_by_balance_when_different_balance_and_different_min(self):
    """With minimums ignored, payments should track balances alone even when
    the accounts' minimum payments differ."""
    opened = date(2014, 5, 1)
    acct_a = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened)
    acct_b = Account("Bank0", "01", "Joe", 5000, 0.03, 150.00, opened)
    acct_c = Account("Bank1", "00", "Joe", 5000, 0.03, 200.00, opened)
    balances = {acct_a: Money(2000.00), acct_b: Money(1000.00), acct_c: Money(1500.00)}
    expected = {acct_a: Money(444.44), acct_b: Money(222.22), acct_c: Money(333.33)}
    actual = self.payment_manager(self.max_total_payment, balances, True)
    self.assertEqual(actual, expected)
    self.assertMaxTotalPaymentNotExceeded(actual)
    self.assertTotalBalanceNotExceeded(actual, balances)
def test_make_payments_should_split_excess_if_account_becomes_paid_off(self):
    """One account's balance is below its would-be share: the excess should be
    redistributed to the remaining accounts."""
    opened = date(2014, 5, 1)
    acct_a = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened)
    acct_b = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, opened)
    acct_c = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened)
    balances = {acct_a: Money(600.00), acct_b: Money(400.00), acct_c: Money(50.00)}
    expected = {acct_a: Money(569.44), acct_b: Money(380.56), acct_c: Money(50.00)}
    actual = self.payment_manager(self.max_total_payment, balances, False)
    self.assertEqual(actual, expected)
    self.assertMaxTotalPaymentNotExceeded(actual)
    self.assertTotalBalanceNotExceeded(actual, balances)
    self.assertMinimumPayments(actual, balances)
def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off(self):
    """Same paid-off scenario with minimums ignored: the redistribution differs
    slightly because no minimum floor applies."""
    opened = date(2014, 5, 1)
    acct_a = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened)
    acct_b = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, opened)
    acct_c = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened)
    balances = {acct_a: Money(600.00), acct_b: Money(400.00), acct_c: Money(50.00)}
    expected = {acct_a: Money(571.43), acct_b: Money(380.95), acct_c: Money(47.62)}
    actual = self.payment_manager(self.max_total_payment, balances, True)
    self.assertEqual(actual, expected)
    self.assertMaxTotalPaymentNotExceeded(actual)
    self.assertTotalBalanceNotExceeded(actual, balances)
def test_make_payments_should_split_excess_if_account_becomes_paid_off_continueally(self):
    """Total balances are below the budget: every account is simply paid off
    in full.  (NOTE(review): 'continueally' typo is in the public test name;
    left as-is to avoid changing the interface.)"""
    opened = date(2014, 5, 1)
    acct_a = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened)
    acct_b = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, opened)
    acct_c = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened)
    balances = {acct_a: Money(300.00), acct_b: Money(200.00), acct_c: Money(100.00)}
    expected = {acct_a: Money(300.00), acct_b: Money(200.00), acct_c: Money(100.00)}
    actual = self.payment_manager(self.max_total_payment, balances, False)
    self.assertEqual(actual, expected)
    self.assertMaxTotalPaymentNotExceeded(actual)
    self.assertTotalBalanceNotExceeded(actual, balances)
    self.assertMinimumPayments(actual, balances)
def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off_continueally(self):
    """Total balances below the budget with minimums ignored: all accounts are
    still paid off in full."""
    opened = date(2014, 5, 1)
    acct_a = Account("Bank0", "00", "Joe", 5000, 0.03, 50.00, opened)
    acct_b = Account("Bank0", "01", "Joe", 5000, 0.03, 50.00, opened)
    acct_c = Account("Bank1", "00", "Joe", 5000, 0.03, 50.00, opened)
    balances = {acct_a: Money(300.00), acct_b: Money(200.00), acct_c: Money(100.00)}
    expected = {acct_a: Money(300.00), acct_b: Money(200.00), acct_c: Money(100.00)}
    actual = self.payment_manager(self.max_total_payment, balances, True)
    self.assertEqual(actual, expected)
    self.assertMaxTotalPaymentNotExceeded(actual)
    self.assertTotalBalanceNotExceeded(actual, balances)
class TestEvenSplitPaymentManager(PaymentManagerMakePaymentsTestCase):
    """Tests for EvenSplitPaymentManager.

    Every scenario builds three accounts, runs the manager against a budget of
    1000, and checks the exact allocation plus the invariants shared by the
    whole suite (budget not exceeded, balances not overpaid, and — when
    minimums are honored — minimum payments met).
    """

    # All fixture accounts share the same owner, limit, and open date.
    _OPEN_DATE = date(2014, 5, 1)

    def setUp(self):
        self.max_total_payment = Money(1000)
        self.payment_manager = EvenSplitPaymentManager()

    def _account(self, bank, number, rate=0.03, minimum=50.00):
        # Shorthand constructor for the Account fixture used by every test here.
        return Account(bank, number, "Joe", 5000, rate, minimum, self._OPEN_DATE)

    def _check(self, balances, expected, ignore_minimums):
        # Run the manager, compare the exact allocation, then apply the shared
        # invariants.  Minimums are only asserted when they are being honored,
        # mirroring the original per-test assertion pattern.
        payments = self.payment_manager(self.max_total_payment, balances, ignore_minimums)
        self.assertEqual(payments, expected)
        self.assertMaxTotalPaymentNotExceeded(payments)
        self.assertTotalBalanceNotExceeded(payments, balances)
        if not ignore_minimums:
            self.assertMinimumPayments(payments, balances)

    def test_make_payments_should_have_same_payments_when_identical(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(4500.00), a1: Money(4500.00), a2: Money(4500.00)}
        expected = {a0: Money(333.33), a1: Money(333.33), a2: Money(333.33)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_have_same_payments_when_identical(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(4500.00), a1: Money(4500.00), a2: Money(4500.00)}
        expected = {a0: Money(333.33), a1: Money(333.33), a2: Money(333.33)}
        self._check(balances, expected, True)

    def test_make_payments_should_have_same_payments_when_different_interests(self):
        a0 = self._account("Bank0", "00", rate=0.04)
        a1 = self._account("Bank0", "01", rate=0.03)
        a2 = self._account("Bank1", "00", rate=0.02)
        balances = {a0: Money(4500.00), a1: Money(4500.00), a2: Money(4500.00)}
        expected = {a0: Money(333.33), a1: Money(333.33), a2: Money(333.33)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_have_same_payments_when_different_interests(self):
        a0 = self._account("Bank0", "00", rate=0.04)
        a1 = self._account("Bank0", "01", rate=0.03)
        a2 = self._account("Bank1", "00", rate=0.02)
        balances = {a0: Money(4500.00), a1: Money(4500.00), a2: Money(4500.00)}
        expected = {a0: Money(333.33), a1: Money(333.33), a2: Money(333.33)}
        self._check(balances, expected, True)

    def test_make_payments_should_have_same_payments_when_different_balances(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(1000.00), a1: Money(3000.00), a2: Money(4000.00)}
        expected = {a0: Money(333.33), a1: Money(333.33), a2: Money(333.33)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_have_same_payments_when_different_balances(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(1000.00), a1: Money(3000.00), a2: Money(4000.00)}
        expected = {a0: Money(333.33), a1: Money(333.33), a2: Money(333.33)}
        self._check(balances, expected, True)

    def test_make_payments_should_have_min_plus_same_payments_when_different_min(self):
        a0 = self._account("Bank0", "00", minimum=50.00)
        a1 = self._account("Bank0", "01", minimum=150.00)
        a2 = self._account("Bank1", "00", minimum=200.00)
        balances = {a0: Money(1000.00), a1: Money(1000.00), a2: Money(1000.00)}
        expected = {a0: Money(250.00), a1: Money(350.00), a2: Money(400.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_have_min_plus_same_payments_when_different_min(self):
        a0 = self._account("Bank0", "00", minimum=50.00)
        a1 = self._account("Bank0", "01", minimum=150.00)
        a2 = self._account("Bank1", "00", minimum=200.00)
        balances = {a0: Money(1000.00), a1: Money(1000.00), a2: Money(1000.00)}
        expected = {a0: Money(333.33), a1: Money(333.33), a2: Money(333.33)}
        self._check(balances, expected, True)

    def test_make_payments_should_split_excess_if_account_becomes_paid_off(self):
        a0 = self._account("Bank0", "00")
        a1 = self._account("Bank0", "01", minimum=100.00)
        a2 = self._account("Bank1", "00")
        balances = {a0: Money(600.00), a1: Money(400.00), a2: Money(50.00)}
        expected = {a0: Money(550.00), a1: Money(400.00), a2: Money(50.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off(self):
        a0 = self._account("Bank0", "00")
        a1 = self._account("Bank0", "01", minimum=100.00)
        a2 = self._account("Bank1", "00")
        balances = {a0: Money(600.00), a1: Money(400.00), a2: Money(50.00)}
        expected = {a0: Money(550.00), a1: Money(400.00), a2: Money(50.00)}
        self._check(balances, expected, True)

    # NOTE(review): 'continueally' is a typo baked into the public test names;
    # kept as-is so the interface (test ids) stays stable.
    def test_make_payments_should_split_excess_if_account_becomes_paid_off_continueally(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(300.00), a1: Money(200.00), a2: Money(100.00)}
        expected = {a0: Money(300.00), a1: Money(200.00), a2: Money(100.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off_continueally(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(300.00), a1: Money(200.00), a2: Money(100.00)}
        expected = {a0: Money(300.00), a1: Money(200.00), a2: Money(100.00)}
        self._check(balances, expected, True)
class TestSpecifiedSplitPaymentManager(PaymentManagerMakePaymentsTestCase):
    """Tests for SpecifiedSplitPaymentManager with a 60/40 split between
    "Bank0" and "Bank1".

    Each scenario builds three accounts (two at Bank0, one at Bank1), runs the
    manager against a budget of 1000, and checks the exact allocation plus the
    suite-wide invariants.
    """

    # All fixture accounts share the same owner, limit, and open date.
    _OPEN_DATE = date(2014, 5, 1)

    def setUp(self):
        self.max_total_payment = Money(1000)
        self.payment_manager = SpecifiedSplitPaymentManager({"Bank0": 0.60, "Bank1": 0.40})

    def _account(self, bank, number, rate=0.03, minimum=50.00):
        # Shorthand constructor for the Account fixture used by every test here.
        return Account(bank, number, "Joe", 5000, rate, minimum, self._OPEN_DATE)

    def _check(self, balances, expected, ignore_minimums):
        # Run the manager, compare the exact allocation, then apply the shared
        # invariants.  Minimums are only asserted when they are being honored,
        # mirroring the original per-test assertion pattern.
        payments = self.payment_manager(self.max_total_payment, balances, ignore_minimums)
        self.assertEqual(payments, expected)
        self.assertMaxTotalPaymentNotExceeded(payments)
        self.assertTotalBalanceNotExceeded(payments, balances)
        if not ignore_minimums:
            self.assertMinimumPayments(payments, balances)

    def test_make_payments_should_have_correct_split_payments_when_identical(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(4500.00), a1: Money(4500.00), a2: Money(4500.00)}
        expected = {a0: Money(305.00), a1: Money(305.00), a2: Money(390.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_have_correct_split_payments_when_identical(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(4500.00), a1: Money(4500.00), a2: Money(4500.00)}
        expected = {a0: Money(300.00), a1: Money(300.00), a2: Money(400.00)}
        self._check(balances, expected, True)

    def test_make_payments_should_have_correct_split_payments_when_different_interests(self):
        a0 = self._account("Bank0", "00", rate=0.04)
        a1 = self._account("Bank0", "01", rate=0.03)
        a2 = self._account("Bank1", "00", rate=0.02)
        balances = {a0: Money(4500.00), a1: Money(4500.00), a2: Money(4500.00)}
        expected = {a0: Money(305.00), a1: Money(305.00), a2: Money(390.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_have_correct_split_payments_when_different_interests(self):
        a0 = self._account("Bank0", "00", rate=0.04)
        a1 = self._account("Bank0", "01", rate=0.03)
        a2 = self._account("Bank1", "00", rate=0.02)
        balances = {a0: Money(4500.00), a1: Money(4500.00), a2: Money(4500.00)}
        expected = {a0: Money(300.00), a1: Money(300.00), a2: Money(400.00)}
        self._check(balances, expected, True)

    def test_make_payments_should_have_correct_split_payments_when_different_balances(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(1000.00), a1: Money(3000.00), a2: Money(4000.00)}
        expected = {a0: Money(174.23), a1: Money(435.77), a2: Money(390.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_have_correct_split_payments_when_different_balances(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(1000.00), a1: Money(3000.00), a2: Money(4000.00)}
        expected = {a0: Money(150.00), a1: Money(450.00), a2: Money(400.00)}
        self._check(balances, expected, True)

    def test_make_payments_should_have_correct_split_payments_when_different_min_payments(self):
        a0 = self._account("Bank0", "00", minimum=50.00)
        a1 = self._account("Bank0", "01", minimum=150.00)
        a2 = self._account("Bank1", "00", minimum=200.00)
        balances = {a0: Money(1000.00), a1: Money(1000.00), a2: Money(1000.00)}
        expected = {a0: Money(240.00), a1: Money(320.00), a2: Money(440.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_have_correct_split_payments_when_different_min_payments(self):
        a0 = self._account("Bank0", "00", minimum=50.00)
        a1 = self._account("Bank0", "01", minimum=150.00)
        a2 = self._account("Bank1", "00", minimum=200.00)
        balances = {a0: Money(1000.00), a1: Money(1000.00), a2: Money(1000.00)}
        expected = {a0: Money(300.00), a1: Money(300.00), a2: Money(400.00)}
        self._check(balances, expected, True)

    def test_make_payments_should_split_excess_if_account_becomes_paid_off(self):
        a0 = self._account("Bank0", "00")
        a1 = self._account("Bank0", "01", minimum=100.00)
        a2 = self._account("Bank1", "00")
        balances = {a0: Money(600.00), a1: Money(400.00), a2: Money(50.00)}
        expected = {a0: Money(567.65), a1: Money(382.35), a2: Money(50.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off(self):
        a0 = self._account("Bank0", "00")
        a1 = self._account("Bank0", "01", minimum=100.00)
        a2 = self._account("Bank1", "00")
        balances = {a0: Money(600.00), a1: Money(400.00), a2: Money(50.00)}
        expected = {a0: Money(570.00), a1: Money(380.00), a2: Money(50.00)}
        self._check(balances, expected, True)

    # NOTE(review): 'continueally' is a typo baked into the public test names;
    # kept as-is so the interface (test ids) stays stable.
    def test_make_payments_should_split_excess_if_account_becomes_paid_off_continueally(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(300.00), a1: Money(200.00), a2: Money(100.00)}
        expected = {a0: Money(300.00), a1: Money(200.00), a2: Money(100.00)}
        self._check(balances, expected, False)

    def test_make_payments_with_ignored_minimums_should_split_excess_if_account_becomes_paid_off_continueally(self):
        a0, a1, a2 = self._account("Bank0", "00"), self._account("Bank0", "01"), self._account("Bank1", "00")
        balances = {a0: Money(300.00), a1: Money(200.00), a2: Money(100.00)}
        expected = {a0: Money(300.00), a1: Money(200.00), a2: Money(100.00)}
        self._check(balances, expected, True)
if __name__ == '__main__':
    # Allow the test module to be run directly as a script.
    unittest.main()
| 58.714396
| 135
| 0.702714
| 9,805
| 76,270
| 5.258032
| 0.023661
| 0.056445
| 0.100204
| 0.052488
| 0.961691
| 0.961614
| 0.959849
| 0.951644
| 0.951644
| 0.951644
| 0
| 0.129422
| 0.164521
| 76,270
| 1,298
| 136
| 58.75963
| 0.67964
| 0.002648
| 0
| 0.898268
| 0
| 0
| 0.033671
| 0
| 0
| 0
| 0
| 0
| 0.316017
| 1
| 0.101732
| false
| 0
| 0.015152
| 0.001082
| 0.127706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
88179989efb660b69e8fecda6260ad99d94697e6
| 11,309
|
py
|
Python
|
Data Science and Machine Learning/Machine-Learning-In-Python-THOROUGH/EXAMPLES/EDABIT/EXPERT/001_100/84_first_n_digits_of_pi_ha_ha_ha_ha.py
|
okara83/Becoming-a-Data-Scientist
|
f09a15f7f239b96b77a2f080c403b2f3e95c9650
|
[
"MIT"
] | null | null | null |
Data Science and Machine Learning/Machine-Learning-In-Python-THOROUGH/EXAMPLES/EDABIT/EXPERT/001_100/84_first_n_digits_of_pi_ha_ha_ha_ha.py
|
okara83/Becoming-a-Data-Scientist
|
f09a15f7f239b96b77a2f080c403b2f3e95c9650
|
[
"MIT"
] | null | null | null |
Data Science and Machine Learning/Machine-Learning-In-Python-THOROUGH/EXAMPLES/EDABIT/EXPERT/001_100/84_first_n_digits_of_pi_ha_ha_ha_ha.py
|
okara83/Becoming-a-Data-Scientist
|
f09a15f7f239b96b77a2f080c403b2f3e95c9650
|
[
"MIT"
] | 2
|
2022-02-09T15:41:33.000Z
|
2022-02-11T07:47:40.000Z
|
"""
https://edabit.com/challenge/BHBXNfeMsA43d8Tys
First n Digits of Pi
As far as we currently know, approximations for the mathematical constant pi (π) in the history of mathematics started surfacing with Ancient Babylonians, who found its correct truncation up to 1 decimal place. During the 5th century, the Chinese mathematician Zu Chongzhi raised it to 7 decimal places and from the 18th century onwards the number of correct pi decimal places has seen steady growth.
Since the middle of the 20th century, the approximation of pi has been the task of electronic digital computers. During the 2019 Pi Day on the 14th of March, the Japanese computer scientist Emma Haruka Iwao released the currently most accurate value of pi with more than 31.4 trillion digits, using 170 Terabytes of data.
Your task is to create a function that takes a positive integer n as an argument and returns the value of pi with its first n decimal digits.
Taylor series are usually used to get finer approximations. To make this challenge approachable to anyone, the following formula is suggested:
Examples
pi(1) ➞ "3.1"
pi(2) ➞ "3.14"
pi(30) ➞ "3.141592653589793238462643383279"
Notes
N/A
"""
def pi(n):
    """Return pi truncated (not rounded) to its first n decimal digits.

    Examples: pi(1) -> "3.1", pi(2) -> "3.14".

    Unlike the previous version, which sliced a hard-coded digit string and
    silently returned a short result once n exceeded the stored digits, this
    computes the digits on demand with pure integer arithmetic via Machin's
    formula  pi = 16*atan(1/5) - 4*atan(1/239),  so any positive n works.
    For all n the hard-coded version handled, the output is identical.

    Parameters:
        n (int): number of decimal digits wanted (n >= 0).
    Returns:
        str: "3." followed by the first n decimal digits of pi.
    """
    def _atan_inv(x, one):
        # one * arctan(1/x) via the alternating Taylor series,
        # entirely in integer arithmetic (floor divisions).
        term = one // x
        total = term
        x_sq = x * x
        k = 1
        sign = 1
        while term:
            term //= x_sq
            k += 2
            sign = -sign
            total += sign * (term // k)
        return total

    guard = 10  # extra working digits so accumulated floor error never reaches the output
    one = 10 ** (n + guard)
    scaled = 16 * _atan_inv(5, one) - 4 * _atan_inv(239, one)
    digits = str(scaled // 10 ** guard)  # "3" followed by n decimal digits
    return digits[0] + "." + digits[1:n + 1]
#pi(1) #➞ "3.1"
#pi(2) #➞ "3.14"
pi(30) #➞ "3.141592653589793238462643383279"
| 364.806452
| 10,013
| 0.97303
| 224
| 11,309
| 49.151786
| 0.575893
| 0.00109
| 0.001635
| 0.002361
| 0.008901
| 0.008901
| 0.008901
| 0.008901
| 0.008901
| 0.008901
| 0
| 0.91142
| 0.019719
| 11,309
| 31
| 10,014
| 364.806452
| 0.081183
| 0.109647
| 0
| 0
| 0
| 0
| 0.994432
| 0.994432
| 0
| 1
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
88196cea06cce645b6d290c923b2241a43de7864
| 8,681
|
py
|
Python
|
pyenc.py
|
dev-zarir/pyenc
|
b3cf7a935794ed7a8ee6e9113cfb6b9bbd0d290c
|
[
"MIT"
] | 1
|
2022-02-22T16:15:50.000Z
|
2022-02-22T16:15:50.000Z
|
pyenc.py
|
dev-zarir/pyenc
|
b3cf7a935794ed7a8ee6e9113cfb6b9bbd0d290c
|
[
"MIT"
] | null | null | null |
pyenc.py
|
dev-zarir/pyenc
|
b3cf7a935794ed7a8ee6e9113cfb6b9bbd0d290c
|
[
"MIT"
] | null | null | null |
import marshal
exec(marshal.loads(b'c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\n\x00\x00\x00@\x00\x00\x00s\x8c\x01\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01d\x00d\x01l\x02Z\x02d\x00d\x02l\x03m\x04Z\x04\x01\x00d\x00d\x03l\x05m\x06Z\x06\x01\x00d\x00d\x04l\x01m\x07Z\x07\x01\x00d\x00d\x05l\x01m\x08Z\x08m\tZ\t\x01\x00d\x00d\x06l\nm\x0bZ\x0b\x01\x00z\x10d\x00d\x07l\x0cm\rZ\r\x01\x00W\x00n\x1a\x01\x00\x01\x00\x01\x00e\x0ed\x08\x83\x01\x01\x00e\x0f\x83\x00\x01\x00Y\x00n\x020\x00d\x00d\tl\x10m\x11Z\x11m\x12Z\x12\x01\x00d\nZ\x13d\x1bd\x0cd\r\x84\x01Z\x14d\x0ed\x0f\x84\x00Z\x15d\x10d\x11\x84\x00Z\x16d\x12d\x13\x84\x00Z\x17d\x14d\x15\x84\x00Z\x18d\x16d\x17\x84\x00Z\x19d\x18d\x19\x84\x00Z\x1ae\x1bd\x1ak\x02\x90\x01r\x88e\x16\x83\x00\x90\x01sDd\x0be\r_\x1cd\x0be\r_\x1dd\x0be\r_\x1ed\x0be\r_\x1fd\x0be\r_ d\x0be\r_!d\x0be\r_"d\x0be\r_#d\x0be\r_$d\x0be\r_%d\x0be\r_&d\x0be\r_\'d\x0be\r_(d\x0be\r_)d\x0be\r_*d\x0be\r_+z\ne\x1a\x83\x00\x01\x00W\x00n8\x04\x00e,\x90\x01y\x86\x01\x00Z-\x01\x00z\x1ee\x0ee\rj\x1de-\x83\x02\x01\x00e\x18\x83\x00\x01\x00W\x00Y\x00d\x01Z-[-n\nd\x01Z-[-0\x000\x00d\x01S\x00)\x1c\xe9\x00\x00\x00\x00N)\x01\xda\x05sleep)\x01\xda\x05dumps)\x01\xda\x08makedirs)\x02\xda\x06system\xda\x04name)\x01\xda\x06exists)\x01\xda\x04ForezlImport error! No module named "colorama". 
Install it using "pip install colorama" or "pip3 install colorama")\x02\xda\x06choice\xda\x07randintz!https://mrperfectit.blogspot.com/\xda\x00c\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00s\xf6\x00\x00\x00t\x00|\x00\xa0\x01d\x01d\x02\xa1\x02d\x03\x83\x02\x8f }\x03|\x03\xa0\x02\xa1\x00}\x04|\x03\xa0\x03\xa1\x00\x01\x00W\x00d\x00\x04\x00\x04\x00\x83\x03\x01\x00n\x101\x00s80\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00t\x04t\x05|\x04d\x04d\x05\x83\x03\x83\x01}\x05|\x02d\x06k\x02r^d\x07}\x02|\x02d\x08d\x00\x85\x02\x19\x00d\tk\x02sv|\x02d\t7\x00}\x02|\x01d\x06k\x02r\x84|\x02}\x06n&z\x0ct\x06|\x01\x83\x01\x01\x00W\x00n\x0c\x01\x00\x01\x00\x01\x00Y\x00n\x020\x00|\x01d\x02\x17\x00|\x02\x17\x00}\x06t\x00|\x06\xa0\x01d\x01d\x02\xa1\x02d\n\x83\x02\x8f&}\x03d\x0b|\x05\x9b\x00d\x0c\x9d\x03}\x07|\x03\xa0\x07|\x07\xa1\x01\x01\x00W\x00d\x00\x04\x00\x04\x00\x83\x03\x01\x00n\x101\x00s\xe80\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00d\x00S\x00)\rN\xfa\x01\\\xfa\x01/\xda\x01r\xda\x06random\xda\x04execr\n\x00\x00\x00\xda\x0eencrypted_file\xe9\xfd\xff\xff\xffz\x03.py\xda\x01wz"import 
marshal\nexec(marshal.loads(z\x03))\n)\x08\xda\x04open\xda\x07replace\xda\x04read\xda\x05closer\x02\x00\x00\x00\xda\x07compiler\x03\x00\x00\x00\xda\x05write)\x08\xda\tfile_path\xda\x0boutput_path\xda\x10output_file_name\xda\x04file\xda\x04code\xda\x08enc_code\xda\x06output\xda\tfile_code\xa9\x00r!\x00\x00\x00r\x0e\x00\x00\x00\xda\x07encrypt\x12\x00\x00\x00s$\x00\x00\x00\x00\x01\x14\x01\x08\x01&\x01\x10\x01\x08\x01\x04\x01\x10\x01\x08\x01\x08\x01\x06\x02\x02\x01\x0c\x01\x06\x01\x06\x01\x0c\x01\x14\x01\x0c\x01r"\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x1e\x00\x00\x00t\x00d\x01k\x02r\x12t\x01d\x02\x83\x01\x01\x00n\x08t\x01d\x03\x83\x01\x01\x00d\x00S\x00)\x04N\xda\x02nt\xda\x03cls\xda\x05clear)\x02r\x05\x00\x00\x00r\x04\x00\x00\x00r!\x00\x00\x00r!\x00\x00\x00r!\x00\x00\x00r\x0e\x00\x00\x00r%\x00\x00\x00\'\x00\x00\x00s\x06\x00\x00\x00\x00\x01\x08\x01\n\x02r%\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s>\x00\x00\x00t\x00j\x01}\x00|\x00d\x01k\x03o\x1e|\x00d\x02k\x03p\x1ed\x03t\x02j\x03v\x00}\x01t\x04t\x00j\x05d\x04\x83\x02o4t\x00j\x05\xa0\x06\xa1\x00}\x02|\x01o<|\x02S\x00)\x05Nz\tPocket PC\xda\x05win32\xda\x07ANSICON\xda\x06isatty)\x07\xda\x03sys\xda\x08platform\xda\x02os\xda\x07environ\xda\x07hasattr\xda\x06stdoutr(\x00\x00\x00)\x03\xda\x04plat\xda\x12supported_platform\xda\x08is_a_ttyr!\x00\x00\x00r!\x00\x00\x00r\x0e\x00\x00\x00\xda\x0esupports_color-\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x06\x01\x1a\x01\x16\x01r2\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s@\x00\x00\x00g\x00d\x01\xa2\x01}\x00t\x00j\x01t\x00j\x02t\x00j\x03t\x00j\x04t\x00j\x05t\x00j\x06g\x06}\x01|\x00D\x00]\x12}\x02t\x07t\x08|\x01\x83\x01|\x02\x83\x02\x01\x00q(d\x00S\x00)\x02N)\x0bz3$$$$$$$\\ $$\\ $$\\ $$$$$$$$\\ $$\\ $$\\ $$$$$$\\ z2$$ __$$\\$$\\ $$ |$$ _____|$$$\\ $$ |$$ __$$\\ z3$$ | $$ |\\$$\\ $$ / $$ | $$$$\\ $$ 
|$$ / \\__|z3$$$$$$$ | \\$$$$ / $$$$$\\ $$ $$\\$$ |$$ | z3$$ ____/ \\$$ / $$ __| $$ \\$$$$ |$$ | z3$$ | $$ | $$ | $$ |\\$$$ |$$ | $$\\ z3$$ | $$ | $$$$$$$$\\ $$ | \\$$ |\\$$$$$$ |z3\\__| \\__| \\________|\\__| \\__| \\______/ \xfa3 z3 mrperfectit.blogspot.com | Mr. Perfect IT r3\x00\x00\x00)\tr\x07\x00\x00\x00\xda\x04BLUE\xda\x04CYAN\xda\x05GREEN\xda\x06YELLOW\xda\x05WHITE\xda\x03RED\xda\x05printr\x08\x00\x00\x00)\x03\xda\x01b\xda\ncolor_list\xda\x04textr!\x00\x00\x00r!\x00\x00\x00r\x0e\x00\x00\x00\xda\x06banner3\x00\x00\x00s\x08\x00\x00\x00\x00\x01\x08\x0b\x1c\x01\x08\x01r>\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\x14\x00\x00\x00t\x00t\x01j\x02\x83\x01\x01\x00t\x03\x83\x00\x01\x00d\x00S\x00\xa9\x01N)\x04r:\x00\x00\x00r\x07\x00\x00\x00\xda\x05RESET\xda\x04exitr!\x00\x00\x00r!\x00\x00\x00r!\x00\x00\x00r\x0e\x00\x00\x00\xda\x04quitC\x00\x00\x00s\x04\x00\x00\x00\x00\x01\n\x01rB\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s\x10\x00\x00\x00t\x00\x83\x00\x01\x00t\x01\x83\x00\x01\x00d\x00S\x00r?\x00\x00\x00)\x02r%\x00\x00\x00r>\x00\x00\x00r!\x00\x00\x00r!\x00\x00\x00r!\x00\x00\x00r\x0e\x00\x00\x00\xda\x06splashG\x00\x00\x00s\x04\x00\x00\x00\x00\x01\x06\x01rC\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xc2\x00\x00\x00t\x00\x83\x00\x01\x00t\x01t\x02j\x03d\x01\x17\x00t\x02j\x04\x17\x00\x83\x01}\x00t\x05|\x00\x83\x01s<t\x06t\x02j\x07d\x02\x17\x00t\x02j\x04\x17\x00\x83\x01\x01\x00t\x08\x83\x00\x01\x00t\x01t\x02j\x03d\x03\x17\x00t\x02j\x04\x17\x00\x83\x01}\x01t\x01t\x02j\x03d\x04\x17\x00t\x02j\x04\x17\x00\x83\x01}\x02t\x00\x83\x00\x01\x00t\x06t\x02j\td\x05\x17\x00\x83\x01\x01\x00t\nt\x0bd\x06d\x07\x83\x02\x83\x01\x01\x00t\x0c|\x00|\x01|\x02\x83\x03\x01\x00t\x00\x83\x00\x01\x00t\x06t\x02j\x03d\x08\x17\x00\x83\x01\x01\x00t\nd\t\x83\x01\x01\x00t\rj\x0et\x0fd\nd\x0bd\x0c\x8d\x03\x01\x00d\x00S\x00
)\rNz%[*] Enter your file path (required): z [-] Error: File does not exists!z$[*] Enter output folder (optional): z&[*] Enter output filename (optional): z/[+] Please wait, we are processing your file...\xe9\x02\x00\x00\x00\xe9\x08\x00\x00\x00z%[+] Successfully Encrypted your file!\xe9\x01\x00\x00\x00r\x00\x00\x00\x00T)\x02\xda\x03new\xda\tautoraise)\x10rC\x00\x00\x00\xda\x05inputr\x07\x00\x00\x00r6\x00\x00\x00r@\x00\x00\x00r\x06\x00\x00\x00r:\x00\x00\x00r9\x00\x00\x00rB\x00\x00\x00r4\x00\x00\x00r\x01\x00\x00\x00r\t\x00\x00\x00r"\x00\x00\x00\xda\nwebbrowserr\x13\x00\x00\x00\xda\x03url)\x03r\x19\x00\x00\x00r\x1a\x00\x00\x00r\x1b\x00\x00\x00r!\x00\x00\x00r!\x00\x00\x00r\x0e\x00\x00\x00\xda\x04mainK\x00\x00\x00s\x1e\x00\x00\x00\x00\x01\x06\x01\x14\x01\x08\x01\x14\x01\x06\x01\x14\x01\x14\x01\x06\x01\x0e\x01\x0e\x01\x0c\x01\x06\x01\x0e\x01\x08\x01rL\x00\x00\x00\xda\x08__main__)\x02r\n\x00\x00\x00r\n\x00\x00\x00).r)\x00\x00\x00r+\x00\x00\x00rJ\x00\x00\x00\xda\x04timer\x01\x00\x00\x00\xda\x07marshalr\x02\x00\x00\x00r\x03\x00\x00\x00r\x04\x00\x00\x00r\x05\x00\x00\x00\xda\x07os.pathr\x06\x00\x00\x00\xda\x08coloramar\x07\x00\x00\x00r:\x00\x00\x00rA\x00\x00\x00r\x0e\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00rK\x00\x00\x00r"\x00\x00\x00r%\x00\x00\x00r2\x00\x00\x00r>\x00\x00\x00rB\x00\x00\x00rC\x00\x00\x00rL\x00\x00\x00\xda\x08__name__\xda\x05BLACKr9\x00\x00\x00r6\x00\x00\x00r7\x00\x00\x00r4\x00\x00\x00r5\x00\x00\x00r8\x00\x00\x00r@\x00\x00\x00\xda\rLIGHTBLACK_EX\xda\x0bLIGHTRED_EX\xda\rLIGHTGREEN_EX\xda\x0eLIGHTYELLOW_EX\xda\x0cLIGHTBLUE_EX\xda\x0fLIGHTMAGENTA_EX\xda\x0cLIGHTCYAN_EX\xda\rLIGHTWHITE_EX\xda\tException\xda\x01er!\x00\x00\x00r!\x00\x00\x00r!\x00\x00\x00r\x0e\x00\x00\x00\xda\x08<module>\x01\x00\x00\x00sX\x00\x00\x00\x08\x01\x08\x01\x08\x01\x0c\x01\x0c\x01\x0c\x01\x10\x01\x0c\x01\x02\x01\x10\x01\x06\x01\x08\x01\x0c\x01\x10\x02\x04\x02\n\x15\x08\x06\x08\x06\x08\x10\x08\x04\x08\x04\x08\x11\n\x01\x08\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\
x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x02\x01\n\x01\x10\x01\x0c\x01'))
| 2,893.666667
| 8,664
| 0.725262
| 1,762
| 8,681
| 3.526107
| 0.192963
| 0.291647
| 0.217286
| 0.189281
| 0.428456
| 0.32947
| 0.274907
| 0.239659
| 0.201996
| 0.170288
| 0
| 0.34397
| 0.032485
| 8,681
| 2
| 8,665
| 4,340.5
| 0.395761
| 0
| 0
| 0
| 0
| 1.5
| 0.782809
| 0.690287
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 15
|
88404560539cedcbcef9fdb9269ca9880c96a19e
| 20,786
|
py
|
Python
|
workout_performance.py
|
megantoronto/crossfit-open-app
|
381b6f340c487b209ab7ce0e643e68a8ab36c261
|
[
"Apache-2.0"
] | null | null | null |
workout_performance.py
|
megantoronto/crossfit-open-app
|
381b6f340c487b209ab7ce0e643e68a8ab36c261
|
[
"Apache-2.0"
] | null | null | null |
workout_performance.py
|
megantoronto/crossfit-open-app
|
381b6f340c487b209ab7ce0e643e68a8ab36c261
|
[
"Apache-2.0"
] | null | null | null |
import streamlit as st
import psycopg2
#import os
import pandas as pd
import numpy as np
#import matplotlib.pyplot as plt
#import matplotlib
from datetime import timedelta
import plotly.graph_objects as go
import copy
import plotly.express as px
import plotly.figure_factory as ff
from total_reps import create_conn,load_data,load_result_data,format_time,calc_total_reps,calc_table_height,flatten_list,gen_table_colors
def app():
st.title("Workout Performance")
headerColor = 'grey'
rowEvenColor = 'lightgrey'
rowOddColor = 'white'
special=['16.2']
df_move = load_data('movements')
df_rep = load_data('rep_rounds')
df_mbw = load_data('movements_by_workout')
df_workout_desc = load_data('workout_desc')
df_table = load_data("movements_label")
df_table = df_table.fillna('')
df_weight=load_data('weight')
dropdown = df_mbw.sort_values(['year','workout'],ascending=[False,True])['workout']
workout = st.selectbox(label="Workout",options=dropdown)
year = df_mbw[df_mbw['workout']==workout]['year'].values[0]
gender = st.selectbox(label="Gender",options=["Men","Women"])
bucket = st.text_input(label="Select # of Athletes",value="50")
order = st.selectbox(label="Rank Type",options=["Workout Rank","Overall Rank"])
if "a" in workout:
workout_num = workout[workout.find(".")+1:]
else:
workout_num = int(workout[workout.find(".")+1:])
workout_text = df_workout_desc[df_workout_desc['workout']==workout]['workout_desc'].values[0]
workout_text=workout_text.replace(r'\n','\n')
score_data = load_result_data(str.lower(gender),int(year),workout_num,int(bucket),order=order)
final_dict,movements,total_reps,time_domain,d = calc_total_reps(workout,score_data,df_rep,workout_num,gender,special)
st.subheader("Workout Description")
st.markdown(workout_text)
#st.text(final_dict)
#st.text(d.keys())
label_exceptions={'squat_clean': 'Squat Clean','snatch': 'Snatch','deadlift': 'Deadlift','clean_and_jerk': 'Clean and Jerk','squat_snatch': 'Squat Snatch'}
movements_labeled =[]
for m in list(d.keys()):
#st.text(d.keys())
if m != "rest":
if m[:-2] not in label_exceptions.keys():
movements_labeled.append(df_move[df_move['movement']==m]['label'].values[0])
else:
#st.text([v for v in list(d.keys()) if m[:-2] in v])
if len([v for v in list(d.keys()) if m[:-2] in v]) >1:
l = label_exceptions[m[:-2]]+" "+str(m[-1:])
else:
l= label_exceptions[m[:-2]]
movements_labeled.append(l)
if (df_rep[df_rep['workout']==workout]['type'].values[0]=="AMRAP") & (pd.isnull(df_rep[df_rep['workout']==workout]['movement_1_rep_addition'].values[0])):
final_df = pd.concat([score_data,pd.DataFrame(final_dict)],axis=1)
final_df['breakdown_'+str(workout_num)]=final_df["breakdown_"+str(workout_num)].apply(lambda x: x.replace(r'\n','\n') if not pd.isnull(x) else x)
if workout not in ("14.3","13.1","12.1","12.2","11.3"):
col_names = ['Workout Rank','Athlete Name','Score','Score Detail','Rounds','Reps','Avg Time Per Round']
vals = [final_df['rank_'+str(workout_num)],final_df.competitorname,final_df['scoredisplay_'+str(workout_num)],
final_df['breakdown_'+str(workout_num)],final_df['rounds'],final_df['reps'],final_df['avg_time_per_round']]
col_names.extend(movements_labeled)
vals_to_add = [final_df[m] for m in list(d.keys()) if m != 'rest']
vals.extend(vals_to_add)
else:
col_names = ['Workout Rank','Athlete Name','Score','Score Detail','Rounds','Reps']
vals = [final_df['rank_'+str(workout_num)],final_df.competitorname,final_df['scoredisplay_'+str(workout_num)],
final_df['breakdown_'+str(workout_num)],final_df['rounds'],final_df['reps']]
if workout == '14.3':
col_names.extend(movements_labeled[:-1])
vals_to_add = [final_df[m] for m in list(d.keys())[:-1] if m != 'rest']
else:
col_names.extend(movements_labeled)
vals_to_add = [final_df[m] for m in list(d.keys()) if m != 'rest']
vals.extend(vals_to_add)
final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)]
table_colors=gen_table_colors(final_df,rowEvenColor,rowOddColor)
fig_final = go.Figure(data=[go.Table(columnwidth=[1,1.5,1,1,1],header=dict(values=col_names,
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=vals,
line_color='darkslategray',
fill_color = [table_colors*6],
font = dict(size = 16),
align = ['center','left',"center"],
height=30))],)
fig_final.update_layout(margin=dict(l=10,r=10, b=10,t=10),width=1200)
final_df['raw_score'] = final_df['scoredisplay_'+str(workout_num)].apply(lambda score: int(score[:score.find(" ")]) if "reps" in score else int(score))
final_df_copy=final_df.drop(columns=["rounds","reps"])
avg_df = round(final_df_copy.mean(axis=0))
avg_df['Score']=round(avg_df['raw_score'])
avg_df['Rounds']=round(avg_df['Score']//total_reps)
#st.text(total_reps)
avg_df['Reps']=round(((avg_df['Score']/total_reps)-avg_df['Rounds'])*total_reps)
x=str(time_domain/avg_df['Score']*total_reps)
x=':'.join(x.split(':')[1:])
if workout not in ("14.3","13.1","12.1","12.2","11.3"):
avg_df['Time Per Round']=x
if 'scoredisplay_'+str(workout_num) in avg_df.index:
avg_df=avg_df.drop(index=['scoredisplay_'+str(workout_num)])
avg_df=avg_df.drop(index=['rank_'+str(workout_num),'raw_score'])
avg_df = pd.DataFrame(avg_df).reset_index()
avg_df.columns=['Movement','Average Reps']
fig_average = go.Figure(data=[go.Table(header=dict(values=["Movement","Average Reps"],
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=[avg_df['Movement'],avg_df['Average Reps']],
line_color='darkslategray',
fill_color = [table_colors*2],
font = dict(size = 16),
align = ['center'],
height=30))],layout=dict(height=calc_table_height(avg_df)-150))
fig_average.update_layout(margin=dict(l=10,r=10, b=10,t=10))
st.write("__Results__")
st.plotly_chart(fig_final)
st.write("__Average Rounds, Reps, & Time Per Round__")
st.plotly_chart(fig_average)
elif df_rep[df_rep['workout']==workout]['type'].values[0]=="for_load":
final_df = pd.concat([score_data,pd.DataFrame(final_dict)],axis=1)
#final_df=final_df[['competitorname','scoredisplay_'+str(workout_num)]]
final_df['raw_score'] = final_df['scoredisplay_'+str(workout_num)].apply(lambda score: int(score[:score.find(" l")]) if "l" in score else int(score))
avg_df = round(final_df.mean(axis=0))
if "scoredisplay_"+str(workout_num) in avg_df.index:
avg_df = avg_df.drop(index=["scoredisplay_"+str(workout_num)])
#avg_df = avg_df.drop(columns=[movement_col])
avg_df[movements_labeled[0]]=str(int(avg_df['raw_score']))+ " lbs"
avg_df=avg_df.drop(index=['rank_'+str(workout_num),movements[0],'raw_score'])
avg_df=pd.DataFrame(avg_df).reset_index()
avg_df.columns=['Movement','Weight']
final_df=final_df.drop(columns=['raw_score'])
col_names = ['Workout Rank','Athlete Name','Score']
#col_names.extend(movements_labeled)
vals = [final_df['rank_'+str(workout_num)],final_df.competitorname,final_df['scoredisplay_'+str(workout_num)]]
final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)]
table_colors=gen_table_colors(final_df,rowEvenColor,rowOddColor)
fig_final = go.Figure(data=[go.Table(columnwidth=[1,1.5,1],header=dict(values=col_names,
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=vals,
line_color='darkslategray',
fill_color = [table_colors*6],
font = dict(size = 16),
align = ['center','left',"center"],
height=30))],)
fig_final.update_layout(margin=dict(l=10,r=10, b=10,t=10),width=1200)
fig_average = go.Figure(data=[go.Table(columnwidth=[1.5,1],header=dict(values=["Movement","Average Weight"],
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=[avg_df['Movement'],avg_df['Weight']],
line_color='darkslategray',
fill_color = [table_colors*2],
font = dict(size = 16),
align = ['center'],
height=30))],layout=dict(height=calc_table_height(avg_df)-150))
fig_average.update_layout(margin=dict(l=10,r=10, b=10,t=10))
st.write("__Results__")
st.plotly_chart(fig_final)
st.write("__Average Weight Lifted__")
st.plotly_chart(fig_average)
elif (df_rep[df_rep['workout']==workout]['type'].values[0]=="AMRAP") or (df_rep[df_rep['workout']==workout]['type'].values[0]=="to_failure"):
final_df = pd.concat([score_data,pd.DataFrame(final_dict)],axis=1)
final_df['breakdown_'+str(workout_num)]=final_df["breakdown_"+str(workout_num)].apply(lambda x: x.replace(r'\n','\n') if not pd.isnull(x) else x)
col_names = ['Workout Rank','Athlete Name','Score','Score Detail']
col_names.extend(movements_labeled)
vals = [final_df['rank_'+str(workout_num)],final_df.competitorname,final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)]]
vals_to_add = [final_df[m] for m in list(d.keys()) if m != 'rest']
vals.extend(vals_to_add)
final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)]
table_colors=gen_table_colors(final_df,rowEvenColor,rowOddColor)
fig_final = go.Figure(data=[go.Table(columnwidth=[1,1.5,1,1,1],header=dict(values=col_names,
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=vals,
line_color='darkslategray',
fill_color = [table_colors*6],
font = dict(size = 16),
align = ['center','left',"center"],
height=30))],)
fig_final.update_layout(margin=dict(l=10,r=10, b=10,t=10),width=1200)
final_df['Score'] = final_df['scoredisplay_'+str(workout_num)].apply(lambda score: int(score[:score.find(" ")]) if "reps" in score else int(score))
avg_df = round(pd.DataFrame(final_df.mean(axis=0)))
if "scoredisplay_"+str(workout_num) in avg_df.index:
avg_df = avg_df.drop(index=["scoredisplay_"+str(workout_num)])
avg_df=avg_df.drop(index=['rank_'+str(workout_num)])
avg_df=avg_df.reset_index()
#st.dataframe(avg_df)
avg_df.columns = ['Movement','Average Reps']
#avg_df['Movement']=avg_df['Movement'].apply(lambda x: df_move[df_move['movement']==x]['label'].values[0])
fig_average = go.Figure(data=[go.Table(header=dict(values=["Movement","Average Reps"],
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=[avg_df['Movement'],avg_df['Average Reps']],
line_color='darkslategray',
fill_color = [table_colors*2],
font = dict(size = 16),
align = ['center'],
height=30))],layout=dict(height=calc_table_height(avg_df)-150))
fig_average.update_layout(margin=dict(l=10,r=10, b=10,t=10))
st.write("__Results__")
st.plotly_chart(fig_final)
st.write("__Average Reps Completed__")
st.plotly_chart(fig_average)
elif (df_rep[df_rep['workout']==workout]['type'].values[0]=="for_time") & pd.notnull(df_rep[df_rep['workout']==workout]['rounds'].values[0]):
final_df = pd.concat([score_data,pd.DataFrame(final_dict)],axis=1)
final_df['breakdown_'+str(workout_num)]=final_df["breakdown_"+str(workout_num)].apply(lambda x: x.replace(r'\n','\n') if not pd.isnull(x) else x)
#
#final_df['rank_'+str(workout_num)],final_df.competitorname,
#final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)],final_df.wall_walk,final_df.double_under
col_names = ['Workout Rank','Athlete Name','Score','Score Detail','Avg Time Per Round']
col_names.extend(movements_labeled)
vals = [final_df['rank_'+str(workout_num)],final_df.competitorname,final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)],final_df['avg_time_per_round']]
vals_to_add = [final_df[m] for m in list(d.keys()) if m != 'rest']
vals.extend(vals_to_add)
final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)]
table_colors=gen_table_colors(final_df,rowEvenColor,rowOddColor)
fig_final = go.Figure(data=[go.Table(columnwidth=[1,1.5,1,1,1],header=dict(values=col_names,
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=vals,
line_color='darkslategray',
fill_color = [table_colors*6],
font = dict(size = 16),
align = ['center','left',"center"],
height=30))],)
fig_final.update_layout(margin=dict(l=10,r=10, b=10,t=10),width=1200)
final_df['Total Reps']=final_df['scoredisplay_'+str(workout_num)].apply(lambda x: int(x[:x.find(" ")]) if "reps" in x else total_reps)
#st.dataframe(final_df)
final_df=final_df.drop(columns=['rank_'+str(workout_num)])
avg_df = round(pd.DataFrame(final_df.mean(axis=0)))
finish=pd.DataFrame()
finish['Finishers']=[len(final_df)-len(final_df[final_df['scoredisplay_'+str(workout_num)].str.contains("reps")])]
finish['Average Finish Time']=[format_time(np.mean(final_df[final_df['scoredisplay_'+str(workout_num)].str.contains(":")]['scoredisplay_'+str(workout_num)].apply(lambda score: timedelta(minutes=int(score[:score.find(":")]),seconds=int(score[score.find(":")+1:])))))]
finish['Average Time Per Round']=[format_time(np.mean(final_df[final_df['scoredisplay_'+str(workout_num)].str.contains(":")]['scoredisplay_'+str(workout_num)].apply(lambda score: timedelta(minutes=int(score[:score.find(":")]),seconds=int(score[score.find(":")+1:]))))/int(df_rep[df_rep['workout']==workout]['rounds'].values[0]))]
avg_df=avg_df.reset_index()
#st.dataframe(avg_df)
avg_df.columns = ['Movement','Average Reps']
#avg_df['Movement']=avg_df['Movement'].apply(lambda x: df_move[df_move['movement']==x]['label'].values[0])
fig_average = go.Figure(data=[go.Table(header=dict(values=["Movement","Average Reps"],
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=[avg_df['Movement'],avg_df['Average Reps']],
line_color='darkslategray',
fill_color = [table_colors*2],
font = dict(size = 16),
align = ['center'],
height=30))],layout=dict(height=calc_table_height(avg_df)-150))
fig_average.update_layout(margin=dict(l=10,r=10, b=10,t=10))
fig_finish = go.Figure(data=[go.Table(header=dict(values=["Finishers","Average Finish Time","Average Time Per Round"],
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=[finish['Finishers'],finish['Average Finish Time'],finish['Average Time Per Round']],
line_color='darkslategray',
fill_color = [table_colors*2],
font = dict(size = 16),
align = ["center"],
height=30))],)
fig_finish.update_layout(margin=dict(l=10,r=10, b=10,t=10))
st.write("__Results__")
st.plotly_chart(fig_final)
st.write("__Average Reps Completed__")
st.plotly_chart(fig_average)
st.write("__Finisher Stats__")
st.plotly_chart(fig_finish)
else:
final_df = pd.concat([score_data,pd.DataFrame(final_dict)],axis=1)
final_df['breakdown_'+str(workout_num)]=final_df["breakdown_"+str(workout_num)].apply(lambda x: x.replace(r'\n','\n') if not pd.isnull(x) else x)
#
#final_df['rank_'+str(workout_num)],final_df.competitorname,
#final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)],final_df.wall_walk,final_df.double_under
col_names = ['Workout Rank','Athlete Name','Score','Score Detail']
col_names.extend(movements_labeled)
vals = [final_df['rank_'+str(workout_num)],final_df.competitorname,final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)]]
vals_to_add = [final_df[m] for m in list(d.keys()) if m != 'rest']
vals.extend(vals_to_add)
final_df['scoredisplay_'+str(workout_num)],final_df['breakdown_'+str(workout_num)]
table_colors=gen_table_colors(final_df,rowEvenColor,rowOddColor)
fig_final = go.Figure(data=[go.Table(columnwidth=[1,1,1,1.5,1],header=dict(values=col_names,
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=vals,
line_color='darkslategray',
fill_color = [table_colors*6],
font = dict(size = 16),
align = ['center','left',"center"],
height=30))],)
fig_final.update_layout(margin=dict(l=10,r=10, b=10,t=10),width=1200)
final_df['Total Reps']=final_df['scoredisplay_'+str(workout_num)].apply(lambda x: int(x[:x.find(" ")]) if "reps" in x else total_reps)
#st.dataframe(final_df)
final_df=final_df.drop(columns=['rank_'+str(workout_num)])
avg_df = round(pd.DataFrame(final_df.mean(axis=0)))
finish=pd.DataFrame()
finish['Finishers']=[len(final_df)-len(final_df[final_df['scoredisplay_'+str(workout_num)].str.contains("reps")])]
finish['Average Finish Time']=[format_time(np.mean(final_df[final_df['scoredisplay_'+str(workout_num)].str.contains(":")]['scoredisplay_'+str(workout_num)].apply(lambda score: timedelta(minutes=int(score[:score.find(":")]),seconds=int(score[score.find(":")+1:])))))]
avg_df=avg_df.reset_index()
#st.dataframe(avg_df)
avg_df.columns = ['Movement','Average Reps']
#avg_df['Movement']=avg_df['Movement'].apply(lambda x: df_move[df_move['movement']==x]['label'].values[0])
fig_average = go.Figure(data=[go.Table(header=dict(values=["Movement","Average Reps"],
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=[avg_df['Movement'],avg_df['Average Reps']],
line_color='darkslategray',
fill_color = [table_colors*2],
font = dict(size = 16),
align = ['center'],
height=30))],layout=dict(height=calc_table_height(avg_df)-150))
fig_average.update_layout(margin=dict(l=10,r=10, b=10,t=10))
fig_finish = go.Figure(data=[go.Table(header=dict(values=["Finishers","Average Finish Time"],
fill_color=headerColor,
font=dict(color='white', size=18),
line_color='darkslategray',),
cells=dict(values=[finish['Finishers'],finish['Average Finish Time']],
line_color='darkslategray',
fill_color = [table_colors*2],
font = dict(size = 16),
align = ["center"],
height=30))],)
fig_finish.update_layout(margin=dict(l=10,r=10, b=10,t=10))
st.write("__Results__")
st.plotly_chart(fig_final)
st.write("__Average Reps Completed__")
st.plotly_chart(fig_average)
st.write("__Finisher Stats__")
st.plotly_chart(fig_finish)
#st.dataframe(final_df)
#st.dataframe(avg_df)
#st.dataframe(finish)
#df_2020['reps']=df_2020['scoredisplay_2'].apply(calc_total_reps)
#final_dict,movements,total_reps,time_domain,d = calc_total_reps(workout,score_data,df_rep,workout_num,gender,special)
| 52.226131
| 337
| 0.640142
| 2,881
| 20,786
| 4.366887
| 0.075321
| 0.063985
| 0.068198
| 0.065575
| 0.839202
| 0.822192
| 0.807249
| 0.801208
| 0.798903
| 0.790796
| 0
| 0.019955
| 0.189936
| 20,786
| 398
| 338
| 52.226131
| 0.727224
| 0.065188
| 0
| 0.700637
| 0
| 0
| 0.156332
| 0.001186
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003185
| false
| 0
| 0.031847
| 0
| 0.035032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
88486fab6b0218096704809c0fdf0f7de4bca429
| 154
|
py
|
Python
|
{{cookiecutter.project_directory}}/{{cookiecutter.main_package_name}}/blueprints/health_check/__init__.py
|
langep/flask-api-basic
|
f9a46ec2e0b2642a345fcf613ac9148919f6279f
|
[
"MIT"
] | 1
|
2019-04-20T00:36:42.000Z
|
2019-04-20T00:36:42.000Z
|
{{cookiecutter.project_directory}}/{{cookiecutter.main_package_name}}/blueprints/health_check/__init__.py
|
langep/flask-api-basic
|
f9a46ec2e0b2642a345fcf613ac9148919f6279f
|
[
"MIT"
] | 4
|
2019-05-11T04:51:24.000Z
|
2019-05-11T04:54:08.000Z
|
{{cookiecutter.project_directory}}/{{cookiecutter.main_package_name}}/blueprints/health_check/__init__.py
|
langep/flask-api-basic
|
f9a46ec2e0b2642a345fcf613ac9148919f6279f
|
[
"MIT"
] | null | null | null |
"""Health check blueprint package."""
from {{cookiecutter.main_package_name}}.blueprints.health_check.views import health_check_blueprint # flake8: noqa
| 51.333333
| 115
| 0.811688
| 19
| 154
| 6.315789
| 0.684211
| 0.275
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007042
| 0.077922
| 154
| 2
| 116
| 77
| 0.838028
| 0.077922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
88766aa7f18ab7270470562ef7ebd131ae5dc460
| 11,815
|
py
|
Python
|
Tester.py
|
Waste-Wood/HGM-GIF
|
969b4c213360a5e47369c0072f9fe20ded0c1570
|
[
"MIT"
] | 2
|
2021-11-24T08:22:21.000Z
|
2021-12-10T12:27:13.000Z
|
Tester.py
|
Waste-Wood/HGM-GIF
|
969b4c213360a5e47369c0072f9fe20ded0c1570
|
[
"MIT"
] | null | null | null |
Tester.py
|
Waste-Wood/HGM-GIF
|
969b4c213360a5e47369c0072f9fe20ded0c1570
|
[
"MIT"
] | null | null | null |
import torch
import dgl
import os
from tools.utils import eval_label
from tools.logger import *
class TestPipLine():
    """Base decode/evaluation pipeline for an extractive summarization model.

    Accumulates hypotheses and references across batches and exposes
    ROUGE-ready accessors; subclasses implement `evaluation` and `getMetric`.
    """

    def __init__(self, model, m, test_dir, limited):
        """
        :param model: the model under evaluation
        :param m: the number of sentences to select per example
        :param test_dir: directory for saving decode files
        :param limited: if True, truncate each hypothesis to its reference length
        """
        self.model = model
        self.limited = limited
        self.m = m
        self.test_dir = test_dir
        self.extracts = []            # selected sentence indices, one list per example
        self.batch_number = 0
        self.running_loss = 0
        self.example_num = 0
        self.total_sentence_num = 0
        self._hyps = []               # decoded hypothesis strings
        self._refer = []              # gold reference strings

    def evaluation(self, G, index, valset):
        """Subclass hook: evaluate one batch."""
        pass

    def getMetric(self):
        """Subclass hook: report accumulated metrics."""
        pass

    def SaveDecodeFile(self):
        """Write every (reference, hypothesis) pair to a timestamped file."""
        import datetime
        stamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')  # current time
        out_path = os.path.join(self.test_dir, stamp)
        with open(out_path, "wb") as resfile:
            for pair_idx in range(self.rougePairNum):
                resfile.write(b"[Reference]\t")
                resfile.write(self._refer[pair_idx].encode('utf-8'))
                resfile.write(b"\n")
                resfile.write(b"[Hypothesis]\t")
                resfile.write(self._hyps[pair_idx].encode('utf-8'))
                resfile.write(b"\n")
                resfile.write(b"\n")
                resfile.write(b"\n")

    @property
    def running_avg_loss(self):
        # Mean loss over all batches seen so far.
        return self.running_loss / self.batch_number

    @property
    def rougePairNum(self):
        # Number of (reference, hypothesis) pairs collected.
        return len(self._hyps)

    @property
    def hyps(self):
        """Hypotheses; word-truncated to the paired reference when `limited`."""
        if not self.limited:
            return self._hyps
        return [
            " ".join(self._hyps[i].split(" ")[:len(self._refer[i].split(" "))])
            for i in range(self.rougePairNum)
        ]

    @property
    def refer(self):
        return self._refer

    @property
    def extractLabel(self):
        return self.extracts
class SLTester(TestPipLine):
    """Supervised tester: scores sentence nodes of a batched DGL graph,
    selects top-m sentences per example, and accumulates P/R/F counters."""

    def __init__(self, model, m, test_dir=None, limited=False, blocking_win=3):
        super().__init__(model, m, test_dir, limited)
        # Confusion-style counters accumulated across evaluation() calls.
        self.pred, self.true, self.match, self.match_true = 0, 0, 0, 0
        self._F = 0
        # reduction='none' keeps a per-node loss so it can be summed per graph.
        self.criterion = torch.nn.CrossEntropyLoss(reduction='none')
        self.blocking_win = blocking_win  # n-gram window used by ngram_blocking

    def evaluation(self, G, index, dataset, blocking=False):
        """Score one batched graph and accumulate selection statistics.

        :param G: batched DGL graph for this batch (fed to self.model)
        :param index: list, example id
        :param dataset: dataset which includes text and summary
        :param blocking: bool, for n-gram blocking
        """
        self.batch_number += 1
        outputs = self.model.forward(G)
        # logger.debug(outputs)
        # Sentence nodes are marked with dtype == 1.
        snode_id = G.filter_nodes(lambda nodes: nodes.data["dtype"] == 1)
        label = G.ndata["label"][snode_id].sum(-1)  # [n_nodes]
        G.nodes[snode_id].data["loss"] = self.criterion(outputs, label).unsqueeze(-1)  # [n_nodes, 1]
        loss = dgl.sum_nodes(G, "loss")  # [batch_size, 1]
        loss = loss.mean()
        self.running_loss += float(loss.data)
        G.nodes[snode_id].data["p"] = outputs
        glist = dgl.unbatch(G)
        for j in range(len(glist)):
            idx = index[j]
            example = dataset.get_example(idx)
            original_article_sents = example.original_article_sents
            sent_max_number = len(original_article_sents)
            refer = example.original_abstract
            g = glist[j]
            snode_id = g.filter_nodes(lambda nodes: nodes.data["dtype"] == 1)
            N = len(snode_id)
            p_sent = g.ndata["p"][snode_id]
            p_sent = p_sent.view(-1, 2)  # [node, 2]
            label = g.ndata["label"][snode_id].sum(-1).squeeze().cpu()  # [n_node]
            if self.m == 0:
                # m == 0: keep every sentence whose argmax class is non-zero.
                prediction = p_sent.max(1)[1]  # [node]
                pred_idx = torch.arange(N)[prediction!=0].long()
            else:
                if blocking:
                    pred_idx = self.ngram_blocking(original_article_sents, p_sent[:,1], self.blocking_win, min(self.m, N))
                else:
                    # print(p_sent.size())
                    topk, pred_idx = torch.topk(p_sent[:,1], min(self.m, N))
                prediction = torch.zeros(N).long()
                prediction[pred_idx] = 1
            self.extracts.append(pred_idx.tolist())
            self.pred += prediction.sum()
            self.true += label.sum()
            self.match_true += ((prediction == label) & (prediction == 1)).sum()
            self.match += (prediction == label).sum()
            self.total_sentence_num += N
            self.example_num += 1
            # Guard: pred_idx may reference padded nodes beyond the real sentences.
            hyps = "\n".join(original_article_sents[id] for id in pred_idx if id < sent_max_number)
            self._hyps.append(hyps)
            self._refer.append(refer)

    def getMetric(self):
        """Compute and log accuracy/precision/recall/F from the counters."""
        logger.info("[INFO] Validset match_true %d, pred %d, true %d, total %d, match %d",
                    self.match_true, self.pred, self.true, self.total_sentence_num, self.match)
        self._accu, self._precision, self._recall, self._F = eval_label(
            self.match_true, self.pred, self.true, self.total_sentence_num, self.match)
        logger.info(
            "[INFO] The size of totalset is %d, sent_number is %d, accu is %f, precision is %f, recall is %f, F is %f",
            self.example_num, self.total_sentence_num, self._accu, self._precision, self._recall, self._F)

    def ngram_blocking(self, sents, p_sent, n_win, k):
        """Greedy top-k selection that skips sentences sharing an n-gram
        with an already-selected sentence.

        :param sents: sentence strings, indexed like p_sent
        :param p_sent: [sent_num, 1] sentence scores
        :param n_win: int, n_win=2,3,4...
        :param k: maximum number of sentences to select
        :return: LongTensor of selected indices, score-descending
        """
        ngram_list = []
        _, sorted_idx = p_sent.sort(descending=True)
        S = []
        for idx in sorted_idx:
            sent = sents[idx]
            pieces = sent.split()
            overlap_flag = 0
            sent_ngram = []
            # NOTE(review): range(len(pieces) - n_win) omits the final n-gram,
            # and sentences with <= n_win words contribute none — confirm
            # whether range(len(pieces) - n_win + 1) was intended.
            for i in range(len(pieces) - n_win):
                ngram = " ".join(pieces[i : (i + n_win)])
                if ngram in ngram_list:
                    overlap_flag = 1
                    break
                else:
                    sent_ngram.append(ngram)
            if overlap_flag == 0:
                S.append(idx)
                ngram_list.extend(sent_ngram)
                if len(S) >= k:
                    break
        S = torch.LongTensor(S)
        # print(sorted_idx, S)
        return S

    @property
    def labelMetric(self):
        # F-score computed by the last getMetric() call.
        return self._F
class SLTesterStock(TestPipLine):
    """Variant of SLTester computing the loss on one binary label per graph
    (derived from the first sentence node) instead of per-node labels."""

    def __init__(self, model, m, test_dir=None, limited=False, blocking_win=3):
        super().__init__(model, m, test_dir, limited)
        # Confusion-style counters accumulated across evaluation() calls.
        self.pred, self.true, self.match, self.match_true = 0, 0, 0, 0
        self._F = 0
        # reduction='none' keeps per-element losses; averaged explicitly below.
        self.criterion = torch.nn.CrossEntropyLoss(reduction='none')
        self.blocking_win = blocking_win  # n-gram window used by ngram_blocking

    def evaluation(self, G, index, dataset, blocking=False):
        """Score one batched graph; loss is computed on graph-level labels.

        :param G: batched DGL graph for this batch (fed to self.model)
        :param index: list, example id
        :param dataset: dataset which includes text and summary
        :param blocking: bool, for n-gram blocking
        """
        self.batch_number += 1
        # glen: per-graph sentence-node offsets returned by the model forward.
        outputs, glen = self.model.forward(G)
        # logger.debug(outputs)
        snode_id = G.filter_nodes(lambda nodes: nodes.data["dtype"] == 1)
        label = G.ndata["label"][snode_id].sum(-1)  # [n_nodes]
        final_label = []
        # One binary label per graph: the first sentence node's label is
        # inverted (label 1 -> class 0, otherwise class 1).
        for j in range(1, len(glen)):
            tmp = label[glen[j-1]:glen[j]]
            if tmp[0] == 1:
                final_label.append(0)
            else:
                final_label.append(1)
        # NOTE(review): .cuda() hard-requires a GPU; this fails on CPU-only runs.
        final_label = torch.LongTensor(final_label).cuda()
        # G.nodes[snode_id].data["loss"] = self.criterion(outputs, label).unsqueeze(-1) # [n_nodes, 1]
        # loss = dgl.sum_nodes(G, "loss") # [batch_size, 1]
        loss = self.criterion(outputs, final_label).mean()
        self.running_loss += float(loss.data)
        # G.nodes[snode_id].data["p"] = outputs
        glist = dgl.unbatch(G)
        for j in range(len(glist)):
            idx = index[j]
            example = dataset.get_example(idx)
            original_article_sents = example.original_article_sents
            sent_max_number = len(original_article_sents)
            refer = example.original_abstract
            g = glist[j]
            snode_id = g.filter_nodes(lambda nodes: nodes.data["dtype"] == 1)
            N = len(snode_id)
            # NOTE(review): reads g.ndata["p"], but the assignment of "p" above
            # is commented out — unless "p" is populated upstream this raises
            # a KeyError; confirm against the model/pipeline.
            p_sent = g.ndata["p"][snode_id]
            p_sent = p_sent.view(-1, 2)  # [node, 2]
            label = g.ndata["label"][snode_id].sum(-1).squeeze().cpu()  # [n_node]
            if self.m == 0:
                # m == 0: keep every sentence whose argmax class is non-zero.
                prediction = p_sent.max(1)[1]  # [node]
                pred_idx = torch.arange(N)[prediction!=0].long()
            else:
                if blocking:
                    pred_idx = self.ngram_blocking(original_article_sents, p_sent[:,1], self.blocking_win, min(self.m, N))
                else:
                    # print(p_sent.size())
                    topk, pred_idx = torch.topk(p_sent[:,1], min(self.m, N))
                prediction = torch.zeros(N).long()
                prediction[pred_idx] = 1
            self.extracts.append(pred_idx.tolist())
            self.pred += prediction.sum()
            self.true += label.sum()
            self.match_true += ((prediction == label) & (prediction == 1)).sum()
            self.match += (prediction == label).sum()
            self.total_sentence_num += N
            self.example_num += 1
            # Guard: pred_idx may reference padded nodes beyond the real sentences.
            hyps = "\n".join(original_article_sents[id] for id in pred_idx if id < sent_max_number)
            self._hyps.append(hyps)
            self._refer.append(refer)

    def getMetric(self):
        """Compute and log accuracy/precision/recall/F from the counters."""
        logger.info("[INFO] Validset match_true %d, pred %d, true %d, total %d, match %d",
                    self.match_true, self.pred, self.true, self.total_sentence_num, self.match)
        self._accu, self._precision, self._recall, self._F = eval_label(
            self.match_true, self.pred, self.true, self.total_sentence_num, self.match)
        logger.info(
            "[INFO] The size of totalset is %d, sent_number is %d, accu is %f, precision is %f, recall is %f, F is %f",
            self.example_num, self.total_sentence_num, self._accu, self._precision, self._recall, self._F)

    def ngram_blocking(self, sents, p_sent, n_win, k):
        """Greedy top-k selection that skips sentences sharing an n-gram
        with an already-selected sentence.

        :param sents: sentence strings, indexed like p_sent
        :param p_sent: [sent_num, 1] sentence scores
        :param n_win: int, n_win=2,3,4...
        :param k: maximum number of sentences to select
        :return: LongTensor of selected indices, score-descending
        """
        ngram_list = []
        _, sorted_idx = p_sent.sort(descending=True)
        S = []
        for idx in sorted_idx:
            sent = sents[idx]
            pieces = sent.split()
            overlap_flag = 0
            sent_ngram = []
            # NOTE(review): range(len(pieces) - n_win) omits the final n-gram,
            # mirroring SLTester.ngram_blocking — confirm intent there too.
            for i in range(len(pieces) - n_win):
                ngram = " ".join(pieces[i : (i + n_win)])
                if ngram in ngram_list:
                    overlap_flag = 1
                    break
                else:
                    sent_ngram.append(ngram)
            if overlap_flag == 0:
                S.append(idx)
                ngram_list.extend(sent_ngram)
                if len(S) >= k:
                    break
        S = torch.LongTensor(S)
        # print(sorted_idx, S)
        return S

    @property
    def labelMetric(self):
        # F-score computed by the last getMetric() call.
        return self._F
| 37.990354
| 123
| 0.52628
| 1,440
| 11,815
| 4.135417
| 0.125694
| 0.016793
| 0.033585
| 0.030227
| 0.814106
| 0.809068
| 0.8
| 0.8
| 0.791604
| 0.791604
| 0
| 0.010223
| 0.354211
| 11,815
| 310
| 124
| 38.112903
| 0.770249
| 0.086077
| 0
| 0.741379
| 0
| 0.017241
| 0.046169
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081897
| false
| 0.008621
| 0.025862
| 0.025862
| 0.163793
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee1cd77b81524c2b31402cc3fa64caa75aa12b40
| 122
|
py
|
Python
|
1. python-course-udemy/desafio_pacotes/app/utils/gerador.py
|
karlscode/python-basics
|
90f215de323f907cb692369b87c34659ba49f1d2
|
[
"MIT"
] | null | null | null |
1. python-course-udemy/desafio_pacotes/app/utils/gerador.py
|
karlscode/python-basics
|
90f215de323f907cb692369b87c34659ba49f1d2
|
[
"MIT"
] | null | null | null |
1. python-course-udemy/desafio_pacotes/app/utils/gerador.py
|
karlscode/python-basics
|
90f215de323f907cb692369b87c34659ba49f1d2
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python3
from random import choice
def novo_nome():
    """Return one first name picked uniformly at random from a fixed list."""
    nomes = ['Ana', 'Maria', 'Pedro', 'Rafael']
    return choice(nomes)
| 13.555556
| 54
| 0.639344
| 16
| 122
| 4.8125
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009901
| 0.172131
| 122
| 8
| 55
| 15.25
| 0.752475
| 0.147541
| 0
| 0
| 0
| 0
| 0.184466
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c9dc1c7cd87a5cb6fcba0e508fb2bf0aeb7fe00d
| 33,976
|
py
|
Python
|
backend/api/test_api_simple.py
|
ActionAnalytics/tfrs
|
83e1805312d3f13c6a7235e99840b44f399c8fde
|
[
"Apache-2.0"
] | null | null | null |
backend/api/test_api_simple.py
|
ActionAnalytics/tfrs
|
83e1805312d3f13c6a7235e99840b44f399c8fde
|
[
"Apache-2.0"
] | null | null | null |
backend/api/test_api_simple.py
|
ActionAnalytics/tfrs
|
83e1805312d3f13c6a7235e99840b44f399c8fde
|
[
"Apache-2.0"
] | null | null | null |
"""
REST API Documentation for the NRS TFRS Credit Trading Application
The Transportation Fuels Reporting System is being designed to streamline compliance reporting for transportation fuel suppliers in accordance with the Renewable & Low Carbon Fuel Requirements Regulation.
OpenAPI spec version: v1
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
from django.test import TestCase
from django.test import Client
import django
from rest_framework import status
from . import fakedata
from .serializers import CreditTradeStatusSerializer
from .serializers import CreditTradeTypeSerializer
from .serializers import CreditTradeZeroReasonSerializer
from .serializers import OrganizationActionsTypeSerializer
from .serializers import OrganizationStatusSerializer
from .serializers import PermissionSerializer
from .serializers import RoleSerializer
from .serializers import UserSerializer
# Simple API test cases.
# If an API operation contains generated code and requires a simple model object
# (one that is not complex, containing child items) then it is tested in this
# file.
#
# See the file test_api_complex.py for other test cases, which must be hand
# written.
class Test_Api_Simple(TestCase):
fixtures = ['organization_types.json',
'organization_government.json',
'organization_balance_gov.json',
'credit_trade_statuses.json',
'credit_trade_statuses_refused.json',
'organization_actions_types.json',
'organization_statuses.json',
'credit_trade_types.json',
'test_organization_fuel_suppliers.json',
'test_users.json',
]
def setUp(self):
# Every test needs a client.
self.client = Client(
HTTP_SMGOV_USERGUID='c9804c52-05f1-4a6a-9d24-332d9d8be2a9',
HTTP_SMAUTH_USERDISPLAYNAME='Brad Smith',
HTTP_SMGOV_USEREMAIL='BradJSmith@cuvox.de',
HTTP_SM_UNIVERSALID='BSmith')
# needed to setup django
django.setup()
def test_credittradestatusesBulkPost(self):
# Test Bulk Load.
payload = fakedata.CreditTradeStatusTestDataCreate()
jsonString = "[]"
response = self.client.post('/api/credittradestatuses/bulk',
content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_201_CREATED == response.status_code
def test_credittradestatusesGet(self):
# Test Create and List operations.
testUrl = "/api/credittradestatuses"
# Create:
serializer_class = CreditTradeStatusSerializer
payload = fakedata.CreditTradeStatusTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(testUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# List:
response = self.client.get(testUrl)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = testUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_credittradestatusesIdDeletePost(self):
# Test Retrieve and Update operations.
testUrl = "/api/credittradestatuses/(?P<id>[0-9]+)/delete"
createUrl = testUrl.replace("/(?P<id>[0-9]+)/delete", "")
# Create an object:
payload = fakedata.CreditTradeStatusTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
deleteUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_credittradestatusesIdGet(self):
# Test Retrieve and Update operations.
testUrl = "/api/credittradestatuses/(?P<id>[0-9]+)"
createUrl = testUrl.replace("/(?P<id>[0-9]+)", "")
# Create an object:
payload = fakedata.CreditTradeStatusTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# Update the object:
updateUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
payload = fakedata.CreditTradeStatusTestDataUpdate()
jsonString = json.dumps(payload)
response = self.client.put(updateUrl, content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = createUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_credittradetypesBulkPost(self):
# Test Bulk Load.
payload = fakedata.CreditTradeTypeTestDataCreate()
jsonString = "[]"
response = self.client.post('/api/credittradetypes/bulk',
content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_201_CREATED == response.status_code
def test_credittradetypesGet(self):
# Test Create and List operations.
testUrl = "/api/credittradetypes"
# Create:
serializer_class = CreditTradeTypeSerializer
payload = fakedata.CreditTradeTypeTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(testUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# List:
response = self.client.get(testUrl)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = testUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_credittradetypesIdDeletePost(self):
# Test Retrieve and Update operations.
testUrl = "/api/credittradetypes/(?P<id>[0-9]+)/delete"
createUrl = testUrl.replace("/(?P<id>[0-9]+)/delete", "")
# Create an object:
payload = fakedata.CreditTradeTypeTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
deleteUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_credittradetypesIdGet(self):
# Test Retrieve and Update operations.
testUrl = "/api/credittradetypes/(?P<id>[0-9]+)"
createUrl = testUrl.replace("/(?P<id>[0-9]+)", "")
# Create an object:
payload = fakedata.CreditTradeTypeTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# Update the object:
updateUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
payload = fakedata.CreditTradeTypeTestDataUpdate()
jsonString = json.dumps(payload)
response = self.client.put(updateUrl, content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = createUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_credittradezeroreasonBulkPost(self):
# Test Bulk Load.
payload = fakedata.CreditTradeZeroReasonTestDataCreate()
jsonString = "[]"
response = self.client.post('/api/credittradezeroreason/bulk',
content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_201_CREATED == response.status_code
def test_credittradezeroreasonGet(self):
# Test Create and List operations.
testUrl = "/api/credittradezeroreason"
# Create:
serializer_class = CreditTradeZeroReasonSerializer
payload = fakedata.CreditTradeZeroReasonTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(testUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# List:
response = self.client.get(testUrl)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = testUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_credittradezeroreasonIdDeletePost(self):
# Test Retrieve and Update operations.
testUrl = "/api/credittradezeroreason/(?P<id>[0-9]+)/delete"
createUrl = testUrl.replace("/(?P<id>[0-9]+)/delete", "")
# Create an object:
payload = fakedata.CreditTradeZeroReasonTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
deleteUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_credittradezeroreasonIdGet(self):
# Test Retrieve and Update operations.
testUrl = "/api/credittradezeroreason/(?P<id>[0-9]+)"
createUrl = testUrl.replace("/(?P<id>[0-9]+)", "")
# Create an object:
payload = fakedata.CreditTradeZeroReasonTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# Update the object:
updateUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
payload = fakedata.CreditTradeZeroReasonTestDataUpdate()
jsonString = json.dumps(payload)
response = self.client.put(updateUrl, content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = createUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_organizationactionstypesBulkPost(self):
# Test Bulk Load.
payload = fakedata.OrganizationActionsTypeTestDataCreate()
jsonString = "[]"
response = self.client.post('/api/organization_actions_types/bulk',
content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_201_CREATED == response.status_code
def test_organizationactionstypesGet(self):
# Test Create and List operations.
testUrl = "/api/organization_actions_types"
# Create:
serializer_class = OrganizationActionsTypeSerializer
payload = fakedata.OrganizationActionsTypeTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(testUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# List:
response = self.client.get(testUrl)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = testUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_organizationactionstypesIdDeletePost(self):
# Test Retrieve and Update operations.
testUrl = "/api/organization_actions_types/(?P<id>[0-9]+)/delete"
createUrl = testUrl.replace("/(?P<id>[0-9]+)/delete", "")
# Create an object:
payload = fakedata.OrganizationActionsTypeTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
deleteUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_organizationactionstypesIdGet(self):
# Test Retrieve and Update operations.
testUrl = "/api/organization_actions_types/(?P<id>[0-9]+)"
createUrl = testUrl.replace("/(?P<id>[0-9]+)", "")
# Create an object:
payload = fakedata.OrganizationActionsTypeTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# Update the object:
updateUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
payload = fakedata.OrganizationActionsTypeTestDataUpdate()
jsonString = json.dumps(payload)
response = self.client.put(updateUrl, content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = createUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_organizationstatusesBulkPost(self):
# Test Bulk Load.
payload = fakedata.OrganizationStatusTestDataCreate()
jsonString = "[]"
response = self.client.post('/api/organization_statuses/bulk',
content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_201_CREATED == response.status_code
def test_organizationstatusesGet(self):
# Test Create and List operations.
testUrl = "/api/organization_statuses"
# Create:
serializer_class = OrganizationStatusSerializer
payload = fakedata.OrganizationStatusTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(testUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# List:
response = self.client.get(testUrl)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = testUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_organizationstatusesIdDeletePost(self):
# Test Retrieve and Update operations.
testUrl = "/api/organization_statuses/(?P<id>[0-9]+)/delete"
createUrl = testUrl.replace("/(?P<id>[0-9]+)/delete", "")
# Create an object:
payload = fakedata.OrganizationStatusTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
deleteUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_organizationstatusesIdGet(self):
# Test Retrieve and Update operations.
testUrl = "/api/organization_statuses/(?P<id>[0-9]+)"
createUrl = testUrl.replace("/(?P<id>[0-9]+)", "")
# Create an object:
payload = fakedata.OrganizationStatusTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# Update the object:
updateUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
payload = fakedata.OrganizationStatusTestDataUpdate()
jsonString = json.dumps(payload)
response = self.client.put(updateUrl, content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = createUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_permissionsBulkPost(self):
# Test Bulk Load.
payload = fakedata.PermissionTestDataCreate()
jsonString = "[]"
response = self.client.post('/api/permissions/bulk',
content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_201_CREATED == response.status_code
def test_permissionsGet(self):
# Test Create and List operations.
testUrl = "/api/permissions"
# Create:
serializer_class = PermissionSerializer
payload = fakedata.PermissionTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(testUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# List:
response = self.client.get(testUrl)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = testUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_permissionsIdDeletePost(self):
# Test Retrieve and Update operations.
testUrl = "/api/permissions/(?P<id>[0-9]+)/delete"
createUrl = testUrl.replace("/(?P<id>[0-9]+)/delete", "")
# Create an object:
payload = fakedata.PermissionTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
deleteUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_permissionsIdGet(self):
# Test Retrieve and Update operations.
testUrl = "/api/permissions/(?P<id>[0-9]+)"
createUrl = testUrl.replace("/(?P<id>[0-9]+)", "")
# Create an object:
payload = fakedata.PermissionTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# Update the object:
updateUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
payload = fakedata.PermissionTestDataUpdate()
jsonString = json.dumps(payload)
response = self.client.put(updateUrl, content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = createUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_rolesBulkPost(self):
# Test Bulk Load.
payload = fakedata.RoleTestDataCreate()
jsonString = "[]"
response = self.client.post('/api/roles/bulk',
content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_201_CREATED == response.status_code
def test_rolesGet(self):
# Test Create and List operations.
testUrl = "/api/roles"
# Create:
serializer_class = RoleSerializer
payload = fakedata.RoleTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(testUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# List:
response = self.client.get(testUrl)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = testUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_rolesIdDeletePost(self):
# Test Retrieve and Update operations.
testUrl = "/api/roles/(?P<id>[0-9]+)/delete"
createUrl = testUrl.replace("/(?P<id>[0-9]+)/delete", "")
# Create an object:
payload = fakedata.RoleTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
deleteUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_rolesIdGet(self):
# Test Retrieve and Update operations.
testUrl = "/api/roles/(?P<id>[0-9]+)"
createUrl = testUrl.replace("/(?P<id>[0-9]+)", "")
# Create an object:
payload = fakedata.RoleTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# Update the object:
updateUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
payload = fakedata.RoleTestDataUpdate()
jsonString = json.dumps(payload)
response = self.client.put(updateUrl, content_type='application/json',
data=jsonString)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = createUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
# def test_usersBulkPost(self):
# # Test Bulk Load.
# payload = fakedata.UserTestDataCreate()
# jsonString = "[]"
# response = self.client.post('/api/users/bulk',
# content_type='application/json',
# data=jsonString)
# # Check that the response is 200 OK.
# print(response.status_code)
# print(response.content.decode("utf-8"))
# assert status.HTTP_201_CREATED == response.status_code
def test_usersGet(self):
# Test Create and List operations.
testUrl = "/api/users"
# Create:
serializer_class = UserSerializer
payload = fakedata.UserTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(testUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# List:
response = self.client.get(testUrl)
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = testUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_usersIdDeletePost(self):
# Test Retrieve and Update operations.
testUrl = "/api/users/(?P<id>[0-9]+)/delete"
createUrl = testUrl.replace("/(?P<id>[0-9]+)/delete", "")
# Create an object:
payload = fakedata.UserTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
deleteUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
def test_usersIdGet(self):
# Test Retrieve and Update operations.
testUrl = "/api/users/(?P<id>[0-9]+)"
createUrl = testUrl.replace("/(?P<id>[0-9]+)", "")
# Create an object:
payload = fakedata.UserTestDataCreate()
jsonString = json.dumps(payload)
response = self.client.post(createUrl, content_type='application/json',
data=jsonString)
# Check that the response is OK.
assert status.HTTP_201_CREATED == response.status_code
# parse the response.
jsonString = response.content.decode("utf-8")
data = json.loads(jsonString)
createdId = data['id']
# Update the object:
updateUrl = testUrl.replace("(?P<id>[0-9]+)", str(createdId))
payload = fakedata.UserTestDataUpdate()
jsonString = json.dumps(payload)
response = self.client.put(updateUrl, content_type='application/json',
data=jsonString)
# print(response.status_code)
# print(response.content.decode("utf-8"))
# Check that the response is 200 OK.
assert status.HTTP_200_OK == response.status_code
# Cleanup:
deleteUrl = createUrl + "/" + str(createdId) + "/delete"
response = self.client.post(deleteUrl)
# Check that the response is OK.
assert status.HTTP_204_NO_CONTENT == response.status_code
if __name__ == '__main__':
unittest.main()
| 45.361816
| 208
| 0.620997
| 3,549
| 33,976
| 5.839955
| 0.074669
| 0.050951
| 0.064267
| 0.069478
| 0.827222
| 0.826836
| 0.804159
| 0.799624
| 0.787755
| 0.78264
| 0
| 0.017768
| 0.277755
| 33,976
| 748
| 209
| 45.42246
| 0.826847
| 0.179862
| 0
| 0.766529
| 0
| 0
| 0.100797
| 0.050543
| 0
| 0
| 0
| 0
| 0.146694
| 1
| 0.066116
| false
| 0
| 0.028926
| 0
| 0.099174
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9f17f4ccdae4b759daec0b0e980cebf0bbb7968
| 3,113
|
py
|
Python
|
BotBase/methods/various.py
|
pokurt/BotBase
|
be12ade2365d539a6abd319a1a1185fc27ac97f2
|
[
"Apache-2.0"
] | 1
|
2020-12-13T06:50:35.000Z
|
2020-12-13T06:50:35.000Z
|
BotBase/methods/various.py
|
pokurt/BotBase
|
be12ade2365d539a6abd319a1a1185fc27ac97f2
|
[
"Apache-2.0"
] | 1
|
2020-07-19T20:06:14.000Z
|
2020-07-19T20:06:14.000Z
|
BotBase/methods/various.py
|
pokurt/BotBase
|
be12ade2365d539a6abd319a1a1185fc27ac97f2
|
[
"Apache-2.0"
] | 1
|
2021-04-22T18:47:38.000Z
|
2021-04-22T18:47:38.000Z
|
from pyrogram.errors import RPCError, FloodWait
import time
from pyrogram import CallbackQuery
import logging
def answer(query: CallbackQuery, sleep: bool = True, *args, **kwargs):
"""Answers a query in a way that never triggers exceptions and logs errors
:param query: The pyrogram.CallbackQuery object to call the method for
:type query: class: CallbackQuery
:param sleep: If True, the default, the function will call time.sleep()
in case of a FloodWait exception and return the exception object
after the sleep is done, otherwise the ``FloodWait`` exception is returned
immediately
:returns: Whatever the called pyrogram method returns, or an exception if
the method call caused an error
"""
try:
return query.answer(*args, **kwargs)
except FloodWait as fw:
logging.warning(f"FloodWait! A wait of {fw.x} seconds is required")
if sleep:
time.sleep(fw.x)
return fw
except RPCError as generic_error:
logging.error(f"An exception occurred: {generic_error}")
return generic_error
def delete_messages(client, sleep: bool = True, *args, **kwargs):
"""Deletes messages in a way that never triggers exceptions and logs errors
:param client: The pyrogram.Client instance to call the method for
:type client: class: Client
:param sleep: If True, the default, the function will call time.sleep()
in case of a FloodWait exception and return the exception object
after the sleep is done, otherwise the ``FloodWait`` exception is returned
immediately
:returns: Whatever the called pyrogram method returns, or an exception if
the method call caused an error
"""
try:
return client.delete_messages(*args, **kwargs)
except FloodWait as fw:
logging.warning(f"FloodWait! A wait of {fw.x} seconds is required")
if sleep:
time.sleep(fw.x)
return fw
except RPCError as generic_error:
logging.error(f"An exception occurred: {generic_error}")
return generic_error
def get_users(client, sleep: bool = True, *args, **kwargs):
"""Calls get_users in a way that never triggers exceptions and logs errors
:param client: The pyrogram.Client instance to call the method for
:type client: class: Client
:param sleep: If True, the default, the function will call time.sleep()
in case of a FloodWait exception and return the exception object
after the sleep is done, otherwise the ``FloodWait`` exception is returned
immediately
:returns: Whatever the called pyrogram method returns, or an exception if
the method call caused an error
"""
try:
return client.get_users(*args, **kwargs)
except FloodWait as fw:
logging.warning(f"FloodWait! A wait of {fw.x} seconds is required")
if sleep:
time.sleep(fw.x)
return fw
except RPCError as generic_error:
logging.error(f"An exception occurred: {generic_error}")
return generic_error
| 38.9125
| 81
| 0.679088
| 420
| 3,113
| 5
| 0.183333
| 0.051429
| 0.018571
| 0.024286
| 0.868571
| 0.857619
| 0.819524
| 0.819524
| 0.819524
| 0.819524
| 0
| 0
| 0.25249
| 3,113
| 79
| 82
| 39.405063
| 0.90245
| 0.483135
| 0
| 0.72973
| 0
| 0
| 0.180851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081081
| false
| 0
| 0.108108
| 0
| 0.432432
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a01a6327026d1cf7a231d112cc34983e02791e14
| 128
|
py
|
Python
|
examples/fact_tail.py
|
joeldentici/python_stepper
|
ab32c62d0d0333ad901d7329fb198c7a23988007
|
[
"MIT"
] | 1
|
2020-11-29T20:00:39.000Z
|
2020-11-29T20:00:39.000Z
|
examples/fact_tail.py
|
joeldentici/python_stepper
|
ab32c62d0d0333ad901d7329fb198c7a23988007
|
[
"MIT"
] | null | null | null |
examples/fact_tail.py
|
joeldentici/python_stepper
|
ab32c62d0d0333ad901d7329fb198c7a23988007
|
[
"MIT"
] | null | null | null |
def fact_acc(n, acc):
return acc if n < 2 else fact_acc(n - 1, acc * n)
def fact(n):
return fact_acc(n, 1)
result = fact(5)
| 16
| 50
| 0.632813
| 28
| 128
| 2.785714
| 0.392857
| 0.205128
| 0.307692
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04
| 0.21875
| 128
| 7
| 51
| 18.285714
| 0.74
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
a0203c97bb780c220ebbaa89371228dc339843cd
| 23,533
|
py
|
Python
|
libraries/opt_lib.py
|
JIMonroe/Surface_Affinities_Optimization
|
94853571c690b099362431aac32d26611134a009
|
[
"MIT"
] | null | null | null |
libraries/opt_lib.py
|
JIMonroe/Surface_Affinities_Optimization
|
94853571c690b099362431aac32d26611134a009
|
[
"MIT"
] | null | null | null |
libraries/opt_lib.py
|
JIMonroe/Surface_Affinities_Optimization
|
94853571c690b099362431aac32d26611134a009
|
[
"MIT"
] | 1
|
2021-03-07T11:52:27.000Z
|
2021-03-07T11:52:27.000Z
|
#A library of genetic algorithm optimization procedures
import sys, os
import shutil
import subprocess
import multiprocessing
from datetime import datetime
import time
import pickle
import glob
import copy
import numpy as np
import scipy.stats as stats
import scipy.optimize as optimize
import pytraj as pt
import parmed as pmd
import waterlib as wl
from pymbar import mbar
from genetic_lib import *
#Defines a genetic algorithm optimization of self-assembled monolayer surfaces
#A lot of commenting out has been done - this is mainly to remove the clustering step
#Not removing commented out lines for now just to show how would do clustering if at any point wanted to
def doOptSAM(numSurfOH, genMax, randomGens, metricFunc, metricFuncArgs={}, findMax=False, CH3chainFilePrefix='/home/jmonroe/Surface_Affinities_Project/FFfiles/ctmSAM', OHchainFilePrefix='/home/jmonroe/Surface_Affinities_Project/FFfiles/otmSAM'):
Usage = """
Uses genetic algorithms to optimize solute solvation free energy near a SAM surface by
adjusting the locations of hydroxyl-terminated chains at fixed density. This
is done by treating each chain lattice location as a 0 (CH3 chain) or 1 (OH
chain). The resulting boolean array specifies where the OH chains are
located on the standard lattice structure, with a mappping assumed such that
point (0,0)->0 in the array, (0,1)->1, (0,2)->2, ... (N,M)->N*M+M. The
genetic algorithm uses binary tournament selection in order to pick parents.
Generation and mutation steps follow.
One must select the way in which fitness metrics are calculated by providing
a function object to doOpt that performs this calculation. This allows for
simple switching between molecular dynamics and machine-learned functions
to evaluate fitness.
numSurfOH - total number of hydroxyls (per side) for desired density
genMax - the number of generations that will be produced
randomGens - the number of generations before optimization starts (random generations)
metricFunc - A function object that will be used to obtain the fitness metric.
This MUST take a list of SAM structure objects as input.
It MUST output a list of fitness metrics of the same as the input.
metricFuncArgs - (default empty dictionary) A dictionary of keyword arguments defining other
parameters for the metric function object to use. Using keyword arguments
to force the user to be very explicit and automatically check to make sure
variables match up.
findMax - (default False) whether or not to optimizes to maximum (True) versus minimum (False)
CH3chainFilePrefix - (optional) file prefix specifying the location before .top
and .gro suffixes for the CH3-terminated single chain
OHchainFilePrefix - (optional) file prefix for the OH-terminated chain
"""
#Set up directory structure
try:
os.mkdir('structures') #Holds all surface structures tested
except OSError:
pass
chainsX = 6
chainsY = 8 #Number of chains along x and y dimensions of SAM lattice
OHdens = np.sum(numSurfOH) / 10.06 #hydroxyl surface density (approximately)
genSize = 8 #size of each generation (for performing simulations, calculating fitness metrics)
#Also sets number of top performers to consider for evolution
MutRate = 0.06 #6% mutation rate
Nmods = int(MutRate*chainsX*chainsY/2.0)
if Nmods < 1:
Nmods = 1
print(str(datetime.now()))
print("Optimization of surface solute solvation free energy:")
print("Run parameters:")
if findMax:
print(" Type of optimization: Max")
else:
print(" Type of optimization: Min")
print(" Approximate surface density: %3.2f OH/nm^2" % OHdens)
print(" Generation size: %i" % genSize)
print(" Mutation rate: %i per surface" % Nmods)
print(" Maximum number of generations: %i" % genMax)
#Read in CH3 and OH chain structure files and put in list
CH3chain = pmd.load_file(CH3chainFilePrefix+'.top', xyz=CH3chainFilePrefix+'_tilted.gro')
OHchain = pmd.load_file(OHchainFilePrefix+'.top', xyz=OHchainFilePrefix+'_tilted.gro')
chainStructs = [CH3chain, OHchain]
#Create lists to keep track of tested structures
try:
with open('structure_library.pkl', 'r') as infile:
allStructs = pickle.load(infile)
except IOError:
allStructs = []
#Also create list for all solvation fitness metrics o use every time we sort
allMetrics = [struct.metric for struct in allStructs]
#Also create temporary lists for current working structures and fitness metrics
#Allow for restarts, though
if len(allStructs) == 0:
#Restarts should work, but currently cannot seed with specified starting structures
#unless have already run them to get fitness metrics and set up structure classes
#Until then, just have to get through random generation of surfaces at least once
currStructs = genSurfsSAM(chainStructs, genSize, Nmods, [], 0, numSurfOH, chainsX, chainsY, doMerge=False)
currMetrics = np.zeros(genSize).tolist()
genCount = 0
else:
#Need to make sure structures sorted by fitness metric, smallest to largest
combinedList = sorted(zip(allMetrics, allStructs))
allStructs = [x for (y,x) in combinedList]
allMetrics = [x for (x,y) in combinedList]
genCount = np.max([struct.gen for struct in allStructs])
#Pick current set of indices to use as parents for next generation
#Do this by clustering, sorting, then tournament selection
#Not doing clustering anymore!
#Instead changing process... more initial random structures, then pooling multiple short optimizations
# clustList = clusterSurfs(allStructs, 'SAM', 0.42, MaxIter=200, MaxCluster=-32,
# MaxClusterWork=None, Verbose=False)
# #Note that forcing configurations into MAX of 32 clusters
# #setting the cutoff to 0.42 seems to provide good clustering, based on some experience
# for k, aclust in enumerate(clustList):
# thisMetrics = [asurf.metric for asurf in aclust]
# thisCombinedList = sorted(zip(thisMetrics, aclust))
# clustList[k] = [x for (y,x) in thisCombinedList]
# #May not have 32 clusters, so fill out 32-member bracket by looping through clusters
# #Note that 32 = genSize*bracketSize*2 -> seems to work well for creating some diversity but also drive
# bracketSurfs = []
# for k in range(len(clustList[0])): #zeroth should be biggest cluster
# for aclust in clustList:
# if len(aclust) > k:
# if findMax:
# bracketSurfs.append(aclust[-(k+1)])
# else:
# bracketSurfs.append(aclust[k])
# if len(bracketSurfs) >= 32:
# break
# #Fancy break structure below breaks out of outer loop if break in inner loop triggered
# else:
# continue
# break
# print("After clustering and sorting, have %i surfaces going into bracket" % len(bracketSurfs))
if findMax:
bracketSurfs = allStructs[-32:]
else:
bracketSurfs = allStructs[:32]
bracketMetrics = [asurf.metric for asurf in bracketSurfs]
currInds = tournamentSelect(bracketMetrics, Nparents=genSize, bracketSize=2, optMax=findMax)
#Below uses bracket size proportional to population size
#currInds = tournamentSelect(allMetrics, Nparents=genSize, bracketSize=None, optMax=findMax)
currStructs = [bracketSurfs[x] for x in currInds]
currMetrics = [bracketMetrics[x] for x in currInds]
print("From up to generation %i, have chosen following parents:" % genCount)
for struct in currStructs:
print("%s, %f, " % (struct.topFile, struct.metric))
genCount += 1
print("\nSet-up finished. Beginning genetic algorithm optimization.")
#Loop until stopping criteria reached, for now number of iterations/generations
while genCount <= genMax:
if genCount != 0:
#If at zeroth generation, already handled above, otherwise, check if this generation
#should be random or driven by genetic algorithm
if genCount >= randomGens:
#Need to take current surfaces and produce new generation
#Assumes currStructs contains overall most fit candidates only
#Newly generated surfaces will be simulated and fitness metric determined
#Then sorting will be re-performed and currStructs updated
currStructs = genSurfsSAM(chainStructs, genSize, Nmods, currStructs, genCount,
numSurfOH, chainsX, chainsY, doMerge=True)
else:
#If not past randomGens, need to generate random surfaces
#Should always be done at least once, i.e. randomGens should be greater than zero
currStructs = genSurfsSAM(chainStructs, genSize, Nmods, [], genCount,
numSurfOH, chainsX, chainsY, doMerge=False)
#Evaluate the metric function for each surface in currStructs to update currMetrics
currMetrics = metricFunc(currStructs, **metricFuncArgs)
if isinstance(currMetrics, np.ndarray):
currMetrics = currMetrics.tolist()
for k, astruct in enumerate(currStructs):
astruct.metric = currMetrics[k]
#Add current structures and fitness metric to total list
allStructs = allStructs + currStructs
allMetrics = allMetrics + currMetrics
#Sort by the fitness metric of interest
combinedList = sorted(zip(allMetrics, allStructs))
allStructs = [x for (y,x) in combinedList]
allMetrics = [x for (x,y) in combinedList]
#Save current structure library information (contains fitness metrics)
with open('structure_library.pkl', 'w') as infile:
pickle.dump(allStructs, infile)
#Select indices to use as parents for next generation
#Do this by clustering, sorting, then tournament selection
#Not doing clustering anymore!
#Instead changing process... more initial random structures, then pooling multiple short optimizations
# clustList = clusterSurfs(allStructs, 'SAM', 0.42, MaxIter=200, MaxCluster=-32,
# MaxClusterWork=None, Verbose=False)
# #Note that forcing configurations into MAX of 32 clusters
# #setting the cutoff to 0.42 seems to provide good clustering, based on some experience
# for k, aclust in enumerate(clustList):
# thisMetrics = [asurf.metric for asurf in aclust]
# thisCombinedList = sorted(zip(thisMetrics, aclust))
# clustList[k] = [x for (y,x) in thisCombinedList]
# #May not have 32 clusters, so fill out 32-member bracket by looping through clusters
# #Note that 32 = genSize*bracketSize*2 -> seems to work well for creating some diversity but also drive
# bracketSurfs = []
# for k in range(len(clustList[0])): #zeroth should be biggest cluster
# for aclust in clustList:
# if len(aclust) > k:
# if findMax:
# bracketSurfs.append(aclust[-(k+1)])
# else:
# bracketSurfs.append(aclust[k])
# if len(bracketSurfs) >= 32:
# break
# #Fancy break structure below breaks out of outer loop if break in inner loop triggered
# else:
# continue
# break
# print("After clustering and sorting, have %i surfaces going into bracket" % len(bracketSurfs))
if findMax:
bracketSurfs = allStructs[-32:]
else:
bracketSurfs = allStructs[:32]
bracketMetrics = [asurf.metric for asurf in bracketSurfs]
currInds = tournamentSelect(bracketMetrics, Nparents=genSize, bracketSize=2, optMax=findMax)
#Below uses bracket size proportional to population size instead
#currInds = tournamentSelect(allMetrics, Nparents=genSize, bracketSize=None, optMax=findMax)
currStructs = [bracketSurfs[x] for x in currInds]
currMetrics = [bracketMetrics[x] for x in currInds]
print("From up to generation %i, have chosen following parents:" % genCount)
for struct in currStructs:
print("%s, %f, " % (struct.topFile, struct.metric))
if findMax:
print("Current optimum (max) solvation free energy at generation %i: %f" % (genCount, allMetrics[-1]))
print("From structure: %s (%f)\n" % (allStructs[-1].topFile, allStructs[-1].metric))
else:
print("Current optimum (min) solvation free energy at generation %i: %f" % (genCount, allMetrics[0]))
print("From structure: %s (%f)\n" % (allStructs[0].topFile, allStructs[0].metric))
genCount += 1
#Now a function that handles charged SAM head-groups and a neutral background head-group
def doOptSAMcharged(numSurfCharged, genMax, randomGens, metricFunc, metricFuncArgs=None, findMax=False,
                    NeutralchainFilePrefix='/home/jmonroe/Surface_Affinities_Project/FFfiles/ctmSAM',
                    NegchainFilePrefix='/home/jmonroe/Surface_Affinities_Project/FFfiles/stmSAM',
                    PoschainFilePrefix='/home/jmonroe/Surface_Affinities_Project/FFfiles/ntmSAM'):
    """
    Uses genetic algorithms to optimize solute solvation free energy near a SAM surface by
    adjusting the locations of two types of charged chains at fixed density. Neutral chain
    lattice locations are treated as 0 (neutral chain), while negative (sulfonate chains)
    are 1 and positive (quaternary ammonium) 2. The resulting integer array specifies where
    the charged chains are located on the standard lattice structure, with a mapping
    assumed such that point (0,0)->0 in the array, (0,1)->1, (0,2)->2, ... (N,M)->N*M+M.
    The genetic algorithm uses binary tournament selection in order to pick parents.
    Generation and mutation steps follow.
    One must select the way in which fitness metrics are calculated by providing
    a function object that performs this calculation. This allows for simple switching
    between molecular dynamics and machine-learned functions to evaluate fitness.
    numSurfCharged - total number of charged headgroups (per side) for desired density
                     SHOULD be length two array if you want to be really precise.
                     Does NOT assume that the head-groups are of equivalent charge or
                     that charge neutrality is maintained, so BE CAREFUL.
                     If it's just a float or length 1 array, it WILL be assumed that
                     you want the same number of both charged chain types, REGARDLESS
                     OF CHARGE.
    genMax - the number of generations that will be produced
    randomGens - the number of generations before optimization starts (random generations)
    metricFunc - A function object that will be used to obtain the fitness metric.
                 This MUST take a list of SAM structure objects as input.
                 It MUST output a list of fitness metrics of the same length as the input.
    metricFuncArgs - (default None -> empty dict) A dictionary of keyword arguments defining
                     other parameters for the metric function object to use.
    findMax - (default False) whether to optimize to maximum (True) versus minimum (False)
    NeutralchainFilePrefix - (optional) file prefix specifying the location before .top
                             and .gro suffixes for the neutral-terminated single chain
    NegchainFilePrefix - (optional) file prefix for the chain with negative termination
    PoschainFilePrefix - (optional) file prefix for the chain with positive termination
    """
    #Use None as the default to avoid sharing a mutable dict between calls
    if metricFuncArgs is None:
        metricFuncArgs = {}

    def sortByMetric(structs, metrics):
        #Sort structures and metrics together by fitness metric, smallest to largest
        combined = sorted(zip(metrics, structs))
        return [s for (m, s) in combined], [m for (m, s) in combined]

    def selectParents(structs):
        #Pick parents by binary tournament selection over the 32 most fit structures
        #(32 = genSize*bracketSize*2 -> seems to work well for creating some diversity)
        if findMax:
            bracketSurfs = structs[-32:]
        else:
            bracketSurfs = structs[:32]
        bracketMetrics = [asurf.metric for asurf in bracketSurfs]
        #Below uses bracket size proportional to population size instead
        #currInds = tournamentSelect(allMetrics, Nparents=genSize, bracketSize=None, optMax=findMax)
        currInds = tournamentSelect(bracketMetrics, Nparents=genSize, bracketSize=2, optMax=findMax)
        return [bracketSurfs[x] for x in currInds], [bracketMetrics[x] for x in currInds]

    def reportParents(parents, gen):
        #Log the chosen parents for this generation
        print("From up to generation %i, have chosen following parents:" % gen)
        for struct in parents:
            print("%s, %f, " % (struct.topFile, struct.metric))

    #Set up directory structure
    try:
        os.mkdir('structures')  #Holds all surface structures tested
    except OSError:
        pass
    chainsX = 6
    chainsY = 8  #Number of chains along x and y dimensions of SAM lattice
    ChargeDens = np.array([numSurfCharged]).flatten() / 10.06  #surface density (approximately)
    genSize = 8  #size of each generation (for performing simulations, calculating fitness metrics)
    #Also sets number of top performers to consider for evolution
    MutRate = 0.06  #6% mutation rate
    Nmods = int(MutRate*chainsX*chainsY/2.0)
    if Nmods < 1:
        Nmods = 1
    print(str(datetime.now()))
    print("Optimization of surface solute solvation free energy:")
    print("Run parameters:")
    if findMax:
        print(" Type of optimization: Max")
    else:
        print(" Type of optimization: Min")
    #BUG FIX: original tested len(ChargeDens == 1) / len(ChargeDens == 2), i.e. the
    #length of a boolean comparison array, so the first branch was always taken
    if len(ChargeDens) == 1:
        print(" Have specified single density for all charged chain types of approximately: %3.2f 1/nm^2" % ChargeDens[0])
    elif len(ChargeDens) == 2:
        print(" Approximate surface density for first charged chain type (default negative): %3.2f 1/nm^2" % ChargeDens[0])
        print(" Approximate surface density for second charged chain type (default positive): %3.2f 1/nm^2" % ChargeDens[1])
    else:
        print(" Have specified number of chains for more than two chain types - only supports two charged chain types.")
        sys.exit(2)
    print(" Generation size: %i" % genSize)
    print(" Mutation rate: %i per surface" % Nmods)
    print(" Maximum number of generations: %i" % genMax)
    #Read in neutral, negative, and positive chain structure files and put in list
    neutralChain = pmd.load_file(NeutralchainFilePrefix+'.top', xyz=NeutralchainFilePrefix+'_tilted.gro')
    negChain = pmd.load_file(NegchainFilePrefix+'.top', xyz=NegchainFilePrefix+'_tilted.gro')
    posChain = pmd.load_file(PoschainFilePrefix+'.top', xyz=PoschainFilePrefix+'_tilted.gro')
    chainStructs = [neutralChain, negChain, posChain]
    #Create lists to keep track of tested structures
    #BUG FIX: pickle files must be opened in binary mode
    try:
        with open('structure_library.pkl', 'rb') as infile:
            allStructs = pickle.load(infile)
    except IOError:
        allStructs = []
    #Also create list for all solvation fitness metrics to use every time we sort
    allMetrics = [struct.metric for struct in allStructs]
    #Also create temporary lists for current working structures and fitness metrics
    #Allow for restarts, though
    if len(allStructs) == 0:
        #Restarts should work, but currently cannot seed with specified starting structures
        #unless have already run them to get fitness metrics and set up structure classes
        #Until then, just have to get through random generation of surfaces at least once
        currStructs = genSurfsSAM(chainStructs, genSize, Nmods, [], 0, numSurfCharged, chainsX, chainsY, doMerge=False)
        currMetrics = np.zeros(genSize).tolist()
        genCount = 0
    else:
        #Need to make sure structures sorted by fitness metric, smallest to largest
        allStructs, allMetrics = sortByMetric(allStructs, allMetrics)
        genCount = np.max([struct.gen for struct in allStructs])
        #Pick current set of indices to use as parents for next generation
        currStructs, currMetrics = selectParents(allStructs)
        reportParents(currStructs, genCount)
        genCount += 1
    print("\nSet-up finished. Beginning genetic algorithm optimization.")
    #Loop until stopping criteria reached, for now number of iterations/generations
    while genCount <= genMax:
        if genCount != 0:
            #If at zeroth generation, already handled above; otherwise check whether this
            #generation should be random or driven by the genetic algorithm
            if genCount >= randomGens:
                #Take current parents and produce new generation; surfaces will be
                #simulated, fitness determined, then sorting re-performed below
                currStructs = genSurfsSAM(chainStructs, genSize, Nmods, currStructs, genCount,
                                          numSurfCharged, chainsX, chainsY, doMerge=True)
            else:
                #Not past randomGens yet, so generate random surfaces
                #Should always be done at least once, i.e. randomGens should be greater than zero
                currStructs = genSurfsSAM(chainStructs, genSize, Nmods, [], genCount,
                                          numSurfCharged, chainsX, chainsY, doMerge=False)
            #Evaluate the metric function for each surface in currStructs
            currMetrics = metricFunc(currStructs, **metricFuncArgs)
            if isinstance(currMetrics, np.ndarray):
                currMetrics = currMetrics.tolist()
            for k, astruct in enumerate(currStructs):
                astruct.metric = currMetrics[k]
        #Add current structures and fitness metrics to total list and re-sort
        allStructs = allStructs + currStructs
        allMetrics = allMetrics + currMetrics
        allStructs, allMetrics = sortByMetric(allStructs, allMetrics)
        #Save current structure library information (contains fitness metrics)
        #BUG FIX: binary mode for pickle; also this is an output file, not input
        with open('structure_library.pkl', 'wb') as outfile:
            pickle.dump(allStructs, outfile)
        #Select parents for the next generation
        currStructs, currMetrics = selectParents(allStructs)
        reportParents(currStructs, genCount)
        if findMax:
            print("Current optimum (max) solvation free energy at generation %i: %f" % (genCount, allMetrics[-1]))
            print("From structure: %s (%f)\n" % (allStructs[-1].topFile, allStructs[-1].metric))
        else:
            print("Current optimum (min) solvation free energy at generation %i: %f" % (genCount, allMetrics[0]))
            print("From structure: %s (%f)\n" % (allStructs[0].topFile, allStructs[0].metric))
        genCount += 1
| 52.064159
| 340
| 0.702885
| 2,959
| 23,533
| 5.581615
| 0.179115
| 0.004359
| 0.003633
| 0.003391
| 0.843425
| 0.830891
| 0.815936
| 0.803948
| 0.803948
| 0.803948
| 0
| 0.009676
| 0.222709
| 23,533
| 451
| 341
| 52.179601
| 0.893232
| 0.340968
| 0
| 0.740072
| 0
| 0.025271
| 0.456445
| 0.024805
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00722
| false
| 0.00722
| 0.061372
| 0
| 0.068592
| 0.140794
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e68082d00429fc3589e8b3431f613742cd95f4c
| 2,182
|
py
|
Python
|
tests/users.py
|
gaspatchi/usver
|
a67ccbb07bac1662ec099262ea276116ead67efa
|
[
"Apache-2.0"
] | null | null | null |
tests/users.py
|
gaspatchi/usver
|
a67ccbb07bac1662ec099262ea276116ead67efa
|
[
"Apache-2.0"
] | null | null | null |
tests/users.py
|
gaspatchi/usver
|
a67ccbb07bac1662ec099262ea276116ead67efa
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import requests
class TestValideUserService(unittest.TestCase):
    """Happy-path tests for the user service: register, login, select profile."""

    # Shared across test methods: test_login stores the issued token here so that
    # test_select (which runs later in alphabetical order) can authenticate.
    token = ""

    def setUp(self):
        self.host = "http://127.0.0.1"
        self.register_profile = {
            "firstname": "Никита",
            "lastname": "Бережной",
            "email": "nikitoshi@test.ru",
            "password": "13371488",
        }
        self.login_profile = {
            "firstname": "Никита",
            "lastname": "Бережной",
            "email": "nikitoshi@gaspatchi.ru",
            "password": "13371488",
        }

    def test_register(self):
        """A valid registration payload is accepted with a confirmation message."""
        response = requests.post(f"{self.host}/user/register", json=self.register_profile)
        payload = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertIn("message", payload)

    def test_login(self):
        """Valid credentials yield a token, which is kept for later requests."""
        response = requests.post(f"{self.host}/user/login", json=self.login_profile)
        payload = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertIn("token", payload)
        self.__class__.token = payload["token"]

    def test_select(self):
        """An authenticated profile request returns info and subscription data."""
        auth_headers = {"Authorization": f"Bearer {self.token}"}
        response = requests.get(f"{self.host}/user", headers=auth_headers)
        payload = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertIn("info", payload)
        self.assertIn("subscription", payload)
class TestInvalideUserService(unittest.TestCase):
    """Negative-path tests: malformed payloads and bad auth must be rejected."""

    def setUp(self):
        self.host = "http://127.0.0.1"
        # "firtname" / "passord" are misspelled on purpose: registration must
        # reject a payload with missing required fields.
        self.register_profile = {
            "firtname": "Никита",
            "lastname": "Бережной",
            "email": "nikitoshi@test.ru",
            "passord": "13371488"
        }
        # "mail" instead of "email" and an empty password: login must fail.
        self.login_profile = {
            "firstname": "Никита",
            "lastname": "Бережной",
            "mail": "nikitoshi@gaspatchi.ru",
            "password": ""
        }

    def test_register(self):
        """Registration with misspelled fields is rejected with HTTP 400."""
        result = requests.post("{0}/user/register".format(self.host), json=self.register_profile)
        body = result.json()
        self.assertEqual(result.status_code, 400)
        self.assertIn("message", body)

    def test_login(self):
        """Login with a malformed payload is rejected with HTTP 400."""
        result = requests.post("{0}/user/login".format(self.host), json=self.login_profile)
        body = result.json()
        self.assertEqual(result.status_code, 400)
        self.assertIn("message", body)

    def test_select(self):
        """Profile access with an invalid token is rejected with HTTP 403.

        BUG FIX: the original referenced self.token, which is never defined in
        this class (AttributeError), and asserted on an undefined local `body`.
        Use an explicitly invalid bearer token and parse the response body.
        """
        result = requests.get("{0}/user".format(self.host),
                              headers={"Authorization": "Bearer invalid-token"})
        body = result.json()
        self.assertEqual(result.status_code, 403)
        self.assertIn("message", body)
| 27.620253
| 112
| 0.695692
| 274
| 2,182
| 5.452555
| 0.19708
| 0.048193
| 0.072289
| 0.108434
| 0.802544
| 0.781794
| 0.781794
| 0.710174
| 0.636546
| 0.636546
| 0
| 0.032427
| 0.12374
| 2,182
| 79
| 113
| 27.620253
| 0.748954
| 0
| 0
| 0.703125
| 0
| 0
| 0.222171
| 0.020156
| 0
| 0
| 0
| 0
| 0.203125
| 1
| 0.125
| false
| 0.0625
| 0.03125
| 0
| 0.203125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
4e76cff124111e32d1481ca7e82067dcd99f6ff6
| 5,017
|
py
|
Python
|
violas_client/bank_client/test/test_transaction.py
|
violas-core/violas-client
|
e8798f7d081ac218b78b81fd7eb2f8da92631a16
|
[
"MIT"
] | null | null | null |
violas_client/bank_client/test/test_transaction.py
|
violas-core/violas-client
|
e8798f7d081ac218b78b81fd7eb2f8da92631a16
|
[
"MIT"
] | null | null | null |
violas_client/bank_client/test/test_transaction.py
|
violas-core/violas-client
|
e8798f7d081ac218b78b81fd7eb2f8da92631a16
|
[
"MIT"
] | 1
|
2022-01-05T06:49:42.000Z
|
2022-01-05T06:49:42.000Z
|
from violas_client import Client, Wallet
from violas_client.banktypes.bytecode import CodeType
client = Client("bj_testnet")
def test_get_code_type():
    """get_code_type() should report the bytecode type of each bank transaction."""
    wallet = Wallet.new()
    a1 = wallet.new_account()
    # Fund the fresh account so the bank operations below can execute
    client.mint_coin(a1.address, 300_000_000, auth_key_prefix=a1.auth_key_prefix, currency_code="VUSDT")
    seq = client.bank_publish(a1)
    assert client.get_account_transaction(a1.address, seq).get_code_type() == CodeType.PUBLISH
    seq = client.bank_lock2(a1, 100_000_000, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_code_type() == CodeType.LOCK2
    seq = client.bank_borrow2(a1, 1_000, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_code_type() == CodeType.BORROW2
    # Repay the full outstanding borrow, then redeem the full locked amount
    _, amount = client.bank_get_borrow_amount(a1.address, currency_code="VUSDT")
    seq = client.bank_repay_borrow2(a1, currency_code="VUSDT", amount=amount)
    assert client.get_account_transaction(a1.address, seq).get_code_type() == CodeType.REPAY_BORROW2
    amount = client.bank_get_lock_amount(a1.address, currency_code="VUSDT")
    seq = client.bank_redeem2(a1, currency_code="VUSDT", amount=amount)
    assert client.get_account_transaction(a1.address, seq).get_code_type() == CodeType.REDEEM2
def test_get_amount():
    """get_amount() should return the coin amount moved by each bank transaction."""
    wallet = Wallet.new()
    a1 = wallet.new_account()
    client.mint_coin(a1.address, 300_000_000, auth_key_prefix=a1.auth_key_prefix, currency_code="VUSDT")
    seq = client.bank_publish(a1)
    # publish moves no coins, so no amount is reported
    assert client.get_account_transaction(a1.address, seq).get_amount() == None
    seq = client.bank_lock2(a1, 100_000_000, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_amount() == 100_000_000
    seq = client.bank_borrow2(a1, 1_000, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_amount() == 1_000
    seq = client.bank_repay_borrow2(a1, amount=100, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_amount() == 100
    seq = client.bank_redeem2(a1, currency_code="VUSDT", amount=100)
    assert client.get_account_transaction(a1.address, seq).get_amount() == 100
def test_get_currency_code():
    """get_currency_code() should return the currency of each bank transaction."""
    wallet = Wallet.new()
    a1 = wallet.new_account()
    client.mint_coin(a1.address, 300_000_000, auth_key_prefix=a1.auth_key_prefix, currency_code="VUSDT")
    seq = client.bank_publish(a1)
    # publish is currency-agnostic, so no currency code is reported
    assert client.get_account_transaction(a1.address, seq).get_currency_code() == None
    seq = client.bank_lock2(a1, 100_000_000, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_currency_code() == "VUSDT"
    seq = client.bank_borrow2(a1, 1_000, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_currency_code() == "VUSDT"
    seq = client.bank_repay_borrow2(a1, amount=100, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_currency_code() == "VUSDT"
    seq = client.bank_redeem2(a1, amount=100, currency_code="VUSDT")
    assert client.get_account_transaction(a1.address, seq).get_currency_code() == "VUSDT"
def test_get_data():
    """get_data() should return the attached user data, hex-encoded."""
    data = "data"
    # The chain stores attached data as hex of the UTF-8 bytes
    data_hex = b"data".hex()
    wallet = Wallet.new()
    a1 = wallet.new_account()
    client.mint_coin(a1.address, 300_000_000, auth_key_prefix=a1.auth_key_prefix, currency_code="VUSDT")
    seq = client.bank_publish(a1, data=data)
    assert client.get_account_transaction(a1.address, seq).get_data() == data_hex
    seq = client.bank_lock2(a1, 100_000_000, currency_code="VUSDT", data=data)
    assert client.get_account_transaction(a1.address, seq).get_data() == data_hex
    seq = client.bank_borrow2(a1, 1_000, currency_code="VUSDT", data=data)
    assert client.get_account_transaction(a1.address, seq).get_data() == data_hex
    seq = client.bank_repay_borrow2(a1, amount=100, currency_code="VUSDT", data=data)
    assert client.get_account_transaction(a1.address, seq).get_data() == data_hex
    seq = client.bank_redeem2(a1, amount=100, currency_code="VUSDT", data=data)
    assert client.get_account_transaction(a1.address, seq).get_data() == data_hex
def test_get_incentive():
    """get_incentive() should be populated for borrow/repay/redeem transactions."""
    wallet = Wallet.new()
    a1 = wallet.new_account()
    client.mint_coin(a1.address, 300_000_000, auth_key_prefix=a1.auth_key_prefix, currency_code="VUSDT")
    seq = client.bank_publish(a1)
    # NOTE(review): this asserts get_currency_code(), not get_incentive() — looks
    # copied from test_get_currency_code; confirm intent
    assert client.get_account_transaction(a1.address, seq).get_currency_code() == None
    client.bank_lock2(a1, 100_000_000, currency_code="VUSDT")
    seq = client.bank_borrow2(a1, 1_000_000, currency_code="VUSDT")
    tx = client.get_account_transaction(a1.address, seq)
    assert tx.get_incentive() != None
    seq = client.bank_repay_borrow2(a1, amount=100, currency_code="VUSDT")
    tx = client.get_account_transaction(a1.address, seq)
    assert tx.get_incentive() != None
    seq = client.bank_redeem2(a1, amount=100, currency_code="VUSDT")
    tx = client.get_account_transaction(a1.address, seq)
    assert tx.get_incentive() != None
| 45.609091
| 104
| 0.745266
| 733
| 5,017
| 4.789905
| 0.065484
| 0.116206
| 0.1501
| 0.184563
| 0.925662
| 0.909997
| 0.909997
| 0.909997
| 0.909997
| 0.866135
| 0
| 0.056357
| 0.129958
| 5,017
| 110
| 105
| 45.609091
| 0.747995
| 0
| 0
| 0.620253
| 0
| 0
| 0.034476
| 0
| 0
| 0
| 0
| 0
| 0.303797
| 1
| 0.063291
| false
| 0
| 0.025316
| 0
| 0.088608
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e81cf9bb0ccb4ec4b5c78673eb64908dac3d762
| 17,694
|
py
|
Python
|
tests/test_pycran.py
|
imanhodjaev/pycran
|
3cb1fec67f94a9cac0085c74ec188faa2cd65df3
|
[
"Apache-2.0"
] | 1
|
2020-03-19T09:50:10.000Z
|
2020-03-19T09:50:10.000Z
|
tests/test_pycran.py
|
imanhodjaev/pycran
|
3cb1fec67f94a9cac0085c74ec188faa2cd65df3
|
[
"Apache-2.0"
] | 1
|
2020-03-23T14:46:31.000Z
|
2020-03-23T16:38:06.000Z
|
tests/test_pycran.py
|
imanhodjaev/pycran
|
3cb1fec67f94a9cac0085c74ec188faa2cd65df3
|
[
"Apache-2.0"
] | null | null | null |
import re
import tarfile
import textwrap
from io import StringIO
from os import path
from zipfile import ZipFile
import pytest
from debian.deb822 import Deb822
import pycran
from pycran.errors import DescriptionNotFound, NotTarFile
data_path = path.join(path.dirname(__file__), "data")
def test_parse_works_with_normal_data():
    """Parsing a two-record PACKAGES document yields two dicts with expected fields."""
    # NOTE(review): blank-line separators between records may have been lost in
    # source formatting — confirm the literal against upstream
    data = """
Package: ABACUS
Version: 1.0.0
Depends: R (>= 3.1.0)
Imports: ggplot2 (>= 3.1.0), shiny (>= 1.3.1),
Suggests: rmarkdown (>= 1.13), knitr (>= 1.22)
License: GPL-3
MD5sum: 50c54c4da09307cb95a70aaaa54b9fbd
NeedsCompilation: no
Package: abbyyR
Version: 0.5.5
Depends: R (>= 3.2.0)
Imports: httr, XML, curl, readr, plyr, progress
Suggests: testthat, rmarkdown, knitr (>= 1.11), lintr
License: MIT + file LICENSE
MD5sum: e048a3bca6ea32126e6c367415c0bfaf
NeedsCompilation: no
"""
    packages = list(pycran.parse(data))
    assert len(packages) == 2
    # Records come back in document order; spot-check the first one field by field
    assert packages[0] == {
        "Package": "ABACUS",
        "Version": "1.0.0",
        "Depends": "R (>= 3.1.0)",
        "Imports": "ggplot2 (>= 3.1.0), shiny (>= 1.3.1),",
        "Suggests": "rmarkdown (>= 1.13), knitr (>= 1.22)",
        "License": "GPL-3",
        "MD5sum": "50c54c4da09307cb95a70aaaa54b9fbd",
        "NeedsCompilation": "no",
    }
def test_parse_works_with_empty_data():
    """An empty document parses to no package records at all."""
    parsed = pycran.parse("")
    assert list(parsed) == []
def test_parse_works_on_non_separated_data():
    """Records without blank-line separators still parse into distinct dicts."""
    data = """Package: abc
Version: 2.1
Depends: R (>= 2.10), abc.data, nnet, quantreg, MASS, locfit
License: GPL (>= 3)
MD5sum: c9fffe4334c178917f762735aba59653
NeedsCompilation: no
Package: abc.data
Version: 1.0
Depends: R (>= 2.10)
License: GPL (>= 3)
MD5sum: 799079dbbdd0cfc9d9c61c3e35241806
NeedsCompilation: no"""
    result = list(pycran.parse(data))
    assert len(result) == 2
    assert result == [
        {
            "Package": "abc",
            "Version": "2.1",
            "Depends": "R (>= 2.10), abc.data, nnet, quantreg, MASS, locfit",
            "License": "GPL (>= 3)",
            "MD5sum": "c9fffe4334c178917f762735aba59653",
            "NeedsCompilation": "no",
        },
        {
            "Package": "abc.data",
            "Version": "1.0",
            "Depends": "R (>= 2.10)",
            "License": "GPL (>= 3)",
            "MD5sum": "799079dbbdd0cfc9d9c61c3e35241806",
            "NeedsCompilation": "no",
        },
    ]
    # Second sample: same parser on a document with leading/trailing newlines
    data = """
Package: ABACUS
Version: 1.0.0
Depends: R (>= 3.1.0)
Imports: ggplot2 (>= 3.1.0), shiny (>= 1.3.1),
Suggests: rmarkdown (>= 1.13), knitr (>= 1.22)
License: GPL-3
MD5sum: 50c54c4da09307cb95a70aaaa54b9fbd
NeedsCompilation: no
Package: abbyyR
Version: 0.5.5
Depends: R (>= 3.2.0)
Imports: httr, XML, curl, readr, plyr, progress
Suggests: testthat, rmarkdown, knitr (>= 1.11), lintr
License: MIT + file LICENSE
MD5sum: e048a3bca6ea32126e6c367415c0bfaf
NeedsCompilation: no
"""
    assert len(list(pycran.parse(data))) == 2
def test_parse_works_on_mixed_data():
    """A document mixing record styles parses into all three expected dicts."""
    data = """Package: abc
Version: 2.1
Depends: R (>= 2.10), abc.data, nnet, quantreg, MASS, locfit
License: GPL (>= 3)
MD5sum: c9fffe4334c178917f762735aba59653
NeedsCompilation: no
Package: abc.data
Version: 1.0
Depends: R (>= 2.10)
License: GPL (>= 3)
MD5sum: 799079dbbdd0cfc9d9c61c3e35241806
NeedsCompilation: no
Package: abbyyR
Version: 0.5.5
Depends: R (>= 3.2.0)
Imports: httr, XML, curl, readr, plyr, progress
Suggests: testthat, rmarkdown, knitr (>= 1.11), lintr
License: MIT + file LICENSE
MD5sum: e048a3bca6ea32126e6c367415c0bfaf
NeedsCompilation: no
"""
    result = list(pycran.parse(data))
    assert len(result) == 3
    assert result == [
        {
            "Package": "abc",
            "Version": "2.1",
            "Depends": "R (>= 2.10), abc.data, nnet, quantreg, MASS, locfit",
            "License": "GPL (>= 3)",
            "MD5sum": "c9fffe4334c178917f762735aba59653",
            "NeedsCompilation": "no",
        },
        {
            "Package": "abc.data",
            "Version": "1.0",
            "Depends": "R (>= 2.10)",
            "License": "GPL (>= 3)",
            "MD5sum": "799079dbbdd0cfc9d9c61c3e35241806",
            "NeedsCompilation": "no",
        },
        {
            "Package": "abbyyR",
            "Version": "0.5.5",
            "Depends": "R (>= 3.2.0)",
            "Imports": "httr, XML, curl, readr, plyr, progress",
            "Suggests": "testthat, rmarkdown, knitr (>= 1.11), lintr",
            "License": "MIT + file LICENSE",
            "MD5sum": "e048a3bca6ea32126e6c367415c0bfaf",
            "NeedsCompilation": "no",
        },
    ]
def test_parse_properly_parses_non_field_lines():
    """Continuation lines (e.g. '<http...>') are folded into the preceding field,
    not misread as new 'Key: value' fields."""
    data = b"""
Package: abbyyR
Title: Access to Abbyy Optical Character Recognition (OCR) API
Version: 0.5.5
Authors@R: person("Gaurav", "Sood", email = "gsood07@gmail.com", role = c("aut", "cre"))
Maintainer: Gaurav Sood <gsood07@gmail.com>
Description: Get text from images of text using Abbyy Cloud Optical Character
Recognition (OCR) API. Easily OCR images, barcodes, forms, documents with
machine readable zones, e.g. passports. Get the results in a variety of formats
including plain text and XML. To learn more about the Abbyy OCR API, see
<http://ocrsdk.com/>.
URL: http://github.com/soodoku/abbyyR
BugReports: http://github.com/soodoku/abbyyR/issues
Depends: R (>= 3.2.0)
License: MIT + file LICENSE
LazyData: true
VignetteBuilder: knitr
Imports: httr, XML, curl, readr, plyr, progress
Suggests: testthat, rmarkdown, knitr (>= 1.11), lintr
RoxygenNote: 6.1.1
NeedsCompilation: no
Packaged: 2019-06-25 01:30:58 UTC; soodoku
Author: Gaurav Sood [aut, cre]
Repository: CRAN
Date/Publication: 2019-06-25 04:30:04 UTC
"""
    [package] = list(pycran.parse(data))
    # The URL line must not have become a key of its own
    assert "<http" not in package
    assert "<http://ocrsdk.com/>" in package["Description"]
def test_parse_works_with_binary_data():
    """parse() accepts bytes input as well as str."""
    data = b"""
Package: ABACUS
Version: 1.0.0
Depends: R (>= 3.1.0)
Imports: ggplot2 (>= 3.1.0), shiny (>= 1.3.1),
Suggests: rmarkdown (>= 1.13), knitr (>= 1.22)
License: GPL-3
MD5sum: 50c54c4da09307cb95a70aaaa54b9fbd
NeedsCompilation: no
Package: abbyyR
Version: 0.5.5
Depends: R (>= 3.2.0)
Imports: httr, XML, curl, readr, plyr, progress
Suggests: testthat, rmarkdown, knitr (>= 1.11), lintr
License: MIT + file LICENSE
MD5sum: e048a3bca6ea32126e6c367415c0bfaf
NeedsCompilation: no
"""
    assert len(list(pycran.parse(data))) == 2
def test_parse_can_parse_all_entries_from_cran_registry():
    """A full real-world PACKAGES snapshot parses into the expected record count."""
    # Test on real package metadata from https://cran.r-project.org/src/contrib/PACKAGES
    with ZipFile(path.join(data_path, "PACKAGES.txt.zip")) as archive:
        with archive.open("PACKAGES.txt") as fp:
            assert len(list(pycran.parse(fp.read()))) == 15397
def test_parse_can_parse_mixed_entries_from_cran_registry():
    """A fixture with mixed record formatting parses into the three expected dicts."""
    with open(path.join(data_path, "PACKAGES_MIX.txt")) as fp:
        assert list(pycran.parse(fp.read())) == [
            {
                "Package": "A3",
                "Version": "1.0.0",
                "Depends": "R (>= 2.15.0), xtable, pbapply",
                "Suggests": "randomForest, e1071",
                "License": "GPL (>= 2)",
                "MD5sum": "027ebdd8affce8f0effaecfcd5f5ade2",
                "NeedsCompilation": "no",
            },
            {
                "Package": "A8",
                "Version": "1.0.0",
                "Depends": "R (>= 2.15.0), xtable, pbapply",
                "Suggests": "randomForest, e1071",
                "License": "GPL (>= 2)",
                "MD5sum": "027ebdd8affce8f0effaecfcd5f5ade2",
                "NeedsCompilation": "no",
            },
            {
                "Package": "aaSEA",
                "Version": "1.1.0",
                "Depends": "R(>= 3.4.0)",
                "Imports": "DT(>= 0.4), networkD3(>= 0.4), shiny(>= 1.0.5), shinydashboard(>= 0.7.0), magrittr(>= 1.5), Bios2cor(>= 2.0), seqinr(>= 3.4-5), plotly(>= 4.7.1), Hmisc(>= 4.1-1)",
                "Suggests": "knitr, rmarkdown",
                "License": "GPL-3",
                "MD5sum": "0f9aaefc1f1cf18b6167f85dab3180d8",
                "NeedsCompilation": "no",
            },
        ]
def test_encode():
    """encode() turns a metadata dict back into PACKAGES text, modulo whitespace."""
    metadata = """
Package: ABACUS
Version: 1.0.0
Depends: R (>= 3.1.0)
Imports: ggplot2 (>= 3.1.0), shiny (>= 1.3.1),
Suggests: rmarkdown (>= 1.13), knitr (>= 1.22)
License: GPL-3
MD5sum: 50c54c4da09307cb95a70aaaa54b9fbd
NeedsCompilation: no
"""
    result = pycran.encode(
        {
            "Package": "ABACUS",
            "Version": "1.0.0",
            "Depends": "R (>= 3.1.0)",
            "Imports": "ggplot2 (>= 3.1.0), shiny (>= 1.3.1),",
            "Suggests": "rmarkdown (>= 1.13), knitr (>= 1.22)",
            "License": "GPL-3",
            "MD5sum": "50c54c4da09307cb95a70aaaa54b9fbd",
            "NeedsCompilation": "no",
        }
    )

    def clean(data):
        # Normalize per-line leading/trailing whitespace before comparing
        return "\n".join([line.strip() for line in data.split("\n")]).strip()

    # we want to assert result and expected result without
    # any leading or trailing spaces thus cutting them off.
    assert clean(metadata) == clean(result)
def test_decode_works():
    """decode() parses a single record string into the expected dict."""
    deb_data = """
Package: ABACUS
Version: 1.0.0
Depends: R (>= 3.1.0)
Imports: ggplot2 (>= 3.1.0), shiny (>= 1.3.1),
Suggests: rmarkdown (>= 1.13), knitr (>= 1.22)
License: GPL-3
MD5sum: 50c54c4da09307cb95a70aaaa54b9fbd
NeedsCompilation: no
"""
    expected = {
        "Package": "ABACUS",
        "Version": "1.0.0",
        "Depends": "R (>= 3.1.0)",
        "Imports": "ggplot2 (>= 3.1.0), shiny (>= 1.3.1),",
        "Suggests": "rmarkdown (>= 1.13), knitr (>= 1.22)",
        "License": "GPL-3",
        "MD5sum": "50c54c4da09307cb95a70aaaa54b9fbd",
        "NeedsCompilation": "no",
    }
    assert pycran.decode(deb_data) == expected
def test_decode_returns_none_if_empty_string_given():
    """Decoding an empty string yields None rather than an empty record."""
    result = pycran.decode("")
    assert result is None
def test_from_file_path_works():
    """from_file() accepts a filesystem path and reads the archive's DESCRIPTION."""
    assert pycran.from_file(path.join(data_path, "A3_1.0.0.tar.gz")) == {
        "Package": "A3",
        "Type": "Package",
        "Title": "Accurate, Adaptable, and Accessible Error Metrics for Predictive Models",
        "Version": "1.0.0",
        "Date": "2015-08-15",
        "Author": "Scott Fortmann-Roe",
        "Maintainer": "Scott Fortmann-Roe <scottfr@berkeley.edu>",
        "Description": "Supplies tools for tabulating and analyzing the results of predictive models. The methods employed are applicable to virtually any predictive model and make comparisons between different methodologies straightforward.",
        "License": "GPL (>= 2)",
        "Depends": "R (>= 2.15.0), xtable, pbapply",
        "Suggests": "randomForest, e1071",
        "NeedsCompilation": "no",
        "Packaged": "2015-08-16 14:17:33 UTC; scott",
        "Repository": "CRAN",
        "Date/Publication": "2015-08-16 23:05:52",
    }
def test_from_file_tar_file_works():
    """from_file() also accepts an already-open tarfile.TarFile object."""
    assert pycran.from_file(tarfile.open(path.join(data_path, "A3_1.0.0.tar.gz"))) == {
        "Package": "A3",
        "Type": "Package",
        "Title": "Accurate, Adaptable, and Accessible Error Metrics for Predictive Models",
        "Version": "1.0.0",
        "Date": "2015-08-15",
        "Author": "Scott Fortmann-Roe",
        "Maintainer": "Scott Fortmann-Roe <scottfr@berkeley.edu>",
        "Description": "Supplies tools for tabulating and analyzing the results of predictive models. The methods employed are applicable to virtually any predictive model and make comparisons between different methodologies straightforward.",
        "License": "GPL (>= 2)",
        "Depends": "R (>= 2.15.0), xtable, pbapply",
        "Suggests": "randomForest, e1071",
        "NeedsCompilation": "no",
        "Packaged": "2015-08-16 14:17:33 UTC; scott",
        "Repository": "CRAN",
        "Date/Publication": "2015-08-16 23:05:52",
    }
def test_from_file_path_raises_exception_if_description_not_found():
    """A path to an archive with no DESCRIPTION file raises DescriptionNotFound."""
    with pytest.raises(DescriptionNotFound):
        pycran.from_file(path.join(data_path, "A3_no_description.tar.gz"))
def test_from_file_tar_file_raises_exception_if_description_not_found():
    """An open TarFile with no DESCRIPTION file raises DescriptionNotFound."""
    with pytest.raises(DescriptionNotFound):
        pycran.from_file(tarfile.open(path.join(data_path, "A3_no_description.tar.gz")))
def test_from_file_path_raises_exception_if_not_exists():
    """A path to a missing file raises FileNotFoundError."""
    with pytest.raises(FileNotFoundError):
        pycran.from_file(path.join(data_path, "bobo.tar.gz"))
def test_from_file_path_raises_exception_if_file_is_not_tarfile():
    """A path to a file that is not a tar archive raises NotTarFile."""
    with pytest.raises(NotTarFile):
        pycran.from_file(path.join(data_path, "PACKAGES_MIX.txt"))
# Cross validation tests to check if parsing is valid and correct
# we will use `deb-pkg-tools` package to parse and test matches
# NOTE: our parser intentionally strips whitespaces and
# `deb-pkg-tools` preserves them all.
def _deb_parse(sequence: str):
    """Parse one record with the reference Deb822 parser, then normalize the
    whitespace in its field values so it can be compared with pycran's output."""
    cleaned_text = textwrap.dedent(sequence).strip()
    fields = dict(Deb822(StringIO(cleaned_text)))
    _strip_and_clean(fields)
    return fields
def _strip_and_clean(parsed):
for key in parsed:
val = parsed[key]
parsed[key] = re.sub(r"\n", "", val)
parsed[key] = re.sub(r"\s+", " ", val)
def test_cross_validation_simple_parse():
    """pycran.decode must agree with the reference Deb822 parser on a simple record."""
    deb_data = """
Package: ABACUS
Version: 1.0.0
Depends: R (>= 3.1.0)
Imports: ggplot2 (>= 3.1.0), shiny (>= 1.3.1),
Suggests: rmarkdown (>= 1.13), knitr (>= 1.22)
License: GPL-3
MD5sum: 50c54c4da09307cb95a70aaaa54b9fbd
NeedsCompilation: no
"""
    expected = {
        "Package": "ABACUS",
        "Version": "1.0.0",
        "Depends": "R (>= 3.1.0)",
        "Imports": "ggplot2 (>= 3.1.0), shiny (>= 1.3.1),",
        "Suggests": "rmarkdown (>= 1.13), knitr (>= 1.22)",
        "License": "GPL-3",
        "MD5sum": "50c54c4da09307cb95a70aaaa54b9fbd",
        "NeedsCompilation": "no",
    }
    # Both parsers must agree with each other and with the literal expectation
    assert _deb_parse(deb_data) == pycran.decode(deb_data)
    assert _deb_parse(deb_data) == expected
    assert pycran.decode(deb_data) == expected
def test_cross_validation_large_sequence_parse():
    """Cross-validate both parsers on a realistic multi-field record.

    Exercises folded (continuation-line) fields such as Description, which
    both parsers must collapse to a single-space-separated value.
    """
    deb_data = """
Package: abbyyR
Title: Access to Abbyy Optical Character Recognition (OCR) API
Version: 0.5.5
Authors@R: person("Gaurav", "Sood", email = "gsood07@gmail.com", role = c("aut", "cre"))
Maintainer: Gaurav Sood <gsood07@gmail.com>
Description: Get text from images of text using Abbyy Cloud Optical Character
Recognition (OCR) API. Easily OCR images, barcodes, forms, documents with
machine readable zones, e.g. passports. Get the results in a variety of formats
including plain text and XML. To learn more about the Abbyy OCR API, see
<http://ocrsdk.com/>.
URL: http://github.com/soodoku/abbyyR
BugReports: http://github.com/soodoku/abbyyR/issues
Depends: R (>= 3.2.0)
License: MIT + file LICENSE
LazyData: true
VignetteBuilder: knitr
Imports: httr, XML, curl, readr, plyr, progress
Suggests: testthat, rmarkdown, knitr (>= 1.11), lintr
RoxygenNote: 6.1.1
NeedsCompilation: no
Packaged: 2019-06-25 01:30:58 UTC; soodoku
Author: Gaurav Sood [aut, cre]
Repository: CRAN
Date/Publication: 2019-06-25 04:30:04 UTC
"""
    expected = {
        "Package": "abbyyR",
        "Title": "Access to Abbyy Optical Character Recognition (OCR) API",
        "Version": "0.5.5",
        "Authors@R": 'person("Gaurav", "Sood", email = "gsood07@gmail.com", role = c("aut", "cre"))',
        "Maintainer": "Gaurav Sood <gsood07@gmail.com>",
        "Description": "Get text from images of text using Abbyy Cloud Optical Character Recognition (OCR) API. Easily OCR images, barcodes, forms, documents with machine readable zones, e.g. passports. Get the results in a variety of formats including plain text and XML. To learn more about the Abbyy OCR API, see <http://ocrsdk.com/>.",
        "URL": "http://github.com/soodoku/abbyyR",
        "BugReports": "http://github.com/soodoku/abbyyR/issues",
        "Depends": "R (>= 3.2.0)",
        "License": "MIT + file LICENSE",
        "LazyData": "true",
        "VignetteBuilder": "knitr",
        "Imports": "httr, XML, curl, readr, plyr, progress",
        "Suggests": "testthat, rmarkdown, knitr (>= 1.11), lintr",
        "RoxygenNote": "6.1.1",
        "NeedsCompilation": "no",
        "Packaged": "2019-06-25 01:30:58 UTC; soodoku",
        "Author": "Gaurav Sood [aut, cre]",
        "Repository": "CRAN",
        "Date/Publication": "2019-06-25 04:30:04 UTC",
    }
    # Triangular comparison: parser vs parser, and each parser vs expected.
    assert _deb_parse(deb_data) == pycran.decode(deb_data)
    assert _deb_parse(deb_data) == expected
    assert pycran.decode(deb_data) == expected
def test_cross_validation_cran_index_parse():
    """Cross-validate pycran against deb-pkg-tools on the real CRAN index.

    Bug fixed: the original loop aliased ``pkg`` and ``pyc_pkg`` to the SAME
    pycran record (``for i, pkg in enumerate(pycran_parsed): pyc_pkg =
    pycran_parsed[i]``), so ``assert pkg == pyc_pkg`` compared an object with
    itself and the Deb822 output was never checked.  We now normalise each
    Deb822 paragraph and compare it against the matching pycran record.
    """
    # Test on real package metadata from https://cran.r-project.org/src/contrib/PACKAGES
    with ZipFile(path.join(data_path, "PACKAGES.txt.zip")) as archive:
        with archive.open("PACKAGES.txt") as fp:
            data = fp.read()
            pycran_parsed = list(pycran.parse(data))
            deb_parsed = list(Deb822.iter_paragraphs(data))
            assert len(pycran_parsed) == len(deb_parsed)
            for pyc_pkg, deb_pkg in zip(pycran_parsed, deb_parsed):
                reference = dict(deb_pkg)
                # Match pycran's whitespace handling before comparing.
                _strip_and_clean(reference)
                assert reference == pyc_pkg
| 35.60161
| 339
| 0.599864
| 2,121
| 17,694
| 4.916549
| 0.150872
| 0.008055
| 0.005754
| 0.030974
| 0.833046
| 0.806483
| 0.802455
| 0.792482
| 0.787495
| 0.787495
| 0
| 0.083283
| 0.251498
| 17,694
| 496
| 340
| 35.673387
| 0.704092
| 0.030123
| 0
| 0.712617
| 0
| 0.016355
| 0.594251
| 0.053874
| 0
| 0
| 0
| 0
| 0.060748
| 1
| 0.053738
| false
| 0.007009
| 0.067757
| 0.002336
| 0.126168
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e9cb9c6b5af43faa695c10d0f7c428eb8ded1c4
| 21,836
|
py
|
Python
|
web/transiq/restapi/tests/tests_validators.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
web/transiq/restapi/tests/tests_validators.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | 14
|
2020-06-05T23:06:45.000Z
|
2022-03-12T00:00:18.000Z
|
web/transiq/restapi/tests/tests_validators.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import random
from django.test import TestCase
from restapi.helper_api import generate_random_uppercase_string, random_with_N_digits, \
generate_random_string_except_given_string, generate_random_string_with_given_string, \
generate_random_lowercase_string
from restapi.service.validators import validate_pan, validate_mobile_number, validate_ifsc, validate_gstin, \
validate_name, validate_pin, validate_vehicle_number
class TestValidPan(unittest.TestCase):
    """Tests for validate_pan.

    A PAN here is 10 characters: 5 letters (with the 4th drawn from the set
    'abcfghljpte' — presumably the holder-type codes; verify against the
    validator), then 4 digits, then 1 letter.  Inputs are assembled from the
    random-string helpers so each run exercises fresh data.
    """

    def test_valid_pan_success(self):
        """Well-formed PANs, upper- or lower-case, are accepted."""
        self.assertTrue(validate_pan('{}{}{}{}{}'.format(
            generate_random_uppercase_string(N=3),
            generate_random_string_with_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(n=4),
            generate_random_uppercase_string(N=1))))  # pan validation with valid input
        self.assertTrue(validate_pan('{}{}{}{}{}'.format(
            generate_random_lowercase_string(N=3),
            generate_random_string_with_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(4),
            generate_random_lowercase_string(N=1))))  # pan validation with valid input (lower case)

    def test_valid_pan_failure(self):
        """Malformed PANs are rejected: bad padding, wrong character classes, wrong length."""
        self.assertFalse(validate_pan(" {}{}{}{}{}".format(
            generate_random_uppercase_string(N=3),
            generate_random_string_except_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(4),
            generate_random_uppercase_string(N=1))))  # invalid: starts with whitespace
        self.assertFalse(validate_pan("{}{}{}{}{} ".format(
            generate_random_uppercase_string(N=3),
            generate_random_string_except_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(4),
            generate_random_uppercase_string(N=1))))  # invalid: ends with whitespace
        self.assertFalse(validate_pan("{}{}{}{}".format(
            generate_random_uppercase_string(N=3),
            generate_random_string_with_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(5))))  # invalid: last character is numeric
        self.assertFalse(validate_pan(
            "{}{}{}{}{}{}".format(
                random_with_N_digits(n=1),
                generate_random_uppercase_string(N=2),
                generate_random_string_with_given_string(value='abcfghljpte', N=1),
                generate_random_uppercase_string(N=1),
                random_with_N_digits(4),
                generate_random_uppercase_string(N=1))))  # invalid: first character numeric
        self.assertFalse(validate_pan(
            "{}{}{}{}{}{}{}".format(
                generate_random_uppercase_string(N=1),
                random_with_N_digits(n=1),
                generate_random_uppercase_string(N=1),
                generate_random_string_with_given_string(value='abcfghljpte', N=1),
                generate_random_uppercase_string(N=1),
                random_with_N_digits(4),
                generate_random_uppercase_string(N=1))))  # invalid: second character numeric
        self.assertFalse(validate_pan(
            "{}{}{}{}{}{}".format(
                generate_random_uppercase_string(N=2),
                random_with_N_digits(n=1),
                generate_random_string_with_given_string(value='abcfghljpte', N=1),
                generate_random_uppercase_string(N=1),
                random_with_N_digits(4),
                generate_random_uppercase_string(N=1))))  # invalid: third character numeric
        self.assertFalse(validate_pan("{}{}{}{}{}".format(
            generate_random_uppercase_string(N=3),
            generate_random_string_except_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(4),
            generate_random_uppercase_string(N=1))))  # invalid: fourth character outside allowed set
        self.assertFalse(validate_pan("{}{}{}{}".format(
            generate_random_uppercase_string(N=3),
            generate_random_string_with_given_string(value='abcfghljpte', N=1),
            random_with_N_digits(5),
            generate_random_uppercase_string(N=1))))  # invalid: fifth character is numeric
        self.assertFalse(validate_pan("{}{}{}{}{}".format(
            generate_random_uppercase_string(N=3),
            generate_random_string_with_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=2),
            random_with_N_digits(3),
            generate_random_uppercase_string(N=1))))  # invalid: sixth character is an alphabet
        self.assertFalse(validate_pan("{}{}{}{}{}{}{}".format(
            generate_random_uppercase_string(N=3),
            generate_random_string_with_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(2),
            generate_random_uppercase_string(N=1))))  # invalid: seventh character is an alphabet
        self.assertFalse(validate_pan('{}{}{}{}{}P'.format(
            generate_random_uppercase_string(N=3),
            generate_random_string_with_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(4),
            generate_random_uppercase_string(N=1))))  # invalid: length > 10 (extra trailing letter)
        self.assertFalse(validate_pan('A{}{}{}{}{}'.format(
            generate_random_uppercase_string(N=3),
            generate_random_string_with_given_string(value='abcfghljpte', N=1),
            generate_random_uppercase_string(N=1),
            random_with_N_digits(4),
            generate_random_uppercase_string(N=1))))  # invalid: length > 10 (extra leading letter)
        self.assertFalse(validate_pan(None))  # invalid: None input
        self.assertFalse(validate_pan(""))  # invalid: empty string
        self.assertFalse(validate_pan("not"))  # invalid: far too short
class TestValidMobileNumber(unittest.TestCase):
    """Tests for validate_mobile_number.

    Success case implies the expected format is 10 digits with a non-zero
    first digit and no surrounding whitespace — confirm against validator.
    """

    def test_valid_mobile_number_success(self):
        # Non-zero first digit followed by 9 random digits.
        self.assertTrue(validate_mobile_number(
            '{}{}'.format(random.randint(1, 9), random_with_N_digits(9))))  # valid mobile number
        # self.assertTrue(validate_mobile_number(u"{}{}".format(random.randint(1,10), random_with_N_digits(9)))) #valid mobile number

    def test_valid_mobile_number_failure(self):
        self.assertFalse(validate_mobile_number(' {}{}'.format(random.randint(1, 9), random_with_N_digits(
            9))))  # invalid mobile number (starting with whitespace)
        self.assertFalse(validate_mobile_number('{}{} '.format(random.randint(1, 9), random_with_N_digits(
            9))))  # invalid mobile number (ending with whitespace)
        self.assertFalse(validate_mobile_number('{}{}{}'.format(random.randint(1, 9), random_with_N_digits(9),
                                                                generate_random_uppercase_string(
                                                                    N=1))))  # extra character at the end
        # Wrong lengths: 11 digits and 9 digits.
        self.assertFalse(validate_mobile_number('{}{}'.format(random.randint(1, 9), random_with_N_digits(10))))
        self.assertFalse(validate_mobile_number('{}{}'.format(random.randint(1, 9), random_with_N_digits(8))))
        # Degenerate inputs.
        self.assertFalse(validate_mobile_number(None))
        self.assertFalse(validate_mobile_number(""))
        self.assertFalse(validate_mobile_number("transiq tec"))
class TestValidIfsc(unittest.TestCase):
    """Tests for validate_ifsc (IFSC: 4 letters, a literal '0', then 6 alphanumerics).

    Fix over the original: the "first character is numeric" failure assertion
    was duplicated verbatim; the second copy is removed.
    """

    def test_valid_ifsc_success(self):
        self.assertEqual(
            validate_ifsc('{}0{}'.format(generate_random_uppercase_string(4),
                                         generate_random_lowercase_string(6))),
            True)  # valid ifsc
        self.assertEqual(
            validate_ifsc('{}0{}'.format(generate_random_lowercase_string(4),
                                         random_with_N_digits(6))),
            True)  # valid ifsc (lower-case letters, numeric branch code)
        self.assertEqual(validate_ifsc(
            '{}0{}{}{}{}'.format(generate_random_uppercase_string(4), random_with_N_digits(2),
                                 generate_random_lowercase_string(2), random_with_N_digits(1),
                                 generate_random_uppercase_string(1))), True)  # valid ifsc (mixed branch code)

    def test_valid_ifsc_failure(self):
        self.assertFalse(validate_ifsc(' {}0{}'.format(
            generate_random_uppercase_string(4),
            generate_random_lowercase_string(6))))  # invalid ifsc (starting with whitespace)
        self.assertFalse(validate_ifsc('{}0{} '.format(
            generate_random_uppercase_string(4),
            generate_random_lowercase_string(6))))  # invalid ifsc (ending with whitespace)
        # NOTE(review): the next three format strings keep the original stray
        # trailing space, so each input is invalid both for the stated reason
        # AND the trailing whitespace.
        self.assertFalse(validate_ifsc('{}2{} '.format(
            generate_random_uppercase_string(4),
            generate_random_lowercase_string(6))))  # invalid ifsc (fifth character is other than 0)
        self.assertFalse(validate_ifsc('{}{}0{} '.format(
            random_with_N_digits(1), generate_random_uppercase_string(3),
            generate_random_lowercase_string(6))))  # invalid ifsc (first character is numeric)
        self.assertFalse(validate_ifsc('{}{}{}0{} '.format(
            generate_random_uppercase_string(1), random_with_N_digits(1),
            generate_random_uppercase_string(2),
            random_with_N_digits(6))))  # invalid ifsc (second character is numeric)
        self.assertFalse(validate_ifsc(
            '{}{}{}0{}{}'.format(generate_random_uppercase_string(2), random_with_N_digits(1),
                                 generate_random_lowercase_string(1), generate_random_uppercase_string(3),
                                 random_with_N_digits(3))))  # invalid ifsc (third character is numeric)
        self.assertFalse(validate_ifsc(
            '{}{}0{}{}{}'.format(generate_random_uppercase_string(3), random_with_N_digits(1),
                                 generate_random_lowercase_string(2), generate_random_uppercase_string(2),
                                 random_with_N_digits(2))))  # invalid ifsc (fourth character is numeric)
        # self.assertFalse(validate_ifsc(None)) #invalid ifsc (passing none)
        self.assertFalse(validate_ifsc(""))  # invalid ifsc (passing "")
        self.assertFalse(validate_ifsc("transiq tec"))  # invalid ifsc (arbitrary text)
class TestValidGstin(unittest.TestCase):
    """Tests for validate_gstin (15-char GSTIN: 2-digit state code, 10-char PAN,
    entity digit, the letter 'Z', checksum character).

    Fixes over the original: the last six failure assertions called
    ``validate_ifsc`` by copy-paste mistake, so the GSTIN validator was never
    exercised; several ``assertFalse`` calls also passed a stray ``True`` as
    the ``msg`` argument, which was dropped.
    """

    def test_valid_gstin_success(self):
        self.assertEqual(validate_gstin(
            '{}{}{}{}{}z{}'.format(random_with_N_digits(2), generate_random_uppercase_string(5),
                                   random_with_N_digits(4), generate_random_uppercase_string(1),
                                   random_with_N_digits(1), random_with_N_digits(1))), True)  # valid gstin
        self.assertEqual(validate_gstin(
            '{}{}{}{}{}Z{}'.format(random_with_N_digits(2), generate_random_uppercase_string(5),
                                   random_with_N_digits(4), generate_random_uppercase_string(1),
                                   random_with_N_digits(1), generate_random_lowercase_string(1))), True)  # valid gstin

    def test_valid_gstin_failure(self):
        self.assertFalse(validate_gstin(
            ' {}{}{}{}{}z{}'.format(random_with_N_digits(2), generate_random_uppercase_string(5),
                                    random_with_N_digits(4), generate_random_uppercase_string(1),
                                    random_with_N_digits(1),
                                    random_with_N_digits(1))))  # invalid gstin (starting with whitespace)
        self.assertFalse(validate_gstin(
            '{}{}{}{}z{}'.format(generate_random_uppercase_string(5), random_with_N_digits(4),
                                 generate_random_uppercase_string(1), random_with_N_digits(1),
                                 random_with_N_digits(1))))  # invalid gstin (missing first two characters)
        self.assertFalse(validate_gstin(
            '{}{}{}{}{}z{} '.format(random_with_N_digits(2), generate_random_uppercase_string(5),
                                    random_with_N_digits(4), generate_random_uppercase_string(1),
                                    random_with_N_digits(1),
                                    random_with_N_digits(1))))  # invalid gstin (ending with whitespace)
        self.assertFalse(validate_gstin(
            '{}{}{}{}{}z'.format(random_with_N_digits(2), generate_random_uppercase_string(5),
                                 random_with_N_digits(4), generate_random_uppercase_string(1),
                                 random_with_N_digits(1))))  # invalid gstin (missing last character)
        self.assertFalse(validate_gstin(
            '{}{}{}{}{}{}{}'.format(random_with_N_digits(2), generate_random_uppercase_string(5),
                                    random_with_N_digits(4), generate_random_uppercase_string(1),
                                    random_with_N_digits(1),
                                    generate_random_string_except_given_string(value='z', N=1),
                                    generate_random_lowercase_string(1))))  # invalid gstin (14th character is other than z)
        self.assertFalse(validate_gstin(
            '{}{}{}{}{}{}z{}'.format(generate_random_lowercase_string(1), random_with_N_digits(1),
                                     generate_random_uppercase_string(5), random_with_N_digits(4),
                                     generate_random_uppercase_string(1), random_with_N_digits(1),
                                     random_with_N_digits(1))))  # invalid gstin (1st character is alphabet)
        self.assertFalse(validate_gstin(
            '{}{}{}{}{}{}z{}'.format(random_with_N_digits(1), generate_random_uppercase_string(1),
                                     generate_random_uppercase_string(5), random_with_N_digits(4),
                                     generate_random_uppercase_string(1), random_with_N_digits(1),
                                     random_with_N_digits(1))))  # invalid gstin (2nd character is alphabet)
        self.assertFalse(validate_gstin(
            '{}{}{}{}{}{}{}z{}'.format(random_with_N_digits(2), generate_random_uppercase_string(2),
                                       random_with_N_digits(1), generate_random_uppercase_string(2),
                                       random_with_N_digits(4), generate_random_uppercase_string(1),
                                       random_with_N_digits(1),
                                       random_with_N_digits(1))))  # invalid gstin (6th character is numeric)
        self.assertFalse(validate_gstin(
            '{}{}{}{}{}Z{}'.format(random_with_N_digits(2), generate_random_uppercase_string(6),
                                   random_with_N_digits(3), generate_random_lowercase_string(1),
                                   random_with_N_digits(1),
                                   generate_random_lowercase_string(1))))  # invalid gstin (8th character is alphabet)
        self.assertFalse(validate_gstin(
            '{}{}{}{}{}Z{}'.format(random_with_N_digits(2), generate_random_lowercase_string(5),
                                   random_with_N_digits(3), generate_random_uppercase_string(2),
                                   random_with_N_digits(1),
                                   random_with_N_digits(1))))  # invalid gstin (11th character is alphabet)
        self.assertFalse(validate_gstin(None))  # invalid gstin (passing None)
        self.assertFalse(validate_gstin(""))  # invalid gstin (passing "")
        self.assertFalse(validate_gstin("transiq tec"))  # invalid gstin (arbitrary text)
class TestValidName(unittest.TestCase):
    """validate_name accepts names containing letters, digits, dots and spaces."""

    def test_valid_name_success(self):
        acceptable_names = (
            'mani bhushan kumar',
            'mani bhushan289',
            'mani bhushan cg04yt9898',
            'Dr. M.B. Mishra',
        )
        for name in acceptable_names:
            self.assertTrue(validate_name(name))
class TestValidPinCode(unittest.TestCase):
    """validate_pin accepts six-digit codes (str or int) that do not start with 0."""

    def test_valid_pin_code_success(self):
        # The same PIN is accepted as a string and as an integer.
        for pin in ('843119', 843119):
            self.assertTrue(validate_pin(pin))

    def test_valid_pin_code_failure(self):
        # A leading zero makes the PIN invalid.
        self.assertFalse(validate_pin('043119'))
class TestValidVehicleNumber(unittest.TestCase):
    """validate_vehicle_number accepts Indian registration plates in several
    separator styles (spaces, dashes, dots, or none)."""

    def test_valid_vehicle_number_success(self):
        valid_plates = (
            "BR-01AQ8864",
            "cg 11 BB 1774",
            "GJ.5.cl.2213",
            "KA 19P 8488",
            "MP 23 LA 0682",
            "MH-32-C-1289",
            "PB03AD4587",
            "TN-07.aP-3627",
            "WB-02S 8596",
            "CH-03-9359",
        )
        for plate in valid_plates:
            self.assertEqual(validate_vehicle_number(plate), True)

    def test_valid_vehicle_number_failure(self):
        invalid_plates = (
            " KA 19P 8488",  # leading whitespace
            "KA 19P 8488 ",  # trailing whitespace
            "KA@19P#8488",   # illegal separator characters
            "A 19P 8488",    # state code too short
            "1A 19P 8488",   # state code starts with a digit
            "K2 19P 8488",   # state code contains a digit
            "KA P 8488",     # district number missing
            "KA C9P 8488",   # district number malformed
            "KA BCP 8488",   # district number alphabetic
            "KA 19P 848",    # serial number too short
            "KA 19 P 84",    # serial number too short
            "KA 19P 84AB",   # serial number contains letters
            "KA 19P 848D",   # serial number contains letters
            "KA 19P ABCD",   # serial number entirely alphabetic
        )
        for plate in invalid_plates:
            self.assertFalse(validate_vehicle_number(plate))
| 21,836
| 21,836
| 0.55523
| 2,148
| 21,836
| 5.271415
| 0.077747
| 0.143425
| 0.078689
| 0.121611
| 0.895522
| 0.827696
| 0.755012
| 0.680915
| 0.652212
| 0.630133
| 0
| 0.027381
| 0.349377
| 21,836
| 1
| 21,836
| 21,836
| 0.769621
| 0.101301
| 0
| 0.540541
| 1
| 0
| 0.049617
| 0
| 0
| 0
| 0
| 0
| 0.328185
| 1
| 0.050193
| false
| 0
| 0.019305
| 0
| 0.096525
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4ea5dcfbca7c95d6a7cf9877048b0a0d570705fd
| 83
|
py
|
Python
|
crizzle/envs/base/__init__.py
|
tasercake/RNN
|
47b56d59411b59d60819ec3e2cf6864521d09c19
|
[
"MIT"
] | 4
|
2019-11-14T04:32:37.000Z
|
2021-12-19T22:43:11.000Z
|
crizzle/envs/base/__init__.py
|
tasercake/Crypto_Algotrader
|
47b56d59411b59d60819ec3e2cf6864521d09c19
|
[
"MIT"
] | 5
|
2018-05-05T09:39:23.000Z
|
2018-08-25T15:42:59.000Z
|
crizzle/envs/base/__init__.py
|
tasercake/Crypto_Algotrader
|
47b56d59411b59d60819ec3e2cf6864521d09c19
|
[
"MIT"
] | 1
|
2018-01-09T15:47:45.000Z
|
2018-01-09T15:47:45.000Z
|
from crizzle.envs.base.broker import Broker
from crizzle.envs.base.feed import Feed
| 41.5
| 43
| 0.843373
| 14
| 83
| 5
| 0.5
| 0.314286
| 0.428571
| 0.542857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084337
| 83
| 2
| 44
| 41.5
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4eb1e3c29339f016ced237a909aacb71653db65b
| 5,824
|
py
|
Python
|
build/lib/pyggi/tree/edits.py
|
s-marta/pyggi-bloa
|
aefe15eda32e713dc8402c9b8d4bcb7cb05b31c8
|
[
"MIT"
] | null | null | null |
build/lib/pyggi/tree/edits.py
|
s-marta/pyggi-bloa
|
aefe15eda32e713dc8402c9b8d4bcb7cb05b31c8
|
[
"MIT"
] | null | null | null |
build/lib/pyggi/tree/edits.py
|
s-marta/pyggi-bloa
|
aefe15eda32e713dc8402c9b8d4bcb7cb05b31c8
|
[
"MIT"
] | 1
|
2021-03-12T14:37:06.000Z
|
2021-03-12T14:37:06.000Z
|
import random
from ..base import AbstractEdit
from . import AbstractTreeEngine, XmlEngine
class StmtReplacement(AbstractEdit):
    """Replace the target statement with an ingredient statement."""

    def __init__(self, target, ingredient):
        self.target = target
        self.ingredient = ingredient

    def apply(self, program, new_contents, modification_points):
        # The engine that owns the target's file performs the replacement.
        return program.engines[self.target[0]].do_replace(
            program, self, new_contents, modification_points)

    @classmethod
    def create(cls, program, target_file=None, ingr_file=None, method='random'):
        """Randomly pick a target and a type-compatible ingredient."""
        if target_file is None:
            target_file = program.random_file(AbstractTreeEngine)
        if ingr_file is None:
            ingr_file = program.random_file(engine=program.engines[target_file])
        assert program.engines[target_file] == program.engines[ingr_file]
        chosen_target = program.random_target(target_file, method)
        chosen_ingredient = program.random_target(ingr_file, 'random')
        return cls(chosen_target, chosen_ingredient)
class StmtInsertion(AbstractEdit):
    """Insert an ingredient statement before or after the target statement."""

    def __init__(self, target, ingredient, direction='before'):
        assert direction in ['before', 'after']
        self.target = target
        self.ingredient = ingredient
        self.direction = direction

    def apply(self, program, new_contents, modification_points):
        # The engine that owns the target's file performs the insertion.
        return program.engines[self.target[0]].do_insert(
            program, self, new_contents, modification_points)

    @classmethod
    def create(cls, program, target_file=None, ingr_file=None, direction=None, method='random'):
        """Randomly pick a target, a compatible ingredient and a direction."""
        if target_file is None:
            target_file = program.random_file(AbstractTreeEngine)
        if ingr_file is None:
            ingr_file = program.random_file(engine=program.engines[target_file])
        assert program.engines[target_file] == program.engines[ingr_file]
        if direction is None:
            direction = random.choice(['before', 'after'])
        chosen_target = program.random_target(target_file, method)
        chosen_ingredient = program.random_target(ingr_file, 'random')
        return cls(chosen_target, chosen_ingredient, direction)
class StmtDeletion(AbstractEdit):
    """Delete the target statement."""

    def __init__(self, target):
        self.target = target

    def apply(self, program, new_contents, modification_points):
        # Resolve the engine from the target's file and delegate.
        owning_engine = program.engines[self.target[0]]
        return owning_engine.do_delete(program, self, new_contents, modification_points)

    @classmethod
    def create(cls, program, target_file=None, method='random'):
        """Randomly pick a statement to delete."""
        chosen_file = target_file
        if chosen_file is None:
            chosen_file = program.random_file(AbstractTreeEngine)
        return cls(program.random_target(chosen_file, method))
class StmtMoving(AbstractEdit):
    """Move the ingredient statement before/after the target statement.

    Implemented as an insertion at the target position followed by a
    deletion of the ingredient's original location.
    """

    def __init__(self, target, ingredient, direction='before'):
        assert direction in ['before', 'after']
        self.target = target
        self.ingredient = ingredient
        self.direction = direction

    def apply(self, program, new_contents, modification_points):
        engine = program.engines[self.target[0]]
        # First insert a copy of the ingredient at the target position.
        engine.do_insert(program, self, new_contents, modification_points)
        # Temporarily swap roles so do_delete removes the ORIGINAL
        # ingredient location, then swap back so the edit object is
        # unchanged after apply() returns.
        self.target, self.ingredient = self.ingredient, self.target
        return_code = engine.do_delete(program, self, new_contents, modification_points)
        self.target, self.ingredient = self.ingredient, self.target
        return return_code

    @classmethod
    def create(cls, program, target_file=None, ingr_file=None, direction=None, method='random'):
        """Randomly pick a target, a compatible ingredient and a direction."""
        if target_file is None:
            target_file = program.random_file(AbstractTreeEngine)
        if ingr_file is None:
            ingr_file = program.random_file(engine=program.engines[target_file])
        # Moving only makes sense between files handled by the same engine.
        assert program.engines[target_file] == program.engines[ingr_file]
        if direction is None:
            direction = random.choice(['before', 'after'])
        return cls(program.random_target(target_file, method),
                   program.random_target(ingr_file, 'random'),
                   direction)
class TextSetting(AbstractEdit):
    """Set the text of the target node to one value drawn from CHOICES."""

    CHOICES = ['']

    def __init__(self, target, value):
        self.target = target
        self.value = value

    def apply(self, program, new_contents, modification_points):
        engine = program.engines[self.target[0]]
        return engine.do_set_text(program, self.target, self.value, new_contents, modification_points)

    @classmethod
    def create(cls, program, target_file=None, method='random', choices=None):
        """Build a random TextSetting; ``choices`` defaults to ``cls.CHOICES``."""
        # Fixed: `choices == None` -> `choices is None`.  Identity is the
        # idiomatic None test (PEP 8) and avoids invoking a custom __eq__.
        if choices is None:
            choices = cls.CHOICES
        if target_file is None:
            target_file = program.random_file(XmlEngine)
        target = program.random_target(target_file, method)
        value = random.choice(choices)
        return cls(target, value)
class TextWrapping(AbstractEdit):
    """Wrap the target node's text between a (prefix, suffix) pair from CHOICES."""

    CHOICES = [('(', ')')]

    def __init__(self, target, value):
        self.target = target
        self.value = value  # (prefix, suffix) tuple

    def apply(self, program, new_contents, modification_points):
        engine = program.engines[self.target[0]]
        return engine.do_wrap_text(program, self.target, self.value[0], self.value[1], new_contents, modification_points)

    @classmethod
    def create(cls, program, target_file=None, method='random', choices=None):
        """Build a random TextWrapping; ``choices`` defaults to ``cls.CHOICES``."""
        # Fixed: `choices == None` -> `choices is None` (identity test, PEP 8).
        if choices is None:
            choices = cls.CHOICES
        if target_file is None:
            target_file = program.random_file(XmlEngine)
        target = program.random_target(target_file, method)
        value = random.choice(choices)
        return cls(target, value)
class ComparisonOperatorSetting(TextSetting):
    # TextSetting specialised to rewrite a node to one of the comparison operators.
    CHOICES = ['==', '!=', '<', '<=', '>', '>=']
class ArithmeticOperatorSetting(TextSetting):
    # TextSetting specialised to rewrite a node to one of the arithmetic operators.
    CHOICES = ['+', '-', '*', '/', '%']
class NumericSetting(TextSetting):
    # TextSetting specialised to rewrite a node to a small literal constant.
    CHOICES = ['-1', '0', '1']
class RelativeNumericSetting(TextWrapping):
    # TextWrapping specialised to perturb a numeric expression relative to its
    # current value (e.g. wrap `x` into `(x+1)` or `(x*2/3)`).
    CHOICES = [('(', '+1)'), ('(', '-1)'), ('(', '/2)'), ('(', '*2)'), ('(', '*3/2)'), ('(', '*2/3)')]
| 40.444444
| 121
| 0.663633
| 651
| 5,824
| 5.746544
| 0.093702
| 0.080192
| 0.079925
| 0.100775
| 0.892275
| 0.884523
| 0.847367
| 0.847367
| 0.835605
| 0.824913
| 0
| 0.004185
| 0.220467
| 5,824
| 143
| 122
| 40.727273
| 0.819824
| 0
| 0
| 0.720339
| 0
| 0
| 0.027301
| 0
| 0
| 0
| 0
| 0
| 0.042373
| 1
| 0.152542
| false
| 0
| 0.025424
| 0
| 0.415254
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
091526934ade68d9bee93adb3210e7afb28c6c2e
| 383
|
py
|
Python
|
chatette/prechecks/pyversion.py
|
SimGus/Chatette
|
fd22b6c2e4a27b222071c93772c2ae99387aa5c3
|
[
"MIT"
] | 263
|
2018-09-06T14:46:29.000Z
|
2022-03-31T08:40:19.000Z
|
chatette/prechecks/pyversion.py
|
IspML/Chatette
|
fd22b6c2e4a27b222071c93772c2ae99387aa5c3
|
[
"MIT"
] | 50
|
2018-09-06T14:50:18.000Z
|
2021-11-16T03:54:27.000Z
|
chatette/prechecks/pyversion.py
|
IspML/Chatette
|
fd22b6c2e4a27b222071c93772c2ae99387aa5c3
|
[
"MIT"
] | 49
|
2018-09-18T23:15:09.000Z
|
2022-03-02T11:23:08.000Z
|
from sys import version_info
def _get_python_version_as_str():
    """Return the running interpreter's version as "major.minor" (e.g. "3.8").

    Uses .format() rather than f-strings so the module still parses on
    Python 2.7, which this file's sibling checks explicitly support.
    """
    return '{0}.{1}'.format(version_info[0], version_info[1])
def _is_supported_python_version():
    """True on any Python 3, or on exactly Python 2.7."""
    major = version_info[0]
    if major == 3:
        return True
    return major == 2 and version_info[1] == 7
def _is_deprecated_python_version():
    """True on any Python 2, or on Python 3 below 3.4."""
    major = version_info[0]
    if major == 2:
        return True
    return major == 3 and version_info[1] < 4
| 25.533333
| 60
| 0.681462
| 60
| 383
| 3.983333
| 0.366667
| 0.414226
| 0.251046
| 0.217573
| 0.259414
| 0.259414
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.195822
| 383
| 14
| 61
| 27.357143
| 0.730519
| 0
| 0
| 0
| 0
| 0
| 0.002611
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.111111
| 0.333333
| 0.777778
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
09331a6a1431873c7f131c28b2b32692d1bc6f8b
| 16,127
|
py
|
Python
|
utils/network.py
|
msinto93/DDPG
|
e5a0e1cc4486234c2307036be3f13c2b18b1bf94
|
[
"MIT"
] | 20
|
2018-11-08T11:43:04.000Z
|
2022-03-04T20:44:11.000Z
|
utils/network.py
|
msinto93/DDPG
|
e5a0e1cc4486234c2307036be3f13c2b18b1bf94
|
[
"MIT"
] | null | null | null |
utils/network.py
|
msinto93/DDPG
|
e5a0e1cc4486234c2307036be3f13c2b18b1bf94
|
[
"MIT"
] | 9
|
2019-02-19T07:35:29.000Z
|
2021-05-05T16:37:09.000Z
|
'''
## Network ##
# Defines the DDPG Value (critic) and Policy (Actor) networks - with and without batch norm
@author: Mark Sinton (msinto93@gmail.com)
'''
import tensorflow as tf
import numpy as np
from utils.ops import dense, batchnorm, relu, tanh
class Critic:
    """DDPG critic (value) network without batch norm.

    Builds a Q(state, action) estimator as a TF1 graph: the state passes
    through one dense+relu layer, is merged with the action input in a
    second dense layer, and a final 1-unit dense layer emits the Q value.
    """

    def __init__(self, state, action, state_dims, action_dims, args, scope='critic'):
        # state - State input to pass through the network
        # action - Action input for which the Q value should be predicted
        self.state = state
        self.action = action
        self.state_dims = np.prod(state_dims)  # Used to calculate the fan_in of the state layer (e.g. if state_dims is (3,2) fan_in should equal 6)
        self.action_dims = np.prod(action_dims)
        self.args = args
        self.scope = scope

        # Networks params
        dense1_size = self.args.dense1_size
        dense2_size = self.args.dense2_size
        final_layer_init = self.args.final_layer_init

        with tf.variable_scope(self.scope):
            # Hidden layers use fan-in uniform init U(-1/sqrt(fan_in), 1/sqrt(fan_in)).
            self.dense1_mul = dense(self.state, dense1_size, weight_init=tf.random_uniform_initializer((-1/tf.sqrt(tf.to_float(self.state_dims))), 1/tf.sqrt(tf.to_float(self.state_dims))),
                                    bias_init=tf.random_uniform_initializer((-1/tf.sqrt(tf.to_float(self.state_dims))), 1/tf.sqrt(tf.to_float(self.state_dims))), scope='dense1')

            self.dense1 = relu(self.dense1_mul, scope='dense1')

            # Merge first dense layer with action input to get second dense layer
            self.dense2a = dense(self.dense1, dense2_size, weight_init=tf.random_uniform_initializer((-1/tf.sqrt(tf.to_float(dense1_size+self.action_dims))), 1/tf.sqrt(tf.to_float(dense1_size+self.action_dims))),
                                 bias_init=tf.random_uniform_initializer((-1/tf.sqrt(tf.to_float(dense1_size+self.action_dims))), 1/tf.sqrt(tf.to_float(dense1_size+self.action_dims))), scope='dense2a')

            self.dense2b = dense(self.action, dense2_size, weight_init=tf.random_uniform_initializer((-1/tf.sqrt(tf.to_float(dense1_size+self.action_dims))), 1/tf.sqrt(tf.to_float(dense1_size+self.action_dims))),
                                 bias_init=tf.random_uniform_initializer((-1/tf.sqrt(tf.to_float(dense1_size+self.action_dims))), 1/tf.sqrt(tf.to_float(dense1_size+self.action_dims))), scope='dense2b')

            self.dense2 = relu(self.dense2a + self.dense2b, scope='dense2')

            # Final layer drawn from U(-final_layer_init, final_layer_init) —
            # presumably a small range per the DDPG paper; confirm via args.
            self.output = dense(self.dense2, 1, weight_init=tf.random_uniform_initializer(-1*final_layer_init, final_layer_init),
                                bias_init=tf.random_uniform_initializer(-1*final_layer_init, final_layer_init), scope='output')

        self.network_params = tf.trainable_variables(scope=self.scope)

        # gradient of value output wrt action input - used to train actor network
        self.action_grads = tf.gradients(self.output, self.action)

    def train_step(self, target_Q):
        """Build and return the op that minimises the critic loss.

        target_Q - Target Q value (immediate reward plus expected Q from next state)
        """
        with tf.variable_scope(self.scope):
            with tf.variable_scope('train'):
                learning_rate = self.args.critic_learning_rate
                l2_lambda = self.args.critic_l2_lambda

                self.optimizer = tf.train.AdamOptimizer(learning_rate)
                # MSE against the Bellman target, plus L2 weight decay applied
                # only to kernel (weight) variables, not biases.
                self.loss = tf.losses.mean_squared_error(target_Q, self.output)
                self.l2_reg_loss = tf.add_n([tf.nn.l2_loss(v) for v in self.network_params if 'kernel' in v.name]) * l2_lambda
                self.total_loss = self.loss + self.l2_reg_loss

                train_step = self.optimizer.minimize(self.total_loss, var_list=self.network_params)

                return train_step
class Actor:
    """Deterministic policy network: maps states to actions within given bounds."""

    def __init__(self, state, state_dims, action_dims, action_bound_low, action_bound_high, args, scope='actor'):
        # state - state input tensor fed through the network
        # action_bound_low/high - the tanh output in [-1, 1] is rescaled into this range
        self.state = state
        self.state_dims = np.prod(state_dims)  # flattened fan-in of the state layer (e.g. (3, 2) -> 6)
        self.action_dims = np.prod(action_dims)
        self.action_bound_low = action_bound_low
        self.action_bound_high = action_bound_high
        self.args = args
        self.scope = scope

        dense1_size = self.args.dense1_size
        dense2_size = self.args.dense2_size

        def fan_in_init(fan_in):
            # Uniform init in [-1/sqrt(fan_in), 1/sqrt(fan_in)] (DDPG paper scheme)
            limit = 1 / tf.sqrt(tf.to_float(fan_in))
            return tf.random_uniform_initializer(-limit, limit)

        final_init = tf.random_uniform_initializer(-1 * self.args.final_layer_init, self.args.final_layer_init)

        with tf.variable_scope(self.scope):
            self.dense1_mul = dense(self.state, dense1_size,
                                    weight_init=fan_in_init(self.state_dims),
                                    bias_init=fan_in_init(self.state_dims), scope='dense1')
            self.dense1 = relu(self.dense1_mul, scope='dense1')
            self.dense2_mul = dense(self.dense1, dense2_size,
                                    weight_init=fan_in_init(dense1_size),
                                    bias_init=fan_in_init(dense1_size), scope='dense2')
            self.dense2 = relu(self.dense2_mul, scope='dense2')
            # Final layer uses a small uniform init so initial actions are near zero
            self.output_mul = dense(self.dense2, self.action_dims,
                                    weight_init=final_init, bias_init=final_init, scope='output')
            self.output_tanh = tanh(self.output_mul, scope='output')
            # Rescale tanh output from [-1, 1] into [action_bound_low, action_bound_high]
            self.output = tf.multiply(0.5, tf.multiply(self.output_tanh, (self.action_bound_high - self.action_bound_low)) + (self.action_bound_high + self.action_bound_low))
            self.network_params = tf.trainable_variables(scope=self.scope)

    def train_step(self, action_grads):
        """Build and return the actor's training op.

        action_grads -- dQ/da from the critic, evaluated at this actor's actions.
        """
        with tf.variable_scope(self.scope):
            with tf.variable_scope('train'):
                self.optimizer = tf.train.AdamOptimizer(self.args.actor_learning_rate)
                # Chain rule: dQ/dtheta = dQ/da * da/dtheta (negated for gradient ascent on Q)
                self.grads = tf.gradients(self.output, self.network_params, -action_grads)
                # tf.gradients sums over the batch dimension; divide to get mean gradients
                batch_size = self.args.batch_size
                self.grads_scaled = [tf.divide(g, batch_size) for g in self.grads]
                return self.optimizer.apply_gradients(zip(self.grads_scaled, self.network_params))
class Critic_BN:
    """Q-value network with batch normalisation on the state pathway."""

    def __init__(self, state, action, state_dims, action_dims, args, is_training=False, scope='critic'):
        # state - state input tensor
        # action - action input whose Q value should be predicted
        # is_training - switches batchnorm between batch stats and moving averages
        self.state = state
        self.action = action
        self.state_dims = np.prod(state_dims)  # flattened fan-in of the state layer (e.g. (3, 2) -> 6)
        self.action_dims = np.prod(action_dims)
        self.args = args
        self.is_training = is_training
        self.scope = scope

        dense1_size = self.args.dense1_size
        dense2_size = self.args.dense2_size

        def fan_in_init(fan_in):
            # Uniform init in [-1/sqrt(fan_in), 1/sqrt(fan_in)] (DDPG paper scheme)
            limit = 1 / tf.sqrt(tf.to_float(fan_in))
            return tf.random_uniform_initializer(-limit, limit)

        final_init = tf.random_uniform_initializer(-1 * self.args.final_layer_init, self.args.final_layer_init)

        with tf.variable_scope(self.scope):
            self.input_norm = batchnorm(self.state, self.is_training, scope='input_norm')
            self.dense1_mul = dense(self.input_norm, dense1_size,
                                    weight_init=fan_in_init(self.state_dims),
                                    bias_init=fan_in_init(self.state_dims), scope='dense1')
            self.dense1_bn = batchnorm(self.dense1_mul, self.is_training, scope='dense1')
            self.dense1 = relu(self.dense1_bn, scope='dense1')
            # The action joins at the second layer, so its fan-in counts both inputs
            dense2_fan_in = dense1_size + self.action_dims
            self.dense2a = dense(self.dense1, dense2_size,
                                 weight_init=fan_in_init(dense2_fan_in),
                                 bias_init=fan_in_init(dense2_fan_in), scope='dense2a')
            self.dense2b = dense(self.action, dense2_size,
                                 weight_init=fan_in_init(dense2_fan_in),
                                 bias_init=fan_in_init(dense2_fan_in), scope='dense2b')
            self.dense2 = relu(self.dense2a + self.dense2b, scope='dense2')
            self.output = dense(self.dense2, 1,
                                weight_init=final_init, bias_init=final_init, scope='output')
            self.network_params = tf.trainable_variables(scope=self.scope)
            # Gradient of the value output wrt the action input - used to train the actor
            self.action_grads = tf.gradients(self.output, self.action)

    def train_step(self, target_Q):
        """Build and return the critic's training op (with batchnorm updates).

        target_Q -- target Q values (immediate reward plus expected Q of next state).
        """
        with tf.variable_scope(self.scope):
            with tf.variable_scope('train'):
                self.optimizer = tf.train.AdamOptimizer(self.args.critic_learning_rate)
                # MSE between bootstrapped targets and predicted Q values
                self.loss = tf.losses.mean_squared_error(target_Q, self.output)
                # L2 penalty on the weight matrices only ('kernel' vars; biases excluded)
                kernels = [v for v in self.network_params if 'kernel' in v.name]
                self.l2_reg_loss = self.args.critic_l2_lambda * tf.add_n([tf.nn.l2_loss(v) for v in kernels])
                self.total_loss = self.loss + self.l2_reg_loss
                # Ensure batchnorm moving means and variances update on every training step
                update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, self.scope)
                with tf.control_dependencies(update_ops):
                    return self.optimizer.minimize(self.total_loss, var_list=self.network_params)
class Actor_BN:
    """Policy network with batch normalisation: maps states to bounded actions."""

    def __init__(self, state, state_dims, action_dims, action_bound_low, action_bound_high, args, is_training=False, scope='actor'):
        # state - state input tensor fed through the network
        # action_bound_low/high - the tanh output in [-1, 1] is rescaled into this range
        # is_training - switches batchnorm between batch stats and moving averages
        self.state = state
        self.state_dims = np.prod(state_dims)  # flattened fan-in of the state layer (e.g. (3, 2) -> 6)
        self.action_dims = np.prod(action_dims)
        self.action_bound_low = action_bound_low
        self.action_bound_high = action_bound_high
        self.args = args
        self.is_training = is_training
        self.scope = scope

        dense1_size = self.args.dense1_size
        dense2_size = self.args.dense2_size

        def fan_in_init(fan_in):
            # Uniform init in [-1/sqrt(fan_in), 1/sqrt(fan_in)] (DDPG paper scheme)
            limit = 1 / tf.sqrt(tf.to_float(fan_in))
            return tf.random_uniform_initializer(-limit, limit)

        final_init = tf.random_uniform_initializer(-1 * self.args.final_layer_init, self.args.final_layer_init)

        with tf.variable_scope(self.scope):
            self.input_norm = batchnorm(self.state, self.is_training, scope='input_norm')
            self.dense1_mul = dense(self.input_norm, dense1_size,
                                    weight_init=fan_in_init(self.state_dims),
                                    bias_init=fan_in_init(self.state_dims), scope='dense1')
            self.dense1_bn = batchnorm(self.dense1_mul, self.is_training, scope='dense1')
            self.dense1 = relu(self.dense1_bn, scope='dense1')
            self.dense2_mul = dense(self.dense1, dense2_size,
                                    weight_init=fan_in_init(dense1_size),
                                    bias_init=fan_in_init(dense1_size), scope='dense2')
            self.dense2_bn = batchnorm(self.dense2_mul, self.is_training, scope='dense2')
            self.dense2 = relu(self.dense2_bn, scope='dense2')
            # Final layer uses a small uniform init so initial actions are near zero
            self.output_mul = dense(self.dense2, self.action_dims,
                                    weight_init=final_init, bias_init=final_init, scope='output')
            self.output_tanh = tanh(self.output_mul, scope='output')
            # Rescale tanh output from [-1, 1] into [action_bound_low, action_bound_high]
            self.output = tf.multiply(0.5, tf.multiply(self.output_tanh, (self.action_bound_high - self.action_bound_low)) + (self.action_bound_high + self.action_bound_low))
            self.network_params = tf.trainable_variables(scope=self.scope)

    def train_step(self, action_grads):
        """Build and return the actor's training op (with batchnorm updates).

        action_grads -- dQ/da from the critic, evaluated at this actor's actions.
        """
        with tf.variable_scope(self.scope):
            with tf.variable_scope('train'):
                self.optimizer = tf.train.AdamOptimizer(self.args.actor_learning_rate)
                # Chain rule: dQ/dtheta = dQ/da * da/dtheta (negated for gradient ascent on Q)
                self.grads = tf.gradients(self.output, self.network_params, -action_grads)
                # tf.gradients sums over the batch dimension; divide to get mean gradients
                batch_size = self.args.batch_size
                self.grads_scaled = [tf.divide(g, batch_size) for g in self.grads]
                # Ensure batchnorm moving means and variances update on every training step
                update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS, self.scope)
                with tf.control_dependencies(update_ops):
                    return self.optimizer.apply_gradients(zip(self.grads_scaled, self.network_params))
| 61.789272
| 213
| 0.622496
| 2,109
| 16,127
| 4.513039
| 0.081081
| 0.046228
| 0.029418
| 0.037823
| 0.97615
| 0.970792
| 0.970792
| 0.965119
| 0.965119
| 0.965119
| 0
| 0.017018
| 0.282198
| 16,127
| 261
| 214
| 61.789272
| 0.8052
| 0.142866
| 0
| 0.907895
| 0
| 0
| 0.017742
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.019737
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
11888b2afa4a815fc3170e815ec7885f83cb6b27
| 91,997
|
py
|
Python
|
UFO_models/SMEFTsim_top_MwScheme_UFO/decays.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 4
|
2020-12-29T03:42:43.000Z
|
2021-09-22T09:57:37.000Z
|
UFO_models/SMEFTsim_top_MwScheme_UFO/decays.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 3
|
2021-05-19T11:06:59.000Z
|
2021-12-11T00:12:02.000Z
|
UFO_models/SMEFTsim_top_MwScheme_UFO/decays.py
|
matthewfeickert/SMEFTsim
|
db7d4a80bdcff424eee27dde71f1eb09ac894039
|
[
"MIT"
] | 4
|
2021-09-22T09:57:39.000Z
|
2022-03-29T16:09:36.000Z
|
# This file was automatically created by FeynRules 2.3.35
# Mathematica version: 12.1.0 for Linux x86 (64-bit) (March 18, 2020)
# Date: Fri 8 Jan 2021 10:13:06
from object_library import all_decays, Decay
import particles as P
Decay_b = Decay(name = 'Decay_b',
particle = P.b,
partial_widths = {(P.W__minus__,P.t):'((16*ee**2*LambdaSMEFT**4*MB**4 - 32*ee**2*LambdaSMEFT**4*MB**2*MT**2 + 16*ee**2*LambdaSMEFT**4*MT**4 + 16*ee**2*LambdaSMEFT**4*MB**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MT**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 8*ee**2*LambdaSMEFT**2*(-4*cHQ3*MB**4 - cll1221*MB**4 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 4*cHQ3*MT**4 - cll1221*MT**4 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*vevhat**2 - 128*MW**2*(-12*cbWIm*ctWIm*MB*MT*MW**2 + 12*cbWRe*ctWRe*MB*MT*MW**2 - (ctWIm**2 + ctWRe**2)*(2*MB**4 + 2*MT**4 - MT**2*MW**2 - MW**4 - MB**2*(4*MT**2 + MW**2)) + cbWIm**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)) + cbWRe**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)))*sth**2*vevhat**2 + ee**2*(16*cHQ3**2*MB**4 + 4*cHtbIm**2*MB**4 + 4*cHtbRe**2*MB**4 + 8*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 32*cHQ3**2*MB**2*MT**2 - 8*cHtbIm**2*MB**2*MT**2 - 8*cHtbRe**2*MB**2*MT**2 - 16*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 16*cHQ3**2*MT**4 + 4*cHtbIm**2*MT**4 + 4*cHtbRe**2*MT**4 + 8*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 16*cHQ3**2*MB**2*MW**2 + 4*cHtbIm**2*MB**2*MW**2 + 4*cHtbRe**2*MB**2*MW**2 + 8*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 + 96*cHQ3*cHtbRe*MB*MT*MW**2 + 24*cHtbRe*cll1221*MB*MT*MW**2 + 16*cHQ3**2*MT**2*MW**2 + 4*cHtbIm**2*MT**2*MW**2 + 4*cHtbRe**2*MT**2*MW**2 + 8*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 32*cHQ3**2*MW**4 - 8*cHtbIm**2*MW**4 - 8*cHtbRe**2*MW**4 - 16*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl322**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311*(-(cll1221*MB**4) + 
2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) - 4*cHl322*(12*cHtbRe*MB*MT*MW**2 + 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + cll1221*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*vevhat**4 + 192*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth*vevhat*cmath.sqrt(2) + 48*ee*MW**2*(cbWRe*(-4*cHQ3*MB**3 - cll1221*MB**3 - 2*cHtbRe*MB**2*MT + 4*cHQ3*MB*MT**2 + cll1221*MB*MT**2 + 2*cHtbRe*MT**3 + 4*cHQ3*MB*MW**2 + cll1221*MB*MW**2 - 2*cHtbRe*MT*MW**2 + 2*cHl311*MB*(MB**2 - MT**2 - MW**2) + 2*cHl322*MB*(MB**2 - MT**2 - MW**2)) - 2*cHtbIm*(ctWIm*MB*(MB**2 - MT**2 - MW**2) + cbWIm*MT*(MB**2 - MT**2 + MW**2)) + ctWRe*(2*cHtbRe*MB*(MB**2 - MT**2 - MW**2) - (2*cHl311 + 2*cHl322 - 4*cHQ3 - cll1221)*MT*(MB**2 - MT**2 + MW**2)))*sth*vevhat**3*cmath.sqrt(2))*cmath.sqrt(MB**4 + (MT**2 - MW**2)**2 - 2*MB**2*(MT**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MB**3*MW**2*sth**2)'})
Decay_c = Decay(name = 'Decay_c',
particle = P.c,
partial_widths = {(P.W__plus__,P.s):'((16*ee**2*LambdaSMEFT**4*MC**4 - 32*ee**2*LambdaSMEFT**4*MC**2*MS**2 + 16*ee**2*LambdaSMEFT**4*MS**4 + 16*ee**2*LambdaSMEFT**4*MC**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MS**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 + 8*ee**2*LambdaSMEFT**2*vevhat**2*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 12*cHudRe*MC*MS*MW**2*yc*ys) - 128*MW**2*sth**2*vevhat**2*(cuWIm**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 + cuWRe**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 - 12*cdWIm*cuWIm*MC*MS*MW**2*yc*ys + 12*cdWRe*cuWRe*MC*MS*MW**2*yc*ys - (cdWIm**2 + cdWRe**2)*(2*MC**4 + 2*MS**4 - MS**2*MW**2 - MW**4 - MC**2*(4*MS**2 + MW**2))*ys**2) + ee**2*vevhat**4*(4*cHl322**2*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl322**2*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl322**2*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl322**2*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl322**2*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + 16*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 4*cHl311**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 48*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 24*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 4*cHudIm**2*MC**4*yc**2*ys**2 + 4*cHudRe**2*MC**4*yc**2*ys**2 - 8*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 8*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 4*cHudIm**2*MS**4*yc**2*ys**2 + 4*cHudRe**2*MS**4*yc**2*ys**2 + 
4*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 8*cHudIm**2*MW**4*yc**2*ys**2 - 8*cHudRe**2*MW**4*yc**2*ys**2 - 8*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) + 4*cHl311*(2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - cll1221*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 48*ee*MW**2*sth*vevhat**3*(2*cHl322*cuWRe*MC**3*yc - cll1221*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC*MS**2*yc + cll1221*cuWRe*MC*MS**2*yc - 2*cHl322*cuWRe*MC*MW**2*yc + cll1221*cuWRe*MC*MW**2*yc - 2*cdWRe*cHl322*MC**2*MS*ys + cdWRe*cll1221*MC**2*MS*ys + 2*cdWRe*cHl322*MS**3*ys - cdWRe*cll1221*MS**3*ys - 2*cdWRe*cHl322*MS*MW**2*ys + cdWRe*cll1221*MS*MW**2*ys + 2*cHudIm*cuWIm*MC**2*MS*yc**2*ys - 2*cHudRe*cuWRe*MC**2*MS*yc**2*ys - 2*cHudIm*cuWIm*MS**3*yc**2*ys + 2*cHudRe*cuWRe*MS**3*yc**2*ys + 2*cHudIm*cuWIm*MS*MW**2*yc**2*ys - 2*cHudRe*cuWRe*MS*MW**2*yc**2*ys + 2*cdWIm*cHudIm*MC**3*yc*ys**2 + 2*cdWRe*cHudRe*MC**3*yc*ys**2 - 2*cdWIm*cHudIm*MC*MS**2*yc*ys**2 - 2*cdWRe*cHudRe*MC*MS**2*yc*ys**2 - 2*cdWIm*cHudIm*MC*MW**2*yc*ys**2 - 2*cdWRe*cHudRe*MC*MW**2*yc*ys**2 + 2*cHl311*(cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cdWRe*MS*(-MC**2 + MS**2 - MW**2)*ys) + 4*cHj3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys))*cmath.sqrt(2))*cmath.sqrt(MC**4 + (MS**2 - MW**2)**2 - 2*MC**2*(MS**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MC**3*MW**2*sth**2)'})
Decay_d = Decay(name = 'Decay_d',
particle = P.d,
partial_widths = {(P.W__minus__,P.u):'((16*ee**2*LambdaSMEFT**4*MD**4 - 32*ee**2*LambdaSMEFT**4*MD**2*MU**2 + 16*ee**2*LambdaSMEFT**4*MU**4 + 16*ee**2*LambdaSMEFT**4*MD**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MU**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 + 8*ee**2*LambdaSMEFT**2*vevhat**2*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 12*cHudRe*MD*MU*MW**2*ydo*yup) - 128*MW**2*sth**2*vevhat**2*(cdWIm**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 + cdWRe**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 - 12*cdWIm*cuWIm*MD*MU*MW**2*ydo*yup + 12*cdWRe*cuWRe*MD*MU*MW**2*ydo*yup - (cuWIm**2 + cuWRe**2)*(2*MD**4 + 2*MU**4 - MU**2*MW**2 - MW**4 - MD**2*(4*MU**2 + MW**2))*yup**2) + ee**2*vevhat**4*(4*cHl322**2*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl322**2*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl322**2*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl322**2*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl322**2*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + 16*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 4*cHl311**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 48*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 24*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 4*cHudIm**2*MD**4*ydo**2*yup**2 + 4*cHudRe**2*MD**4*ydo**2*yup**2 - 8*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 8*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 4*cHudIm**2*MU**4*ydo**2*yup**2 + 4*cHudRe**2*MU**4*ydo**2*yup**2 
+ 4*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 8*cHudIm**2*MW**4*ydo**2*yup**2 - 8*cHudRe**2*MW**4*ydo**2*yup**2 - 8*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) + 4*cHl311*(2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - cll1221*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 48*ee*MW**2*sth*vevhat**3*(yup*(-2*cHl322*cuWRe*MD**2*MU + cll1221*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MU**3 - cll1221*cuWRe*MU**3 - 2*cHl322*cuWRe*MU*MW**2 + cll1221*cuWRe*MU*MW**2 + 4*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 2*cHl311*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 2*cdWIm*cHudIm*MD**2*MU*ydo**2 + 2*cdWIm*cHudIm*MU**3*ydo**2 - 2*cdWIm*cHudIm*MU*MW**2*ydo**2 - 2*cHudIm*cuWIm*MD**3*ydo*yup + 2*cHudRe*cuWRe*MD**3*ydo*yup + 2*cHudIm*cuWIm*MD*MU**2*ydo*yup - 2*cHudRe*cuWRe*MD*MU**2*ydo*yup + 2*cHudIm*cuWIm*MD*MW**2*ydo*yup - 2*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(2*cHl322*MD**3 - cll1221*MD**3 - 2*cHl322*MD*MU**2 + cll1221*MD*MU**2 - 2*cHl322*MD*MW**2 + cll1221*MD*MW**2 + 2*cHl311*MD*(MD**2 - MU**2 - MW**2) + 4*cHj3*MD*(-MD**2 + MU**2 + MW**2) - 2*cHudRe*MD**2*MU*ydo*yup + 2*cHudRe*MU**3*ydo*yup - 2*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2))*cmath.sqrt(MD**4 + (MU**2 - MW**2)**2 - 2*MD**2*(MU**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MD**3*MW**2*sth**2)'})
Decay_e__minus__ = Decay(name = 'Decay_e__minus__',
particle = P.e__minus__,
partial_widths = {(P.W__minus__,P.ve):'((Me**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*Me**2 + 32*ee**2*LambdaSMEFT**4*MW**2 + 8*(2*cHl311 - 2*cHl322 + cll1221)*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2)*vevhat**2 + 128*(ceWIm11**2 + ceWRe11**2)*MW**2*(2*Me**2 + MW**2)*sth**2*vevhat**2 + (2*cHl311 - 2*cHl322 + cll1221)**2*ee**2*(Me**2 + 2*MW**2)*vevhat**4 - 192*ceWRe11*ee*LambdaSMEFT**2*Me*MW**2*sth*vevhat*cmath.sqrt(2) - 48*ceWRe11*(2*cHl311 - 2*cHl322 + cll1221)*ee*Me*MW**2*sth*vevhat**3*cmath.sqrt(2)))/(1024.*cmath.pi*LambdaSMEFT**4*Me**3*MW**2*sth**2)'})
Decay_H = Decay(name = 'Decay_H',
particle = P.H,
partial_widths = {(P.a,P.a):'(MH**3*(gHaa**2*LambdaSMEFT**4 - 2*gHaa*LambdaSMEFT**2*(-cHB + cHWB*cth*sth + (cHB - cHW)*sth**2)*vevhat**2 + (2*cHB*sth*(cHWB*cth - cHW*sth)*(-1 + sth**2) + 2*cHBtil*sth*(cHWBtil*cth - cHWtil*sth)*(-1 + sth**2) + cHB**2*(-1 + sth**2)**2 + cHBtil**2*(-1 + sth**2)**2 + sth**2*(cHWB**2 + cHWBtil**2 - 2*(cHW*cHWB + cHWBtil*cHWtil)*cth*sth + (cHW**2 - cHWB**2 - cHWBtil**2 + cHWtil**2)*sth**2))*vevhat**4))/(4.*cmath.pi*LambdaSMEFT**4*vevhat**2)',
(P.a,P.Z):'((MH**2 - MZ**2)**3*(gHza**2*LambdaSMEFT**4 - 2*gHza*LambdaSMEFT**2*(cHWB + 2*(cHB - cHW)*cth*sth - 2*cHWB*sth**2)*vevhat**2 + (cHWB**2*(1 - 2*sth**2)**2 + cHWBtil**2*(1 - 2*sth**2)**2 - 4*(cHB**2 + cHBtil**2 - 2*cHB*cHW + cHW**2 - 2*cHBtil*cHWtil + cHWtil**2)*sth**2*(-1 + sth**2) - 4*(cHB - cHW)*cHWB*cth*sth*(-1 + 2*sth**2) - 4*cHWBtil*(cHBtil - cHWtil)*cth*sth*(-1 + 2*sth**2))*vevhat**4))/(8.*cmath.pi*LambdaSMEFT**4*MH**3*vevhat**2)',
(P.b,P.b__tilde__):'(3*(-64*LambdaSMEFT**4*MB**2*yb**2 + 16*LambdaSMEFT**4*MH**2*yb**2 + 8*LambdaSMEFT**2*(4*MB**2 - MH**2)*vevhat**2*yb*(4*cbHRe + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*yb) + vevhat**4*(16*cbHIm**2*MH**2 + 16*cbHRe**2*(-4*MB**2 + MH**2) + 8*cbHRe*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221)*(4*MB**2 - MH**2)*yb - (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)**2*(4*MB**2 - MH**2)*yb**2))*cmath.sqrt(-4*MB**2 + MH**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.c,P.c__tilde__):'(3*(-64*LambdaSMEFT**4*MC**2 + 16*LambdaSMEFT**4*MH**2 - 8*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221 - 4*cuHRe)*LambdaSMEFT**2*(4*MC**2 - MH**2)*vevhat**2 + (-16*cHl311**2*MC**2 - 32*cHl311*cHl322*MC**2 - 16*cHl322**2*MC**2 + 16*cHl311*cll1221*MC**2 + 16*cHl322*cll1221*MC**2 - 4*cll1221**2*MC**2 - 64*cHl311*cuHRe*MC**2 - 64*cHl322*cuHRe*MC**2 + 32*cll1221*cuHRe*MC**2 - 64*cuHRe**2*MC**2 + 4*cHl311**2*MH**2 + 8*cHl311*cHl322*MH**2 + 4*cHl322**2*MH**2 - 4*cHl311*cll1221*MH**2 - 4*cHl322*cll1221*MH**2 + cll1221**2*MH**2 + 16*cuHIm**2*MH**2 + 16*cHl311*cuHRe*MH**2 + 16*cHl322*cuHRe*MH**2 - 8*cll1221*cuHRe*MH**2 + 16*cuHRe**2*MH**2 - 2*cHDD*(2*cHl311 + 2*cHl322 - cll1221 + 4*cuHRe)*(4*MC**2 - MH**2) + 8*cHbox*(cHDD + 2*cHl311 + 2*cHl322 - cll1221 + 4*cuHRe)*(4*MC**2 - MH**2) + 16*cHbox**2*(-4*MC**2 + MH**2) + cHDD**2*(-4*MC**2 + MH**2))*vevhat**4)*yc**2*cmath.sqrt(-4*MC**2 + MH**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.d,P.d__tilde__):'(3*(-64*LambdaSMEFT**4*MD**2 + 16*LambdaSMEFT**4*MH**2 + 8*(4*cdHRe - 4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*LambdaSMEFT**2*(4*MD**2 - MH**2)*vevhat**2 + (-4*cHDD**2*MD**2 - 16*cHDD*cHl311*MD**2 - 16*cHl311**2*MD**2 - 16*cHDD*cHl322*MD**2 - 32*cHl311*cHl322*MD**2 - 16*cHl322**2*MD**2 + 8*cHDD*cll1221*MD**2 + 16*cHl311*cll1221*MD**2 + 16*cHl322*cll1221*MD**2 - 4*cll1221**2*MD**2 + 16*cdHIm**2*MH**2 + cHDD**2*MH**2 + 4*cHDD*cHl311*MH**2 + 4*cHl311**2*MH**2 + 4*cHDD*cHl322*MH**2 + 8*cHl311*cHl322*MH**2 + 4*cHl322**2*MH**2 - 2*cHDD*cll1221*MH**2 - 4*cHl311*cll1221*MH**2 - 4*cHl322*cll1221*MH**2 + cll1221**2*MH**2 + 8*cHbox*(cHDD + 2*cHl311 + 2*cHl322 - cll1221)*(4*MD**2 - MH**2) + 8*cdHRe*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221)*(4*MD**2 - MH**2) + 16*cdHRe**2*(-4*MD**2 + MH**2) + 16*cHbox**2*(-4*MD**2 + MH**2))*vevhat**4)*ydo**2*cmath.sqrt(-4*MD**2 + MH**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.e__minus__,P.e__plus__):'((-64*LambdaSMEFT**4*Me**2*ye**2 + 16*LambdaSMEFT**4*MH**2*ye**2 + 8*LambdaSMEFT**2*(4*Me**2 - MH**2)*vevhat**2*ye*(4*ceHRe11 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ye) + vevhat**4*(16*ceHIm11**2*MH**2 + 16*ceHRe11**2*(-4*Me**2 + MH**2) + 8*ceHRe11*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221)*(4*Me**2 - MH**2)*ye - (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)**2*(4*Me**2 - MH**2)*ye**2))*cmath.sqrt(-4*Me**2 + MH**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.g,P.g):'(MH**3*(gHgg2**2*LambdaSMEFT**4*MH**4 - 4*gHgg2*LambdaSMEFT**2*MH**2*MT**2*(gHgg1*LambdaSMEFT**2 + cHG*vevhat**2) + 4*MT**4*(gHgg1**2*LambdaSMEFT**4 + 2*cHG*gHgg1*LambdaSMEFT**2*vevhat**2 + (cHG**2 + cHGtil**2)*vevhat**4)))/(2.*cmath.pi*LambdaSMEFT**4*MT**4*vevhat**2)',
(P.mu__minus__,P.mu__plus__):'((16*LambdaSMEFT**4*MH**2*ym**2 - 64*LambdaSMEFT**4*MMU**2*ym**2 - 8*LambdaSMEFT**2*(MH**2 - 4*MMU**2)*vevhat**2*ym*(4*ceHRe22 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ym) + vevhat**4*(16*ceHIm22**2*MH**2 + (MH**2 - 4*MMU**2)*(4*ceHRe22 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ym)**2))*cmath.sqrt(MH**2 - 4*MMU**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.s,P.s__tilde__):'(3*(16*LambdaSMEFT**4*MH**2 - 64*LambdaSMEFT**4*MS**2 - 8*(4*cdHRe - 4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*LambdaSMEFT**2*(MH**2 - 4*MS**2)*vevhat**2 + (16*cdHIm**2*MH**2 + (4*cdHRe - 4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)**2*(MH**2 - 4*MS**2))*vevhat**4)*ys**2*cmath.sqrt(MH**2 - 4*MS**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.ta__minus__,P.ta__plus__):'((16*LambdaSMEFT**4*MH**2*ytau**2 - 64*LambdaSMEFT**4*MTA**2*ytau**2 - 8*LambdaSMEFT**2*(MH**2 - 4*MTA**2)*vevhat**2*ytau*(4*ceHRe33 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ytau) + vevhat**4*(16*ceHIm33**2*MH**2 + (MH**2 - 4*MTA**2)*(4*ceHRe33 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*ytau)**2))*cmath.sqrt(MH**2 - 4*MTA**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.t,P.t__tilde__):'(3*(16*LambdaSMEFT**4*MH**2*yt**2 - 64*LambdaSMEFT**4*MT**2*yt**2 - 8*LambdaSMEFT**2*(MH**2 - 4*MT**2)*vevhat**2*yt*(4*ctHRe + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*yt) + vevhat**4*(16*ctHIm**2*MH**2 + (MH**2 - 4*MT**2)*(4*ctHRe + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)*yt)**2))*cmath.sqrt(MH**4 - 4*MH**2*MT**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**3)',
(P.u,P.u__tilde__):'(3*(16*LambdaSMEFT**4*MH**2 - 64*LambdaSMEFT**4*MU**2 + 8*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221 - 4*cuHRe)*LambdaSMEFT**2*(MH**2 - 4*MU**2)*vevhat**2 + (4*cHl311**2*MH**2 + 8*cHl311*cHl322*MH**2 + 4*cHl322**2*MH**2 - 4*cHl311*cll1221*MH**2 - 4*cHl322*cll1221*MH**2 + cll1221**2*MH**2 + 16*cuHIm**2*MH**2 + 16*cHl311*cuHRe*MH**2 + 16*cHl322*cuHRe*MH**2 - 8*cll1221*cuHRe*MH**2 + 16*cuHRe**2*MH**2 - 16*cHl311**2*MU**2 - 32*cHl311*cHl322*MU**2 - 16*cHl322**2*MU**2 + 16*cHl311*cll1221*MU**2 + 16*cHl322*cll1221*MU**2 - 4*cll1221**2*MU**2 - 64*cHl311*cuHRe*MU**2 - 64*cHl322*cuHRe*MU**2 + 32*cll1221*cuHRe*MU**2 - 64*cuHRe**2*MU**2 + 16*cHbox**2*(MH**2 - 4*MU**2) + cHDD**2*(MH**2 - 4*MU**2) + 2*cHDD*(2*cHl311 + 2*cHl322 - cll1221 + 4*cuHRe)*(MH**2 - 4*MU**2) - 8*cHbox*(cHDD + 2*cHl311 + 2*cHl322 - cll1221 + 4*cuHRe)*(MH**2 - 4*MU**2))*vevhat**4)*yup**2*cmath.sqrt(MH**2 - 4*MU**2))/(256.*cmath.pi*LambdaSMEFT**4*MH**2)',
(P.W__minus__,P.W__plus__):'(vevhat**2*(16*ee**4*LambdaSMEFT**4*MH**4 - 64*ee**4*LambdaSMEFT**4*MH**2*MW**2 + 192*ee**4*LambdaSMEFT**4*MW**4 - 1536*cHW*ee**2*LambdaSMEFT**2*MW**4*(MH**2 - 2*MW**2)*sth**2 + 2048*MW**4*(cHWtil**2*MH**2*(MH**2 - 4*MW**2) + cHW**2*(MH**4 - 4*MH**2*MW**2 + 6*MW**4))*sth**4 + 8*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**4*LambdaSMEFT**2*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*vevhat**2 - 384*cHW*(4*cHbox - cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*MW**4*(MH**2 - 2*MW**2)*sth**2*vevhat**2 + (-4*cHbox + cHDD + 2*cHl311 + 2*cHl322 - cll1221)**2*ee**4*(MH**4 - 4*MH**2*MW**2 + 12*MW**4)*vevhat**4)*cmath.sqrt(MH**2 - 4*MW**2))/(4096.*cmath.pi*LambdaSMEFT**4*MH**2*MW**4*sth**4)',
(P.Z,P.Z):'(vevhat**2*(16*ee**4*LambdaSMEFT**4*MH**4 - 64*ee**4*LambdaSMEFT**4*MH**2*MZ**2 + 192*ee**4*LambdaSMEFT**4*MZ**4 - 1536*cHW*ee**2*LambdaSMEFT**2*MZ**4*(MH**2 - 2*MZ**2)*sth**2 - 1536*cHWB*cth*ee**2*LambdaSMEFT**2*MZ**4*(MH**2 - 2*MZ**2)*sth**3 + 512*MZ**4*(4*cHWtil**2*MH**2*(MH**2 - 4*MZ**2) - 3*cHB*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) + 6*cHW*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) + 4*cHW**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**4 + 512*cth*MZ**4*(8*cHWBtil*cHWtil*MH**2*(MH**2 - 4*MZ**2) + 3*cHWB*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) + 8*cHW*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**5 + 512*MZ**4*(4*(cHWBtil**2 + 2*(cHBtil - 2*cHWtil)*cHWtil)*MH**2*(MH**2 - 4*MZ**2) - 3*cHW*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) - 16*cHW**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) + 4*cHWB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) + cHB*(3*ee**2*LambdaSMEFT**2*(MH**2 - 2*MZ**2) + 8*cHW*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4)))*sth**6 + 4096*cth*MZ**4*(cHWBtil*(cHBtil - 3*cHWtil)*MH**2*(MH**2 - 4*MZ**2) + cHB*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 3*cHW*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**7 + 2048*MZ**4*(-6*cHBtil*cHWtil*MH**2*(MH**2 - 4*MZ**2) + cHBtil**2*(MH**4 - 4*MH**2*MZ**2) + cHB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 6*cHB*cHW*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) + 6*cHW**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 3*((cHWBtil**2 - 2*cHWtil**2)*MH**2*(MH**2 - 4*MZ**2) + cHWB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4)))*sth**8 - 4096*cth*MZ**4*(cHWBtil*(2*cHBtil - 3*cHWtil)*MH**2*(MH**2 - 4*MZ**2) + 2*cHB*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 3*cHW*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**9 - 2048*MZ**4*(4*cHW**2*MH**4 - 3*cHWB**2*MH**4 - 3*cHWBtil**2*MH**4 + 4*cHWtil**2*MH**4 - 16*cHW**2*MH**2*MZ**2 + 12*cHWB**2*MH**2*MZ**2 + 12*cHWBtil**2*MH**2*MZ**2 - 16*cHWtil**2*MH**2*MZ**2 + 24*cHW**2*MZ**4 - 18*cHWB**2*MZ**4 - 6*cHBtil*cHWtil*MH**2*(MH**2 - 4*MZ**2) + 2*cHBtil**2*(MH**4 - 4*MH**2*MZ**2) + 2*cHB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 6*cHB*cHW*(MH**4 - 4*MH**2*MZ**2 
+ 6*MZ**4))*sth**10 + 4096*cth*MZ**4*(cHWBtil*(cHBtil - cHWtil)*MH**2*(MH**2 - 4*MZ**2) + cHB*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - cHW*cHWB*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**11 + 2048*MZ**4*(cHW**2*MH**4 - cHWB**2*MH**4 - cHWBtil**2*MH**4 + cHWtil**2*MH**4 - 4*cHW**2*MH**2*MZ**2 + 4*cHWB**2*MH**2*MZ**2 + 4*cHWBtil**2*MH**2*MZ**2 - 4*cHWtil**2*MH**2*MZ**2 + 6*cHW**2*MZ**4 - 6*cHWB**2*MZ**4 - 2*cHBtil*cHWtil*MH**2*(MH**2 - 4*MZ**2) + cHBtil**2*(MH**4 - 4*MH**2*MZ**2) + cHB**2*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4) - 2*cHB*cHW*(MH**4 - 4*MH**2*MZ**2 + 6*MZ**4))*sth**12 + 8*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**4*LambdaSMEFT**2*(MH**4 - 4*MH**2*MZ**2 + 12*MZ**4)*vevhat**2 - 384*cHW*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**2*vevhat**2 - 384*cHWB*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**3*vevhat**2 - 384*(cHB - 2*cHW)*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**4*vevhat**2 + 384*cHWB*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**5*vevhat**2 + 384*(cHB - cHW)*(4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*MZ**4*(MH**2 - 2*MZ**2)*sth**6*vevhat**2 + (4*cHbox + cHDD - 2*cHl311 - 2*cHl322 + cll1221)**2*ee**4*(MH**4 - 4*MH**2*MZ**2 + 12*MZ**4)*vevhat**4)*cmath.sqrt(MH**2 - 4*MZ**2))/(8192.*cth**4*cmath.pi*LambdaSMEFT**4*MH**2*MZ**4*sth**4)'})
Decay_mu__minus__ = Decay(name = 'Decay_mu__minus__',
particle = P.mu__minus__,
partial_widths = {(P.W__minus__,P.vm):'((MMU**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*MMU**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 8*(2*cHl311 - 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2)*vevhat**2 + 128*(ceWIm22**2 + ceWRe22**2)*MW**2*(2*MMU**2 + MW**2)*sth**2*vevhat**2 + (-2*cHl311 + 2*cHl322 + cll1221)**2*ee**2*(MMU**2 + 2*MW**2)*vevhat**4 - 192*ceWRe22*ee*LambdaSMEFT**2*MMU*MW**2*sth*vevhat*cmath.sqrt(2) + 48*ceWRe22*(2*cHl311 - 2*cHl322 - cll1221)*ee*MMU*MW**2*sth*vevhat**3*cmath.sqrt(2)))/(1024.*cmath.pi*LambdaSMEFT**4*MMU**3*MW**2*sth**2)'})
Decay_s = Decay(name = 'Decay_s',
particle = P.s,
partial_widths = {(P.W__minus__,P.c):'((16*ee**2*LambdaSMEFT**4*MC**4 - 32*ee**2*LambdaSMEFT**4*MC**2*MS**2 + 16*ee**2*LambdaSMEFT**4*MS**4 + 16*ee**2*LambdaSMEFT**4*MC**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MS**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 + 8*ee**2*LambdaSMEFT**2*vevhat**2*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 12*cHudRe*MC*MS*MW**2*yc*ys) - 128*MW**2*sth**2*vevhat**2*(cuWIm**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 + cuWRe**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 - 12*cdWIm*cuWIm*MC*MS*MW**2*yc*ys + 12*cdWRe*cuWRe*MC*MS*MW**2*yc*ys - (cdWIm**2 + cdWRe**2)*(2*MC**4 + 2*MS**4 - MS**2*MW**2 - MW**4 - MC**2*(4*MS**2 + MW**2))*ys**2) + ee**2*vevhat**4*(4*cHl322**2*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl322**2*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl322**2*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl322**2*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl322**2*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + 16*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 4*cHl311**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 48*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 24*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 4*cHudIm**2*MC**4*yc**2*ys**2 + 4*cHudRe**2*MC**4*yc**2*ys**2 - 8*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 8*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 4*cHudIm**2*MS**4*yc**2*ys**2 + 4*cHudRe**2*MS**4*yc**2*ys**2 + 
4*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 8*cHudIm**2*MW**4*yc**2*ys**2 - 8*cHudRe**2*MW**4*yc**2*ys**2 - 8*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) + 4*cHl311*(2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - cll1221*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 48*ee*MW**2*sth*vevhat**3*(2*cHl322*cuWRe*MC**3*yc - cll1221*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC*MS**2*yc + cll1221*cuWRe*MC*MS**2*yc - 2*cHl322*cuWRe*MC*MW**2*yc + cll1221*cuWRe*MC*MW**2*yc - 2*cdWRe*cHl322*MC**2*MS*ys + cdWRe*cll1221*MC**2*MS*ys + 2*cdWRe*cHl322*MS**3*ys - cdWRe*cll1221*MS**3*ys - 2*cdWRe*cHl322*MS*MW**2*ys + cdWRe*cll1221*MS*MW**2*ys + 2*cHudIm*cuWIm*MC**2*MS*yc**2*ys - 2*cHudRe*cuWRe*MC**2*MS*yc**2*ys - 2*cHudIm*cuWIm*MS**3*yc**2*ys + 2*cHudRe*cuWRe*MS**3*yc**2*ys + 2*cHudIm*cuWIm*MS*MW**2*yc**2*ys - 2*cHudRe*cuWRe*MS*MW**2*yc**2*ys + 2*cdWIm*cHudIm*MC**3*yc*ys**2 + 2*cdWRe*cHudRe*MC**3*yc*ys**2 - 2*cdWIm*cHudIm*MC*MS**2*yc*ys**2 - 2*cdWRe*cHudRe*MC*MS**2*yc*ys**2 - 2*cdWIm*cHudIm*MC*MW**2*yc*ys**2 - 2*cdWRe*cHudRe*MC*MW**2*yc*ys**2 + 2*cHl311*(cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cdWRe*MS*(-MC**2 + MS**2 - MW**2)*ys) + 4*cHj3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys))*cmath.sqrt(2))*cmath.sqrt(MC**4 + (MS**2 - MW**2)**2 - 2*MC**2*(MS**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MS**3*MW**2*sth**2)'})
Decay_ta__minus__ = Decay(name = 'Decay_ta__minus__',
particle = P.ta__minus__,
partial_widths = {(P.W__minus__,P.vt):'((MTA**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*MTA**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 8*(2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2)*vevhat**2 + 128*(ceWIm33**2 + ceWRe33**2)*MW**2*(2*MTA**2 + MW**2)*sth**2*vevhat**2 + (-2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)**2*ee**2*(MTA**2 + 2*MW**2)*vevhat**4 - 192*ceWRe33*ee*LambdaSMEFT**2*MTA*MW**2*sth*vevhat*cmath.sqrt(2) + 48*ceWRe33*(2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee*MTA*MW**2*sth*vevhat**3*cmath.sqrt(2)))/(1024.*cmath.pi*LambdaSMEFT**4*MTA**3*MW**2*sth**2)'})
Decay_t = Decay(name = 'Decay_t',
particle = P.t,
partial_widths = {(P.W__plus__,P.b):'((16*ee**2*LambdaSMEFT**4*MB**4 - 32*ee**2*LambdaSMEFT**4*MB**2*MT**2 + 16*ee**2*LambdaSMEFT**4*MT**4 + 16*ee**2*LambdaSMEFT**4*MB**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MT**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 8*ee**2*LambdaSMEFT**2*(-4*cHQ3*MB**4 - cll1221*MB**4 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 4*cHQ3*MT**4 - cll1221*MT**4 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*vevhat**2 - 128*MW**2*(-12*cbWIm*ctWIm*MB*MT*MW**2 + 12*cbWRe*ctWRe*MB*MT*MW**2 - (ctWIm**2 + ctWRe**2)*(2*MB**4 + 2*MT**4 - MT**2*MW**2 - MW**4 - MB**2*(4*MT**2 + MW**2)) + cbWIm**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)) + cbWRe**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)))*sth**2*vevhat**2 + ee**2*(16*cHQ3**2*MB**4 + 4*cHtbIm**2*MB**4 + 4*cHtbRe**2*MB**4 + 8*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 32*cHQ3**2*MB**2*MT**2 - 8*cHtbIm**2*MB**2*MT**2 - 8*cHtbRe**2*MB**2*MT**2 - 16*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 16*cHQ3**2*MT**4 + 4*cHtbIm**2*MT**4 + 4*cHtbRe**2*MT**4 + 8*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 16*cHQ3**2*MB**2*MW**2 + 4*cHtbIm**2*MB**2*MW**2 + 4*cHtbRe**2*MB**2*MW**2 + 8*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 + 96*cHQ3*cHtbRe*MB*MT*MW**2 + 24*cHtbRe*cll1221*MB*MT*MW**2 + 16*cHQ3**2*MT**2*MW**2 + 4*cHtbIm**2*MT**2*MW**2 + 4*cHtbRe**2*MT**2*MW**2 + 8*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 32*cHQ3**2*MW**4 - 8*cHtbIm**2*MW**4 - 8*cHtbRe**2*MW**4 - 16*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl322**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311*(-(cll1221*MB**4) + 
2*cll1221*MB**2*MT**2 - cll1221*MT**4 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) - 4*cHl322*(12*cHtbRe*MB*MT*MW**2 + 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + cll1221*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*vevhat**4 + 192*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth*vevhat*cmath.sqrt(2) + 48*ee*MW**2*(cbWRe*(-4*cHQ3*MB**3 - cll1221*MB**3 - 2*cHtbRe*MB**2*MT + 4*cHQ3*MB*MT**2 + cll1221*MB*MT**2 + 2*cHtbRe*MT**3 + 4*cHQ3*MB*MW**2 + cll1221*MB*MW**2 - 2*cHtbRe*MT*MW**2 + 2*cHl311*MB*(MB**2 - MT**2 - MW**2) + 2*cHl322*MB*(MB**2 - MT**2 - MW**2)) - 2*cHtbIm*(ctWIm*MB*(MB**2 - MT**2 - MW**2) + cbWIm*MT*(MB**2 - MT**2 + MW**2)) + ctWRe*(2*cHtbRe*MB*(MB**2 - MT**2 - MW**2) - (2*cHl311 + 2*cHl322 - 4*cHQ3 - cll1221)*MT*(MB**2 - MT**2 + MW**2)))*sth*vevhat**3*cmath.sqrt(2))*cmath.sqrt(MB**4 + (MT**2 - MW**2)**2 - 2*MB**2*(MT**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MT**3*MW**2*sth**2)'})
Decay_u = Decay(name = 'Decay_u',
particle = P.u,
partial_widths = {(P.W__plus__,P.d):'((16*ee**2*LambdaSMEFT**4*MD**4 - 32*ee**2*LambdaSMEFT**4*MD**2*MU**2 + 16*ee**2*LambdaSMEFT**4*MU**4 + 16*ee**2*LambdaSMEFT**4*MD**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MU**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 + 8*ee**2*LambdaSMEFT**2*vevhat**2*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 12*cHudRe*MD*MU*MW**2*ydo*yup) - 128*MW**2*sth**2*vevhat**2*(cdWIm**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 + cdWRe**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 - 12*cdWIm*cuWIm*MD*MU*MW**2*ydo*yup + 12*cdWRe*cuWRe*MD*MU*MW**2*ydo*yup - (cuWIm**2 + cuWRe**2)*(2*MD**4 + 2*MU**4 - MU**2*MW**2 - MW**4 - MD**2*(4*MU**2 + MW**2))*yup**2) + ee**2*vevhat**4*(4*cHl322**2*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl322**2*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl322**2*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl322**2*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl322**2*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + 16*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 4*cHl311**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 48*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 24*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 4*cHudIm**2*MD**4*ydo**2*yup**2 + 4*cHudRe**2*MD**4*ydo**2*yup**2 - 8*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 8*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 4*cHudIm**2*MU**4*ydo**2*yup**2 + 4*cHudRe**2*MU**4*ydo**2*yup**2 + 
4*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 8*cHudIm**2*MW**4*ydo**2*yup**2 - 8*cHudRe**2*MW**4*ydo**2*yup**2 - 8*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) + 4*cHl311*(2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - cll1221*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 48*ee*MW**2*sth*vevhat**3*(yup*(-2*cHl322*cuWRe*MD**2*MU + cll1221*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MU**3 - cll1221*cuWRe*MU**3 - 2*cHl322*cuWRe*MU*MW**2 + cll1221*cuWRe*MU*MW**2 + 4*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 2*cHl311*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 2*cdWIm*cHudIm*MD**2*MU*ydo**2 + 2*cdWIm*cHudIm*MU**3*ydo**2 - 2*cdWIm*cHudIm*MU*MW**2*ydo**2 - 2*cHudIm*cuWIm*MD**3*ydo*yup + 2*cHudRe*cuWRe*MD**3*ydo*yup + 2*cHudIm*cuWIm*MD*MU**2*ydo*yup - 2*cHudRe*cuWRe*MD*MU**2*ydo*yup + 2*cHudIm*cuWIm*MD*MW**2*ydo*yup - 2*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(2*cHl322*MD**3 - cll1221*MD**3 - 2*cHl322*MD*MU**2 + cll1221*MD*MU**2 - 2*cHl322*MD*MW**2 + cll1221*MD*MW**2 + 2*cHl311*MD*(MD**2 - MU**2 - MW**2) + 4*cHj3*MD*(-MD**2 + MU**2 + MW**2) - 2*cHudRe*MD**2*MU*ydo*yup + 2*cHudRe*MU**3*ydo*yup - 2*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2))*cmath.sqrt(MD**4 + (MU**2 - MW**2)**2 - 2*MD**2*(MU**2 + MW**2)))/(1024.*cmath.pi*LambdaSMEFT**4*MU**3*MW**2*sth**2)'})
# Auto-generated partial-width table for the W+ boson (UFO / FeynRules output,
# SMEFT model).  Each dict value is an analytic expression *string* — presumably
# evaluated later with the model's parameter values in scope, per the standard
# UFO Decay convention (TODO confirm against the model's object_library).
# The expressions carry dimension-6 SMEFT corrections suppressed by powers of
# LambdaSMEFT; `cmath` is used so complex intermediate values do not raise.
# Do NOT edit the expression strings by hand — regenerate from the model instead.
Decay_W__plus__ = Decay(name = 'Decay_W__plus__',
particle = P.W__plus__,
# W+ -> c s~  (hadronic; includes cuW/cdW dipole and cHud right-handed current terms)
partial_widths = {(P.c,P.s__tilde__):'-((16*ee**2*LambdaSMEFT**4*MC**4 - 32*ee**2*LambdaSMEFT**4*MC**2*MS**2 + 16*ee**2*LambdaSMEFT**4*MS**4 + 16*ee**2*LambdaSMEFT**4*MC**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MS**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 + 8*ee**2*LambdaSMEFT**2*vevhat**2*(-2*cHl322*MC**4 + cll1221*MC**4 + 4*cHl322*MC**2*MS**2 - 2*cll1221*MC**2*MS**2 - 2*cHl322*MS**4 + cll1221*MS**4 - 2*cHl322*MC**2*MW**2 + cll1221*MC**2*MW**2 - 2*cHl322*MS**2*MW**2 + cll1221*MS**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 12*cHudRe*MC*MS*MW**2*yc*ys) - 128*MW**2*sth**2*vevhat**2*(cuWIm**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 + cuWRe**2*(-2*MC**4 - 2*MS**4 + MS**2*MW**2 + MW**4 + MC**2*(4*MS**2 + MW**2))*yc**2 - 12*cdWIm*cuWIm*MC*MS*MW**2*yc*ys + 12*cdWRe*cuWRe*MC*MS*MW**2*yc*ys - (cdWIm**2 + cdWRe**2)*(2*MC**4 + 2*MS**4 - MS**2*MW**2 - MW**4 - MC**2*(4*MS**2 + MW**2))*ys**2) + ee**2*vevhat**4*(4*cHl322**2*MC**4 - 4*cHl322*cll1221*MC**4 + cll1221**2*MC**4 - 8*cHl322**2*MC**2*MS**2 + 8*cHl322*cll1221*MC**2*MS**2 - 2*cll1221**2*MC**2*MS**2 + 4*cHl322**2*MS**4 - 4*cHl322*cll1221*MS**4 + cll1221**2*MS**4 + 4*cHl322**2*MC**2*MW**2 - 4*cHl322*cll1221*MC**2*MW**2 + cll1221**2*MC**2*MW**2 + 4*cHl322**2*MS**2*MW**2 - 4*cHl322*cll1221*MS**2*MW**2 + cll1221**2*MS**2*MW**2 - 8*cHl322**2*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + 16*cHj3**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 4*cHl311**2*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 48*cHl322*cHudRe*MC*MS*MW**2*yc*ys + 24*cHudRe*cll1221*MC*MS*MW**2*yc*ys + 4*cHudIm**2*MC**4*yc**2*ys**2 + 4*cHudRe**2*MC**4*yc**2*ys**2 - 8*cHudIm**2*MC**2*MS**2*yc**2*ys**2 - 8*cHudRe**2*MC**2*MS**2*yc**2*ys**2 + 4*cHudIm**2*MS**4*yc**2*ys**2 + 4*cHudRe**2*MS**4*yc**2*ys**2 + 
4*cHudIm**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MC**2*MW**2*yc**2*ys**2 + 4*cHudIm**2*MS**2*MW**2*yc**2*ys**2 + 4*cHudRe**2*MS**2*MW**2*yc**2*ys**2 - 8*cHudIm**2*MW**4*yc**2*ys**2 - 8*cHudRe**2*MW**4*yc**2*ys**2 - 8*cHj3*(-(cll1221*MC**4) + 2*cll1221*MC**2*MS**2 - cll1221*MS**4 - cll1221*MC**2*MW**2 - cll1221*MS**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) + 2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys) + 4*cHl311*(2*cHl322*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - cll1221*(MC**4 + MS**4 + MS**2*MW**2 - 2*MW**4 + MC**2*(-2*MS**2 + MW**2)) - 12*cHudRe*MC*MS*MW**2*yc*ys)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys)*cmath.sqrt(2) + 48*ee*MW**2*sth*vevhat**3*(2*cHl322*cuWRe*MC**3*yc - cll1221*cuWRe*MC**3*yc - 2*cHl322*cuWRe*MC*MS**2*yc + cll1221*cuWRe*MC*MS**2*yc - 2*cHl322*cuWRe*MC*MW**2*yc + cll1221*cuWRe*MC*MW**2*yc - 2*cdWRe*cHl322*MC**2*MS*ys + cdWRe*cll1221*MC**2*MS*ys + 2*cdWRe*cHl322*MS**3*ys - cdWRe*cll1221*MS**3*ys - 2*cdWRe*cHl322*MS*MW**2*ys + cdWRe*cll1221*MS*MW**2*ys + 2*cHudIm*cuWIm*MC**2*MS*yc**2*ys - 2*cHudRe*cuWRe*MC**2*MS*yc**2*ys - 2*cHudIm*cuWIm*MS**3*yc**2*ys + 2*cHudRe*cuWRe*MS**3*yc**2*ys + 2*cHudIm*cuWIm*MS*MW**2*yc**2*ys - 2*cHudRe*cuWRe*MS*MW**2*yc**2*ys + 2*cdWIm*cHudIm*MC**3*yc*ys**2 + 2*cdWRe*cHudRe*MC**3*yc*ys**2 - 2*cdWIm*cHudIm*MC*MS**2*yc*ys**2 - 2*cdWRe*cHudRe*MC*MS**2*yc*ys**2 - 2*cdWIm*cHudIm*MC*MW**2*yc*ys**2 - 2*cdWRe*cHudRe*MC*MW**2*yc*ys**2 + 2*cHl311*(cuWRe*MC*(MC**2 - MS**2 - MW**2)*yc + cdWRe*MS*(-MC**2 + MS**2 - MW**2)*ys) + 4*cHj3*(cuWRe*MC*(-MC**2 + MS**2 + MW**2)*yc + cdWRe*MS*(MC**2 - MS**2 + MW**2)*ys))*cmath.sqrt(2))*cmath.sqrt(MC**4 + (MS**2 - MW**2)**2 - 2*MC**2*(MS**2 + MW**2)))/(512.*cmath.pi*LambdaSMEFT**4*MW**5*sth**2)',
# W+ -> t b~  (third generation; uses cHQ1/cHQ3/cHtb/ctW/cbW coefficients directly,
# without explicit Yukawa factors — presumably absorbed by the generator; TODO confirm)
(P.t,P.b__tilde__):'-((16*ee**2*LambdaSMEFT**4*MB**4 - 32*ee**2*LambdaSMEFT**4*MB**2*MT**2 + 16*ee**2*LambdaSMEFT**4*MT**4 + 16*ee**2*LambdaSMEFT**4*MB**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MT**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 - 8*ee**2*LambdaSMEFT**2*(-4*cHQ3*MB**4 - cll1221*MB**4 + 8*cHQ3*MB**2*MT**2 + 2*cll1221*MB**2*MT**2 - 4*cHQ3*MT**4 - cll1221*MT**4 - 4*cHQ3*MB**2*MW**2 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - 4*cHQ3*MT**2*MW**2 - cll1221*MT**2*MW**2 + 8*cHQ3*MW**4 + 2*cll1221*MW**4 + 2*cHl311*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)))*vevhat**2 - 128*MW**2*(-12*cbWIm*ctWIm*MB*MT*MW**2 + 12*cbWRe*ctWRe*MB*MT*MW**2 - (ctWIm**2 + ctWRe**2)*(2*MB**4 + 2*MT**4 - MT**2*MW**2 - MW**4 - MB**2*(4*MT**2 + MW**2)) + cbWIm**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)) + cbWRe**2*(-2*MB**4 - 2*MT**4 + MT**2*MW**2 + MW**4 + MB**2*(4*MT**2 + MW**2)))*sth**2*vevhat**2 + ee**2*(16*cHQ3**2*MB**4 + 4*cHtbIm**2*MB**4 + 4*cHtbRe**2*MB**4 + 8*cHQ3*cll1221*MB**4 + cll1221**2*MB**4 - 32*cHQ3**2*MB**2*MT**2 - 8*cHtbIm**2*MB**2*MT**2 - 8*cHtbRe**2*MB**2*MT**2 - 16*cHQ3*cll1221*MB**2*MT**2 - 2*cll1221**2*MB**2*MT**2 + 16*cHQ3**2*MT**4 + 4*cHtbIm**2*MT**4 + 4*cHtbRe**2*MT**4 + 8*cHQ3*cll1221*MT**4 + cll1221**2*MT**4 + 16*cHQ3**2*MB**2*MW**2 + 4*cHtbIm**2*MB**2*MW**2 + 4*cHtbRe**2*MB**2*MW**2 + 8*cHQ3*cll1221*MB**2*MW**2 + cll1221**2*MB**2*MW**2 + 96*cHQ3*cHtbRe*MB*MT*MW**2 + 24*cHtbRe*cll1221*MB*MT*MW**2 + 16*cHQ3**2*MT**2*MW**2 + 4*cHtbIm**2*MT**2*MW**2 + 4*cHtbRe**2*MT**2*MW**2 + 8*cHQ3*cll1221*MT**2*MW**2 + cll1221**2*MT**2*MW**2 - 32*cHQ3**2*MW**4 - 8*cHtbIm**2*MW**4 - 8*cHtbRe**2*MW**4 - 16*cHQ3*cll1221*MW**4 - 2*cll1221**2*MW**4 + 4*cHl311**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl322**2*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + 4*cHl311*(-(cll1221*MB**4) + 2*cll1221*MB**2*MT**2 - 
cll1221*MT**4 - cll1221*MB**2*MW**2 - 12*cHtbRe*MB*MT*MW**2 - cll1221*MT**2*MW**2 + 2*cll1221*MW**4 + 2*cHl322*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) - 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))) - 4*cHl322*(12*cHtbRe*MB*MT*MW**2 + 4*cHQ3*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2)) + cll1221*(MB**4 + MT**4 + MT**2*MW**2 - 2*MW**4 + MB**2*(-2*MT**2 + MW**2))))*vevhat**4 + 192*ee*LambdaSMEFT**2*MW**2*(ctWRe*MT*(MB**2 - MT**2 + MW**2) + cbWRe*MB*(-MB**2 + MT**2 + MW**2))*sth*vevhat*cmath.sqrt(2) + 48*ee*MW**2*(cbWRe*(-4*cHQ3*MB**3 - cll1221*MB**3 - 2*cHtbRe*MB**2*MT + 4*cHQ3*MB*MT**2 + cll1221*MB*MT**2 + 2*cHtbRe*MT**3 + 4*cHQ3*MB*MW**2 + cll1221*MB*MW**2 - 2*cHtbRe*MT*MW**2 + 2*cHl311*MB*(MB**2 - MT**2 - MW**2) + 2*cHl322*MB*(MB**2 - MT**2 - MW**2)) - 2*cHtbIm*(ctWIm*MB*(MB**2 - MT**2 - MW**2) + cbWIm*MT*(MB**2 - MT**2 + MW**2)) + ctWRe*(2*cHtbRe*MB*(MB**2 - MT**2 - MW**2) - (2*cHl311 + 2*cHl322 - 4*cHQ3 - cll1221)*MT*(MB**2 - MT**2 + MW**2)))*sth*vevhat**3*cmath.sqrt(2))*cmath.sqrt(MB**4 + (MT**2 - MW**2)**2 - 2*MB**2*(MT**2 + MW**2)))/(512.*cmath.pi*LambdaSMEFT**4*MW**5*sth**2)',
# W+ -> u d~  (first generation; same structure as the c s~ entry with
# MC/MS/yc/ys replaced by MU/MD/yup/ydo)
(P.u,P.d__tilde__):'-((16*ee**2*LambdaSMEFT**4*MD**4 - 32*ee**2*LambdaSMEFT**4*MD**2*MU**2 + 16*ee**2*LambdaSMEFT**4*MU**4 + 16*ee**2*LambdaSMEFT**4*MD**2*MW**2 + 16*ee**2*LambdaSMEFT**4*MU**2*MW**2 - 32*ee**2*LambdaSMEFT**4*MW**4 + 8*ee**2*LambdaSMEFT**2*vevhat**2*(-2*cHl322*MD**4 + cll1221*MD**4 + 4*cHl322*MD**2*MU**2 - 2*cll1221*MD**2*MU**2 - 2*cHl322*MU**4 + cll1221*MU**4 - 2*cHl322*MD**2*MW**2 + cll1221*MD**2*MW**2 - 2*cHl322*MU**2*MW**2 + cll1221*MU**2*MW**2 + 4*cHl322*MW**4 - 2*cll1221*MW**4 + 4*cHj3*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 12*cHudRe*MD*MU*MW**2*ydo*yup) - 128*MW**2*sth**2*vevhat**2*(cdWIm**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 + cdWRe**2*(-2*MD**4 - 2*MU**4 + MU**2*MW**2 + MW**4 + MD**2*(4*MU**2 + MW**2))*ydo**2 - 12*cdWIm*cuWIm*MD*MU*MW**2*ydo*yup + 12*cdWRe*cuWRe*MD*MU*MW**2*ydo*yup - (cuWIm**2 + cuWRe**2)*(2*MD**4 + 2*MU**4 - MU**2*MW**2 - MW**4 - MD**2*(4*MU**2 + MW**2))*yup**2) + ee**2*vevhat**4*(4*cHl322**2*MD**4 - 4*cHl322*cll1221*MD**4 + cll1221**2*MD**4 - 8*cHl322**2*MD**2*MU**2 + 8*cHl322*cll1221*MD**2*MU**2 - 2*cll1221**2*MD**2*MU**2 + 4*cHl322**2*MU**4 - 4*cHl322*cll1221*MU**4 + cll1221**2*MU**4 + 4*cHl322**2*MD**2*MW**2 - 4*cHl322*cll1221*MD**2*MW**2 + cll1221**2*MD**2*MW**2 + 4*cHl322**2*MU**2*MW**2 - 4*cHl322*cll1221*MU**2*MW**2 + cll1221**2*MU**2*MW**2 - 8*cHl322**2*MW**4 + 8*cHl322*cll1221*MW**4 - 2*cll1221**2*MW**4 + 16*cHj3**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 4*cHl311**2*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 48*cHl322*cHudRe*MD*MU*MW**2*ydo*yup + 24*cHudRe*cll1221*MD*MU*MW**2*ydo*yup + 4*cHudIm**2*MD**4*ydo**2*yup**2 + 4*cHudRe**2*MD**4*ydo**2*yup**2 - 8*cHudIm**2*MD**2*MU**2*ydo**2*yup**2 - 8*cHudRe**2*MD**2*MU**2*ydo**2*yup**2 + 4*cHudIm**2*MU**4*ydo**2*yup**2 + 4*cHudRe**2*MU**4*ydo**2*yup**2 + 
4*cHudIm**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MD**2*MW**2*ydo**2*yup**2 + 4*cHudIm**2*MU**2*MW**2*ydo**2*yup**2 + 4*cHudRe**2*MU**2*MW**2*ydo**2*yup**2 - 8*cHudIm**2*MW**4*ydo**2*yup**2 - 8*cHudRe**2*MW**4*ydo**2*yup**2 - 8*cHj3*(-(cll1221*MD**4) + 2*cll1221*MD**2*MU**2 - cll1221*MU**4 - cll1221*MD**2*MW**2 - cll1221*MU**2*MW**2 + 2*cll1221*MW**4 + 2*cHl311*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) + 2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup) + 4*cHl311*(2*cHl322*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - cll1221*(MD**4 + MU**4 + MU**2*MW**2 - 2*MW**4 + MD**2*(-2*MU**2 + MW**2)) - 12*cHudRe*MD*MU*MW**2*ydo*yup)) + 192*ee*LambdaSMEFT**2*MW**2*sth*vevhat*(cdWRe*MD*(-MD**2 + MU**2 + MW**2)*ydo + cuWRe*MU*(MD**2 - MU**2 + MW**2)*yup)*cmath.sqrt(2) + 48*ee*MW**2*sth*vevhat**3*(yup*(-2*cHl322*cuWRe*MD**2*MU + cll1221*cuWRe*MD**2*MU + 2*cHl322*cuWRe*MU**3 - cll1221*cuWRe*MU**3 - 2*cHl322*cuWRe*MU*MW**2 + cll1221*cuWRe*MU*MW**2 + 4*cHj3*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 2*cHl311*cuWRe*MU*(MD**2 - MU**2 + MW**2) - 2*cdWIm*cHudIm*MD**2*MU*ydo**2 + 2*cdWIm*cHudIm*MU**3*ydo**2 - 2*cdWIm*cHudIm*MU*MW**2*ydo**2 - 2*cHudIm*cuWIm*MD**3*ydo*yup + 2*cHudRe*cuWRe*MD**3*ydo*yup + 2*cHudIm*cuWIm*MD*MU**2*ydo*yup - 2*cHudRe*cuWRe*MD*MU**2*ydo*yup + 2*cHudIm*cuWIm*MD*MW**2*ydo*yup - 2*cHudRe*cuWRe*MD*MW**2*ydo*yup) + cdWRe*ydo*(2*cHl322*MD**3 - cll1221*MD**3 - 2*cHl322*MD*MU**2 + cll1221*MD*MU**2 - 2*cHl322*MD*MW**2 + cll1221*MD*MW**2 + 2*cHl311*MD*(MD**2 - MU**2 - MW**2) + 4*cHj3*MD*(-MD**2 + MU**2 + MW**2) - 2*cHudRe*MD**2*MU*ydo*yup + 2*cHudRe*MU**3*ydo*yup - 2*cHudRe*MU*MW**2*ydo*yup))*cmath.sqrt(2))*cmath.sqrt(MD**4 + (MU**2 - MW**2)**2 - 2*MD**2*(MU**2 + MW**2)))/(512.*cmath.pi*LambdaSMEFT**4*MW**5*sth**2)',
# W+ -> ve e+  (leptonic, first generation; much shorter since only the
# ceW dipole and four-lepton/Higgs-lepton operators contribute)
(P.ve,P.e__plus__):'((Me**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*Me**2 + 32*ee**2*LambdaSMEFT**4*MW**2 + 8*(2*cHl311 - 2*cHl322 + cll1221)*ee**2*LambdaSMEFT**2*(Me**2 + 2*MW**2)*vevhat**2 + 128*(ceWIm11**2 + ceWRe11**2)*MW**2*(2*Me**2 + MW**2)*sth**2*vevhat**2 + (2*cHl311 - 2*cHl322 + cll1221)**2*ee**2*(Me**2 + 2*MW**2)*vevhat**4 - 192*ceWRe11*ee*LambdaSMEFT**2*Me*MW**2*sth*vevhat*cmath.sqrt(2) - 48*ceWRe11*(2*cHl311 - 2*cHl322 + cll1221)*ee*Me*MW**2*sth*vevhat**3*cmath.sqrt(2)))/(1536.*cmath.pi*LambdaSMEFT**4*MW**5*sth**2)',
# W+ -> vm mu+  (leptonic, second generation; note the sign pattern of
# cHl311/cHl322 differs from the first-generation entry)
(P.vm,P.mu__plus__):'((MMU**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*MMU**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 8*(2*cHl311 - 2*cHl322 - cll1221)*ee**2*LambdaSMEFT**2*(MMU**2 + 2*MW**2)*vevhat**2 + 128*(ceWIm22**2 + ceWRe22**2)*MW**2*(2*MMU**2 + MW**2)*sth**2*vevhat**2 + (-2*cHl311 + 2*cHl322 + cll1221)**2*ee**2*(MMU**2 + 2*MW**2)*vevhat**4 - 192*ceWRe22*ee*LambdaSMEFT**2*MMU*MW**2*sth*vevhat*cmath.sqrt(2) + 48*ceWRe22*(2*cHl311 - 2*cHl322 - cll1221)*ee*MMU*MW**2*sth*vevhat**3*cmath.sqrt(2)))/(1536.*cmath.pi*LambdaSMEFT**4*MW**5*sth**2)',
# W+ -> vt ta+  (leptonic, third generation; involves the additional
# third-generation coefficient cHl333)
(P.vt,P.ta__plus__):'((MTA**2 - MW**2)**2*(16*ee**2*LambdaSMEFT**4*MTA**2 + 32*ee**2*LambdaSMEFT**4*MW**2 - 8*(2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee**2*LambdaSMEFT**2*(MTA**2 + 2*MW**2)*vevhat**2 + 128*(ceWIm33**2 + ceWRe33**2)*MW**2*(2*MTA**2 + MW**2)*sth**2*vevhat**2 + (-2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)**2*ee**2*(MTA**2 + 2*MW**2)*vevhat**4 - 192*ceWRe33*ee*LambdaSMEFT**2*MTA*MW**2*sth*vevhat*cmath.sqrt(2) + 48*ceWRe33*(2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee*MTA*MW**2*sth*vevhat**3*cmath.sqrt(2)))/(1536.*cmath.pi*LambdaSMEFT**4*MW**5*sth**2)'})
Decay_Z = Decay(name = 'Decay_Z',
particle = P.Z,
partial_widths = {(P.a,P.H):'-((MH**2 - MZ**2)**3*(gHza**2*LambdaSMEFT**4 - 2*gHza*LambdaSMEFT**2*(cHWB + 2*(cHB - cHW)*cth*sth - 2*cHWB*sth**2)*vevhat**2 + (cHWB**2*(1 - 2*sth**2)**2 + cHWBtil**2*(1 - 2*sth**2)**2 - 4*(cHB**2 + cHBtil**2 - 2*cHB*cHW + cHW**2 - 2*cHBtil*cHWtil + cHWtil**2)*sth**2*(-1 + sth**2) - 4*(cHB - cHW)*cHWB*cth*sth*(-1 + 2*sth**2) - 4*cHWBtil*(cHBtil - cHWtil)*cth*sth*(-1 + 2*sth**2))*vevhat**4))/(24.*cmath.pi*LambdaSMEFT**4*MZ**3*vevhat**2)',
(P.b,P.b__tilde__):'((-144*ee**2*LambdaSMEFT**4*MB**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 192*ee**2*LambdaSMEFT**4*(2*MB**2 + MZ**2)*sth**2 + 128*ee**2*LambdaSMEFT**4*(2*MB**2 + MZ**2)*sth**4 + 24*ee**2*LambdaSMEFT**2*(36*cHbq*MB**2 + 3*(2*cHl311 + 2*cHl322 - 4*cHQ1 - 4*cHQ3 - cll1221)*(MB**2 - MZ**2) + cHDD*(11*MB**2 + MZ**2))*vevhat**2 + 192*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2)*sth*vevhat**2 - 32*(6*cHbq*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2) + 4*cHDD*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2) + 3*(4*cHQ1*ee**2*LambdaSMEFT**2*MB**2 + 4*cHQ3*ee**2*LambdaSMEFT**2*MB**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MB**2 + 2*cHQ1*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHQ3*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 48*cbWIm**2*MB**2*MZ**2 - 96*cbWRe**2*MB**2*MZ**2 - 12*cbWIm**2*MZ**4 - 12*cbWRe**2*MZ**4 - 2*cHl311*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2) - 2*cHl322*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2)))*sth**2*vevhat**2 - 256*cth*(cHWB*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2) - 9*MZ**2*(cbBIm*cbWIm*(-4*MB**2 + MZ**2) + cbBRe*cbWRe*(8*MB**2 + MZ**2)))*sth**3*vevhat**2 + 64*(-4*cHl322*ee**2*LambdaSMEFT**2*MB**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MB**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 72*cbBIm**2*MB**2*MZ**2 + 144*cbBRe**2*MB**2*MZ**2 + 144*cbWIm**2*MB**2*MZ**2 - 288*cbWRe**2*MB**2*MZ**2 + 18*cbBIm**2*MZ**4 + 18*cbBRe**2*MZ**4 - 36*cbWIm**2*MZ**4 - 36*cbWRe**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2) - 2*cHl311*ee**2*LambdaSMEFT**2*(2*MB**2 + MZ**2))*sth**4*vevhat**2 - 2304*cth*MZ**2*(cbBIm*cbWIm*(-4*MB**2 + MZ**2) + cbBRe*cbWRe*(8*MB**2 + MZ**2))*sth**5*vevhat**2 + 1152*MZ**2*(-4*cbWIm**2*MB**2 + 8*cbWRe**2*MB**2 + cbWIm**2*MZ**2 + cbWRe**2*MZ**2 + cbBIm**2*(4*MB**2 - MZ**2) - cbBRe**2*(8*MB**2 + MZ**2))*sth**6*vevhat**2 + ee**2*(-144*cHbq**2*(MB**2 - MZ**2) - 9*(-2*cHl311 - 2*cHl322 + 4*cHQ1 + 4*cHQ3 + cll1221)**2*(MB**2 - MZ**2) - 6*cHDD*(2*cHl311 + 2*cHl322 - 4*cHQ1 - 4*cHQ3 - cll1221)*(11*MB**2 + MZ**2) + 
cHDD**2*(7*MB**2 + 17*MZ**2) - 24*cHbq*(9*(2*cHl311 + 2*cHl322 - 4*cHQ1 - 4*cHQ3 - cll1221)*MB**2 + cHDD*(MB**2 - 4*MZ**2)))*vevhat**4 + 16*cHWB*(12*cHbq + 5*cHDD + 3*(-2*cHl311 - 2*cHl322 + 4*cHQ1 + 4*cHQ3 + cll1221))*cth*ee**2*(2*MB**2 + MZ**2)*sth*vevhat**4 - 4*(5*cHDD**2 + 12*cHl311**2 + 24*cHl311*cHl322 + 12*cHl322**2 - 24*cHl311*cHQ1 - 24*cHl322*cHQ1 - 24*cHl311*cHQ3 - 24*cHl322*cHQ3 - 32*cHWB**2 - 4*cHDD*(4*cHl311 + 4*cHl322 - 3*cHQ1 - 3*cHQ3 - 2*cll1221) - 12*cHl311*cll1221 - 12*cHl322*cll1221 + 12*cHQ1*cll1221 + 12*cHQ3*cll1221 + 3*cll1221**2 + 12*cHbq*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*ee**2*(2*MB**2 + MZ**2)*sth**2*vevhat**4 - 64*cHWB*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MB**2 + MZ**2)*sth**3*vevhat**4 + 8*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MB**2 + MZ**2)*sth**4*vevhat**4 - 1728*cbWRe*ee*LambdaSMEFT**2*MB*MZ**2*sth*vevhat*cmath.sqrt(2) - 1728*cbBRe*cth*ee*LambdaSMEFT**2*MB*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 4032*cbWRe*ee*LambdaSMEFT**2*MB*MZ**2*sth**3*vevhat*cmath.sqrt(2) + 2304*cbBRe*cth*ee*LambdaSMEFT**2*MB*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 2304*cbWRe*ee*LambdaSMEFT**2*MB*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 144*cbWRe*(12*cHbq + 5*cHDD + 3*(-2*cHl311 - 2*cHl322 + 4*cHQ1 + 4*cHQ3 + cll1221))*ee*MB*MZ**2*sth*vevhat**3*cmath.sqrt(2) - 144*(16*cbWRe*cHWB + cbBRe*(12*cHbq + 5*cHDD + 3*(-2*cHl311 - 2*cHl322 + 4*cHQ1 + 4*cHQ3 + cll1221)))*cth*ee*MB*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) + 144*(-16*cbBRe*cHWB + cbWRe*(12*cHbq + 9*cHDD - 14*cHl311 - 14*cHl322 + 12*cHQ1 + 12*cHQ3 + 7*cll1221))*ee*MB*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 576*(4*cbWRe*cHWB + cbBRe*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MB*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 576*(-4*cbBRe*cHWB + cbWRe*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*ee*MB*MZ**2*sth**5*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*MB**2 + 
MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.c,P.c__tilde__):'((-144*ee**2*LambdaSMEFT**4*MC**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 384*ee**2*LambdaSMEFT**4*(2*MC**2 + MZ**2)*sth**2 + 512*ee**2*LambdaSMEFT**4*(2*MC**2 + MZ**2)*sth**4 + 24*ee**2*LambdaSMEFT**2*(19*cHDD*MC**2 + 5*cHDD*MZ**2 + 3*(-4*cHj3*MC**2 + 2*cHl311*MC**2 + 2*cHl322*MC**2 - 12*cHu*MC**2 - cll1221*MC**2 + 4*cHj3*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2 + 4*cHj1*(MC**2 - MZ**2)))*vevhat**2 + 384*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2)*sth*vevhat**2 + ee**2*(cHDD**2*(151*MC**2 + 89*MZ**2) - 6*cHDD*(76*cHj1*MC**2 - 76*cHj3*MC**2 + 38*cHl311*MC**2 + 38*cHl322*MC**2 + 28*cHu*MC**2 - 19*cll1221*MC**2 + 20*cHj1*MZ**2 - 20*cHj3*MZ**2 + 10*cHl311*MZ**2 + 10*cHl322*MZ**2 + 32*cHu*MZ**2 - 5*cll1221*MZ**2) - 9*(4*cHl311**2*MC**2 + 8*cHl311*cHl322*MC**2 + 4*cHl322**2*MC**2 - 48*cHl311*cHu*MC**2 - 48*cHl322*cHu*MC**2 + 16*cHu**2*MC**2 - 4*cHl311*cll1221*MC**2 - 4*cHl322*cll1221*MC**2 + 24*cHu*cll1221*MC**2 + cll1221**2*MC**2 - 4*cHl311**2*MZ**2 - 8*cHl311*cHl322*MZ**2 - 4*cHl322**2*MZ**2 - 16*cHu**2*MZ**2 + 4*cHl311*cll1221*MZ**2 + 4*cHl322*cll1221*MZ**2 - cll1221**2*MZ**2 + 16*cHj1**2*(MC**2 - MZ**2) + 16*cHj3**2*(MC**2 - MZ**2) + 8*cHj3*(-2*cHl311*MC**2 - 2*cHl322*MC**2 + 12*cHu*MC**2 + cll1221*MC**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2) - 8*cHj1*(-2*cHl311*MC**2 - 2*cHl322*MC**2 + 12*cHu*MC**2 + cll1221*MC**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2 + 4*cHj3*(MC**2 - MZ**2))))*vevhat**4 + 32*cHWB*(13*cHDD - 3*(4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 + 4*cHu - cll1221))*cth*ee**2*(2*MC**2 + MZ**2)*sth*vevhat**4 - 8*(13*cHDD**2 - 24*cHj3*cHl311 + 12*cHl311**2 - 24*cHj3*cHl322 + 24*cHl311*cHl322 + 12*cHl322**2 + 24*cHl311*cHu + 24*cHl322*cHu - 64*cHWB**2 - 4*cHDD*(3*cHj1 - 3*cHj3 + 8*cHl311 + 8*cHl322 + 3*cHu - 4*cll1221) + 12*cHj1*(2*cHl311 + 2*cHl322 - cll1221) + 12*cHj3*cll1221 - 12*cHl311*cll1221 - 12*cHl322*cll1221 - 12*cHu*cll1221 + 3*cll1221**2)*ee**2*(2*MC**2 + MZ**2)*sth**2*vevhat**4 - 256*cHWB*(cHDD - 
2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MC**2 + MZ**2)*sth**3*vevhat**4 + 32*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MC**2 + MZ**2)*sth**4*vevhat**4 + 2304*cth*MZ**2*(cuBIm*cuWIm*(-4*MC**2 + MZ**2) + cuBRe*cuWRe*(8*MC**2 + MZ**2))*sth**5*vevhat**2*yc**2 + 1152*MZ**2*(-4*cuWIm**2*MC**2 + 8*cuWRe**2*MC**2 + cuWIm**2*MZ**2 + cuWRe**2*MZ**2 + cuBIm**2*(4*MC**2 - MZ**2) - cuBRe**2*(8*MC**2 + MZ**2))*sth**6*vevhat**2*yc**2 + 128*sth**4*vevhat**2*(-8*cHl322*ee**2*LambdaSMEFT**2*MC**2 + 4*cll1221*ee**2*LambdaSMEFT**2*MC**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHDD*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 4*cHl311*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 36*cuBIm**2*MC**2*MZ**2*yc**2 + 72*cuBRe**2*MC**2*MZ**2*yc**2 + 72*cuWIm**2*MC**2*MZ**2*yc**2 - 144*cuWRe**2*MC**2*MZ**2*yc**2 + 9*cuBIm**2*MZ**4*yc**2 + 9*cuBRe**2*MZ**4*yc**2 - 18*cuWIm**2*MZ**4*yc**2 - 18*cuWRe**2*MZ**4*yc**2) - 256*cth*sth**3*vevhat**2*(4*cHWB*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) + 9*MZ**2*(cuBIm*cuWIm*(-4*MC**2 + MZ**2) + cuBRe*cuWRe*(8*MC**2 + MZ**2))*yc**2) - 64*sth**2*vevhat**2*(8*cHDD*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 3*(4*cHl311*ee**2*LambdaSMEFT**2*MC**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MC**2 + 4*cHu*ee**2*LambdaSMEFT**2*MC**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MC**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHu*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHj1*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 2*cHj3*ee**2*LambdaSMEFT**2*(2*MC**2 + MZ**2) - 24*cuWIm**2*MC**2*MZ**2*yc**2 + 48*cuWRe**2*MC**2*MZ**2*yc**2 + 6*cuWIm**2*MZ**4*yc**2 + 6*cuWRe**2*MZ**4*yc**2)) - 1728*cuWRe*ee*LambdaSMEFT**2*MC*MZ**2*sth*vevhat*yc*cmath.sqrt(2) + 1728*cth*cuBRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**2*vevhat*yc*cmath.sqrt(2) + 
6336*cuWRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**3*vevhat*yc*cmath.sqrt(2) - 4608*cth*cuBRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**4*vevhat*yc*cmath.sqrt(2) - 4608*cuWRe*ee*LambdaSMEFT**2*MC*MZ**2*sth**5*vevhat*yc*cmath.sqrt(2) - 144*(13*cHDD - 3*(4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 + 4*cHu - cll1221))*cuWRe*ee*MC*MZ**2*sth*vevhat**3*yc*cmath.sqrt(2) + 144*cth*(13*cHDD*cuBRe - 12*cHj1*cuBRe + 12*cHj3*cuBRe - 6*cHl311*cuBRe - 6*cHl322*cuBRe - 12*cHu*cuBRe + 3*cll1221*cuBRe - 32*cHWB*cuWRe)*ee*MC*MZ**2*sth**2*vevhat**3*yc*cmath.sqrt(2) + 144*(32*cHWB*cuBRe + (21*cHDD - 12*cHj1 + 12*cHj3 - 22*cHl311 - 22*cHl322 - 12*cHu + 11*cll1221)*cuWRe)*ee*MC*MZ**2*sth**3*vevhat**3*yc*cmath.sqrt(2) - 1152*cth*(cHDD*cuBRe - 2*cHl311*cuBRe - 2*cHl322*cuBRe + cll1221*cuBRe - 4*cHWB*cuWRe)*ee*MC*MZ**2*sth**4*vevhat**3*yc*cmath.sqrt(2) - 1152*(4*cHWB*cuBRe + (cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cuWRe)*ee*MC*MZ**2*sth**5*vevhat**3*yc*cmath.sqrt(2))*cmath.sqrt(-4*MC**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.d,P.d__tilde__):'((-144*ee**2*LambdaSMEFT**4*MD**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 192*ee**2*LambdaSMEFT**4*(2*MD**2 + MZ**2)*sth**2 + 128*ee**2*LambdaSMEFT**4*(2*MD**2 + MZ**2)*sth**4 + 24*ee**2*LambdaSMEFT**2*(36*cHd*MD**2 - 3*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*(MD**2 - MZ**2) + cHDD*(11*MD**2 + MZ**2))*vevhat**2 + 192*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2)*sth*vevhat**2 + ee**2*(-144*cHd**2*(MD**2 - MZ**2) - 9*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)**2*(MD**2 - MZ**2) + 6*cHDD*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*(11*MD**2 + MZ**2) + cHDD**2*(7*MD**2 + 17*MZ**2) - 24*cHd*(-9*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*MD**2 + cHDD*(MD**2 - 4*MZ**2)))*vevhat**4 + 16*cHWB*(12*cHd + 5*cHDD + 3*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee**2*(2*MD**2 + MZ**2)*sth*vevhat**4 - 4*(5*cHDD**2 - 24*cHj1*cHl311 - 24*cHj3*cHl311 + 12*cHl311**2 - 24*cHj1*cHl322 - 24*cHj3*cHl322 + 24*cHl311*cHl322 + 12*cHl322**2 - 32*cHWB**2 + 12*cHj1*cll1221 + 12*cHj3*cll1221 - 12*cHl311*cll1221 - 12*cHl322*cll1221 + 3*cll1221**2 + 12*cHd*(cHDD - 2*cHl311 - 2*cHl322 + cll1221) + 4*cHDD*(3*cHj1 + 3*cHj3 - 4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MD**2 + MZ**2)*sth**2*vevhat**4 - 64*cHWB*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MD**2 + MZ**2)*sth**3*vevhat**4 + 8*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MD**2 + MZ**2)*sth**4*vevhat**4 - 2304*cth*MZ**2*(cdBIm*cdWIm*(-4*MD**2 + MZ**2) + cdBRe*cdWRe*(8*MD**2 + MZ**2))*sth**5*vevhat**2*ydo**2 + 1152*MZ**2*(-4*cdWIm**2*MD**2 + 8*cdWRe**2*MD**2 + cdWIm**2*MZ**2 + cdWRe**2*MZ**2 + cdBIm**2*(4*MD**2 - MZ**2) - cdBRe**2*(8*MD**2 + MZ**2))*sth**6*vevhat**2*ydo**2 + 64*sth**4*vevhat**2*(-4*cHl322*ee**2*LambdaSMEFT**2*MD**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MD**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 
cHDD*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) - 2*cHl311*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) - 72*cdBIm**2*MD**2*MZ**2*ydo**2 + 144*cdBRe**2*MD**2*MZ**2*ydo**2 + 144*cdWIm**2*MD**2*MZ**2*ydo**2 - 288*cdWRe**2*MD**2*MZ**2*ydo**2 + 18*cdBIm**2*MZ**4*ydo**2 + 18*cdBRe**2*MZ**4*ydo**2 - 36*cdWIm**2*MZ**4*ydo**2 - 36*cdWRe**2*MZ**4*ydo**2) - 256*cth*sth**3*vevhat**2*(cHWB*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) - 9*MZ**2*(cdBIm*cdWIm*(-4*MD**2 + MZ**2) + cdBRe*cdWRe*(8*MD**2 + MZ**2))*ydo**2) - 32*sth**2*vevhat**2*(6*cHd*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) + 4*cHDD*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) + 3*(-4*cHl311*ee**2*LambdaSMEFT**2*MD**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MD**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MD**2 - 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHj1*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) + 2*cHj3*ee**2*LambdaSMEFT**2*(2*MD**2 + MZ**2) + 48*cdWIm**2*MD**2*MZ**2*ydo**2 - 96*cdWRe**2*MD**2*MZ**2*ydo**2 - 12*cdWIm**2*MZ**4*ydo**2 - 12*cdWRe**2*MZ**4*ydo**2)) - 1728*cdWRe*ee*LambdaSMEFT**2*MD*MZ**2*sth*vevhat*ydo*cmath.sqrt(2) - 1728*cdBRe*cth*ee*LambdaSMEFT**2*MD*MZ**2*sth**2*vevhat*ydo*cmath.sqrt(2) + 4032*cdWRe*ee*LambdaSMEFT**2*MD*MZ**2*sth**3*vevhat*ydo*cmath.sqrt(2) + 2304*cdBRe*cth*ee*LambdaSMEFT**2*MD*MZ**2*sth**4*vevhat*ydo*cmath.sqrt(2) - 2304*cdWRe*ee*LambdaSMEFT**2*MD*MZ**2*sth**5*vevhat*ydo*cmath.sqrt(2) - 144*cdWRe*(12*cHd + 5*cHDD + 3*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*ee*MD*MZ**2*sth*vevhat**3*ydo*cmath.sqrt(2) - 144*(16*cdWRe*cHWB + cdBRe*(12*cHd + 5*cHDD + 3*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)))*cth*ee*MD*MZ**2*sth**2*vevhat**3*ydo*cmath.sqrt(2) + 144*(-16*cdBRe*cHWB + cdWRe*(12*cHd + 9*cHDD + 12*cHj1 + 12*cHj3 - 14*cHl311 - 14*cHl322 + 7*cll1221))*ee*MD*MZ**2*sth**3*vevhat**3*ydo*cmath.sqrt(2) + 576*(4*cdWRe*cHWB + cdBRe*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MD*MZ**2*sth**4*vevhat**3*ydo*cmath.sqrt(2) - 576*(-4*cdBRe*cHWB + 
cdWRe*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*ee*MD*MZ**2*sth**5*vevhat**3*ydo*cmath.sqrt(2))*cmath.sqrt(-4*MD**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.e__minus__,P.e__plus__):'((-16*ee**2*LambdaSMEFT**4*Me**2 + 16*ee**2*LambdaSMEFT**4*MZ**2 - 64*ee**2*LambdaSMEFT**4*(2*Me**2 + MZ**2)*sth**2 + 128*ee**2*LambdaSMEFT**4*(2*Me**2 + MZ**2)*sth**4 + 8*ee**2*LambdaSMEFT**2*(12*cHe11*Me**2 - (4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*(Me**2 - MZ**2) + 3*cHDD*(3*Me**2 + MZ**2))*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2)*sth*vevhat**2 - 32*(4*cHl111*ee**2*LambdaSMEFT**2*Me**2 - 4*cHl322*ee**2*LambdaSMEFT**2*Me**2 + 2*cll1221*ee**2*LambdaSMEFT**2*Me**2 + 2*cHl111*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 16*ceWIm11**2*Me**2*MZ**2 - 32*ceWRe11**2*Me**2*MZ**2 - 4*ceWIm11**2*MZ**4 - 4*ceWRe11**2*MZ**4 + 4*cHDD*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2) + 2*cHe11*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2))*sth**2*vevhat**2 - 256*cth*(cHWB*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2) - MZ**2*(ceBIm11*ceWIm11*(-4*Me**2 + MZ**2) + ceBRe11*ceWRe11*(8*Me**2 + MZ**2)))*sth**3*vevhat**2 + 64*(-4*cHl322*ee**2*LambdaSMEFT**2*Me**2 + 2*cll1221*ee**2*LambdaSMEFT**2*Me**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 8*ceBIm11**2*Me**2*MZ**2 + 16*ceBRe11**2*Me**2*MZ**2 + 16*ceWIm11**2*Me**2*MZ**2 - 32*ceWRe11**2*Me**2*MZ**2 + 2*ceBIm11**2*MZ**4 + 2*ceBRe11**2*MZ**4 - 4*ceWIm11**2*MZ**4 - 4*ceWRe11**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2) - 2*cHl311*ee**2*LambdaSMEFT**2*(2*Me**2 + MZ**2))*sth**4*vevhat**2 - 256*cth*MZ**2*(ceBIm11*ceWIm11*(-4*Me**2 + MZ**2) + ceBRe11*ceWRe11*(8*Me**2 + MZ**2))*sth**5*vevhat**2 + 128*MZ**2*(-4*ceWIm11**2*Me**2 + 8*ceWRe11**2*Me**2 + ceWIm11**2*MZ**2 + ceWRe11**2*MZ**2 + ceBIm11**2*(4*Me**2 - MZ**2) - ceBRe11**2*(8*Me**2 + MZ**2))*sth**6*vevhat**2 + ee**2*(24*cHe11*(4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*Me**2 - 16*cHe11**2*(Me**2 - MZ**2) - (4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)**2*(Me**2 - MZ**2) + 6*cHDD*(4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*(3*Me**2 + MZ**2) + 
8*cHDD*cHe11*(5*Me**2 + 4*MZ**2) + cHDD**2*(47*Me**2 + 25*MZ**2))*vevhat**4 + 16*cHWB*(7*cHDD + 4*cHe11 + 4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*Me**2 + MZ**2)*sth*vevhat**4 - 4*(7*cHDD**2 - 8*cHl111*cHl311 - 4*cHl311**2 - 8*cHl111*cHl322 + 4*cHl322**2 - 32*cHWB**2 + 4*cHl111*cll1221 - 4*cHl322*cll1221 + cll1221**2 + 4*cHDD*(cHe11 + cHl111 - 3*cHl311 - 4*cHl322 + 2*cll1221) + cHe11*(-8*cHl311 - 8*cHl322 + 4*cll1221))*ee**2*(2*Me**2 + MZ**2)*sth**2*vevhat**4 - 64*cHWB*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*Me**2 + MZ**2)*sth**3*vevhat**4 + 8*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*Me**2 + MZ**2)*sth**4*vevhat**4 - 192*ceWRe11*ee*LambdaSMEFT**2*Me*MZ**2*sth*vevhat*cmath.sqrt(2) - 192*ceBRe11*cth*ee*LambdaSMEFT**2*Me*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 960*ceWRe11*ee*LambdaSMEFT**2*Me*MZ**2*sth**3*vevhat*cmath.sqrt(2) + 768*ceBRe11*cth*ee*LambdaSMEFT**2*Me*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 768*ceWRe11*ee*LambdaSMEFT**2*Me*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 48*ceWRe11*(7*cHDD + 4*cHe11 + 4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*ee*Me*MZ**2*sth*vevhat**3*cmath.sqrt(2) - 48*(16*ceWRe11*cHWB + ceBRe11*(7*cHDD + 4*cHe11 + 4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*Me*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) + 48*(-16*ceBRe11*cHWB + ceWRe11*(11*cHDD + 4*cHe11 + 4*cHl111 - 6*cHl311 - 10*cHl322 + 5*cll1221))*ee*Me*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 192*(4*ceWRe11*cHWB + ceBRe11*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*Me*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 192*(-4*ceBRe11*cHWB + ceWRe11*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*ee*Me*MZ**2*sth**5*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*Me**2 + MZ**2))/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.mu__minus__,P.mu__plus__):'((-16*ee**2*LambdaSMEFT**4*MMU**2 + 16*ee**2*LambdaSMEFT**4*MZ**2 - 64*ee**2*LambdaSMEFT**4*(2*MMU**2 + MZ**2)*sth**2 + 128*ee**2*LambdaSMEFT**4*(2*MMU**2 + MZ**2)*sth**4 + 8*ee**2*LambdaSMEFT**2*(12*cHe22*MMU**2 - (4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*(MMU**2 - MZ**2) + 3*cHDD*(3*MMU**2 + MZ**2))*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2)*sth*vevhat**2 - 32*(4*cHl122*ee**2*LambdaSMEFT**2*MMU**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MMU**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MMU**2 + 2*cHl122*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 16*ceWIm22**2*MMU**2*MZ**2 - 32*ceWRe22**2*MMU**2*MZ**2 - 4*ceWIm22**2*MZ**4 - 4*ceWRe22**2*MZ**4 + 4*cHDD*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2) + 2*cHe22*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2))*sth**2*vevhat**2 - 256*cth*(cHWB*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2) - MZ**2*(ceBIm22*ceWIm22*(-4*MMU**2 + MZ**2) + ceBRe22*ceWRe22*(8*MMU**2 + MZ**2)))*sth**3*vevhat**2 + 64*(-4*cHl322*ee**2*LambdaSMEFT**2*MMU**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MMU**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 8*ceBIm22**2*MMU**2*MZ**2 + 16*ceBRe22**2*MMU**2*MZ**2 + 16*ceWIm22**2*MMU**2*MZ**2 - 32*ceWRe22**2*MMU**2*MZ**2 + 2*ceBIm22**2*MZ**4 + 2*ceBRe22**2*MZ**4 - 4*ceWIm22**2*MZ**4 - 4*ceWRe22**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2) - 2*cHl311*ee**2*LambdaSMEFT**2*(2*MMU**2 + MZ**2))*sth**4*vevhat**2 - 256*cth*MZ**2*(ceBIm22*ceWIm22*(-4*MMU**2 + MZ**2) + ceBRe22*ceWRe22*(8*MMU**2 + MZ**2))*sth**5*vevhat**2 + 128*MZ**2*(-4*ceWIm22**2*MMU**2 + 8*ceWRe22**2*MMU**2 + ceWIm22**2*MZ**2 + ceWRe22**2*MZ**2 + ceBIm22**2*(4*MMU**2 - MZ**2) - ceBRe22**2*(8*MMU**2 + MZ**2))*sth**6*vevhat**2 + ee**2*(24*cHe22*(4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*MMU**2 - 16*cHe22**2*(MMU**2 - MZ**2) - (4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)**2*(MMU**2 - MZ**2) + 6*cHDD*(4*cHl122 - 2*cHl311 + 2*cHl322 + 
cll1221)*(3*MMU**2 + MZ**2) + 8*cHDD*cHe22*(5*MMU**2 + 4*MZ**2) + cHDD**2*(47*MMU**2 + 25*MZ**2))*vevhat**4 + 16*cHWB*(7*cHDD + 4*cHe22 + 4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*cth*ee**2*(2*MMU**2 + MZ**2)*sth*vevhat**4 - 4*(7*cHDD**2 - 8*cHl122*cHl311 + 4*cHl311**2 - 8*cHl122*cHl322 - 4*cHl322**2 - 32*cHWB**2 + 4*cHl122*cll1221 - 4*cHl311*cll1221 + cll1221**2 + 4*cHDD*(cHe22 + cHl122 - 4*cHl311 - 3*cHl322 + 2*cll1221) + cHe22*(-8*cHl311 - 8*cHl322 + 4*cll1221))*ee**2*(2*MMU**2 + MZ**2)*sth**2*vevhat**4 - 64*cHWB*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MMU**2 + MZ**2)*sth**3*vevhat**4 + 8*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MMU**2 + MZ**2)*sth**4*vevhat**4 - 192*ceWRe22*ee*LambdaSMEFT**2*MMU*MZ**2*sth*vevhat*cmath.sqrt(2) - 192*ceBRe22*cth*ee*LambdaSMEFT**2*MMU*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 960*ceWRe22*ee*LambdaSMEFT**2*MMU*MZ**2*sth**3*vevhat*cmath.sqrt(2) + 768*ceBRe22*cth*ee*LambdaSMEFT**2*MMU*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 768*ceWRe22*ee*LambdaSMEFT**2*MMU*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 48*ceWRe22*(7*cHDD + 4*cHe22 + 4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*ee*MMU*MZ**2*sth*vevhat**3*cmath.sqrt(2) - 48*(16*ceWRe22*cHWB + ceBRe22*(7*cHDD + 4*cHe22 + 4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221))*cth*ee*MMU*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) + 48*(-16*ceBRe22*cHWB + ceWRe22*(11*cHDD + 4*cHe22 + 4*cHl122 - 10*cHl311 - 6*cHl322 + 5*cll1221))*ee*MMU*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 192*(4*ceWRe22*cHWB + ceBRe22*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MMU*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 192*(-4*ceBRe22*cHWB + ceWRe22*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*ee*MMU*MZ**2*sth**5*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*MMU**2 + MZ**2))/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.s,P.s__tilde__):'((-144*ee**2*LambdaSMEFT**4*MS**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 192*ee**2*LambdaSMEFT**4*(2*MS**2 + MZ**2)*sth**2 + 128*ee**2*LambdaSMEFT**4*(2*MS**2 + MZ**2)*sth**4 + 24*ee**2*LambdaSMEFT**2*(36*cHd*MS**2 - 3*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*(MS**2 - MZ**2) + cHDD*(11*MS**2 + MZ**2))*vevhat**2 + 192*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2)*sth*vevhat**2 + ee**2*(-144*cHd**2*(MS**2 - MZ**2) - 9*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)**2*(MS**2 - MZ**2) + 6*cHDD*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*(11*MS**2 + MZ**2) + cHDD**2*(7*MS**2 + 17*MZ**2) - 24*cHd*(-9*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)*MS**2 + cHDD*(MS**2 - 4*MZ**2)))*vevhat**4 + 16*cHWB*(12*cHd + 5*cHDD + 3*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee**2*(2*MS**2 + MZ**2)*sth*vevhat**4 - 4*(5*cHDD**2 - 24*cHj1*cHl311 - 24*cHj3*cHl311 + 12*cHl311**2 - 24*cHj1*cHl322 - 24*cHj3*cHl322 + 24*cHl311*cHl322 + 12*cHl322**2 - 32*cHWB**2 + 12*cHj1*cll1221 + 12*cHj3*cll1221 - 12*cHl311*cll1221 - 12*cHl322*cll1221 + 3*cll1221**2 + 12*cHd*(cHDD - 2*cHl311 - 2*cHl322 + cll1221) + 4*cHDD*(3*cHj1 + 3*cHj3 - 4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MS**2 + MZ**2)*sth**2*vevhat**4 - 64*cHWB*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MS**2 + MZ**2)*sth**3*vevhat**4 + 8*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MS**2 + MZ**2)*sth**4*vevhat**4 - 2304*cth*MZ**2*(cdBIm*cdWIm*(-4*MS**2 + MZ**2) + cdBRe*cdWRe*(8*MS**2 + MZ**2))*sth**5*vevhat**2*ys**2 + 1152*MZ**2*(-4*cdWIm**2*MS**2 + 8*cdWRe**2*MS**2 + cdWIm**2*MZ**2 + cdWRe**2*MZ**2 + cdBIm**2*(4*MS**2 - MZ**2) - cdBRe**2*(8*MS**2 + MZ**2))*sth**6*vevhat**2*ys**2 + 64*sth**4*vevhat**2*(-4*cHl322*ee**2*LambdaSMEFT**2*MS**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MS**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 
cHDD*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) - 2*cHl311*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) - 72*cdBIm**2*MS**2*MZ**2*ys**2 + 144*cdBRe**2*MS**2*MZ**2*ys**2 + 144*cdWIm**2*MS**2*MZ**2*ys**2 - 288*cdWRe**2*MS**2*MZ**2*ys**2 + 18*cdBIm**2*MZ**4*ys**2 + 18*cdBRe**2*MZ**4*ys**2 - 36*cdWIm**2*MZ**4*ys**2 - 36*cdWRe**2*MZ**4*ys**2) - 256*cth*sth**3*vevhat**2*(cHWB*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) - 9*MZ**2*(cdBIm*cdWIm*(-4*MS**2 + MZ**2) + cdBRe*cdWRe*(8*MS**2 + MZ**2))*ys**2) - 32*sth**2*vevhat**2*(6*cHd*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) + 4*cHDD*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) + 3*(-4*cHl311*ee**2*LambdaSMEFT**2*MS**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MS**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MS**2 - 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHj1*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) + 2*cHj3*ee**2*LambdaSMEFT**2*(2*MS**2 + MZ**2) + 48*cdWIm**2*MS**2*MZ**2*ys**2 - 96*cdWRe**2*MS**2*MZ**2*ys**2 - 12*cdWIm**2*MZ**4*ys**2 - 12*cdWRe**2*MZ**4*ys**2)) - 1728*cdWRe*ee*LambdaSMEFT**2*MS*MZ**2*sth*vevhat*ys*cmath.sqrt(2) - 1728*cdBRe*cth*ee*LambdaSMEFT**2*MS*MZ**2*sth**2*vevhat*ys*cmath.sqrt(2) + 4032*cdWRe*ee*LambdaSMEFT**2*MS*MZ**2*sth**3*vevhat*ys*cmath.sqrt(2) + 2304*cdBRe*cth*ee*LambdaSMEFT**2*MS*MZ**2*sth**4*vevhat*ys*cmath.sqrt(2) - 2304*cdWRe*ee*LambdaSMEFT**2*MS*MZ**2*sth**5*vevhat*ys*cmath.sqrt(2) - 144*cdWRe*(12*cHd + 5*cHDD + 3*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221))*ee*MS*MZ**2*sth*vevhat**3*ys*cmath.sqrt(2) - 144*(16*cdWRe*cHWB + cdBRe*(12*cHd + 5*cHDD + 3*(4*cHj1 + 4*cHj3 - 2*cHl311 - 2*cHl322 + cll1221)))*cth*ee*MS*MZ**2*sth**2*vevhat**3*ys*cmath.sqrt(2) + 144*(-16*cdBRe*cHWB + cdWRe*(12*cHd + 9*cHDD + 12*cHj1 + 12*cHj3 - 14*cHl311 - 14*cHl322 + 7*cll1221))*ee*MS*MZ**2*sth**3*vevhat**3*ys*cmath.sqrt(2) + 576*(4*cdWRe*cHWB + cdBRe*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*cth*ee*MS*MZ**2*sth**4*vevhat**3*ys*cmath.sqrt(2) - 576*(-4*cdBRe*cHWB + cdWRe*(cHDD - 2*cHl311 
- 2*cHl322 + cll1221))*ee*MS*MZ**2*sth**5*vevhat**3*ys*cmath.sqrt(2))*cmath.sqrt(-4*MS**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.ta__minus__,P.ta__plus__):'((-16*ee**2*LambdaSMEFT**4*MTA**2 + 16*ee**2*LambdaSMEFT**4*MZ**2 - 64*ee**2*LambdaSMEFT**4*(2*MTA**2 + MZ**2)*sth**2 + 128*ee**2*LambdaSMEFT**4*(2*MTA**2 + MZ**2)*sth**4 + 8*ee**2*LambdaSMEFT**2*(12*cHe33*MTA**2 - (4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)*(MTA**2 - MZ**2) + 3*cHDD*(3*MTA**2 + MZ**2))*vevhat**2 + 64*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2)*sth*vevhat**2 - 32*(4*cHl133*ee**2*LambdaSMEFT**2*MTA**2 - 4*cHl311*ee**2*LambdaSMEFT**2*MTA**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MTA**2 + 4*cHl333*ee**2*LambdaSMEFT**2*MTA**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MTA**2 + 2*cHl133*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl333*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 16*ceWIm33**2*MTA**2*MZ**2 - 32*ceWRe33**2*MTA**2*MZ**2 - 4*ceWIm33**2*MZ**4 - 4*ceWRe33**2*MZ**4 + 4*cHDD*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2) + 2*cHe33*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2))*sth**2*vevhat**2 - 256*cth*(cHWB*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2) - MZ**2*(ceBIm33*ceWIm33*(-4*MTA**2 + MZ**2) + ceBRe33*ceWRe33*(8*MTA**2 + MZ**2)))*sth**3*vevhat**2 + 64*(-4*cHl322*ee**2*LambdaSMEFT**2*MTA**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MTA**2 - 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 8*ceBIm33**2*MTA**2*MZ**2 + 16*ceBRe33**2*MTA**2*MZ**2 + 16*ceWIm33**2*MTA**2*MZ**2 - 32*ceWRe33**2*MTA**2*MZ**2 + 2*ceBIm33**2*MZ**4 + 2*ceBRe33**2*MZ**4 - 4*ceWIm33**2*MZ**4 - 4*ceWRe33**2*MZ**4 + cHDD*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2) - 2*cHl311*ee**2*LambdaSMEFT**2*(2*MTA**2 + MZ**2))*sth**4*vevhat**2 - 256*cth*MZ**2*(ceBIm33*ceWIm33*(-4*MTA**2 + MZ**2) + ceBRe33*ceWRe33*(8*MTA**2 + MZ**2))*sth**5*vevhat**2 + 128*MZ**2*(-4*ceWIm33**2*MTA**2 + 8*ceWRe33**2*MTA**2 + ceWIm33**2*MZ**2 + ceWRe33**2*MZ**2 + ceBIm33**2*(4*MTA**2 - MZ**2) - ceBRe33**2*(8*MTA**2 + MZ**2))*sth**6*vevhat**2 + ee**2*(24*cHe33*(4*cHl133 - 2*cHl311 - 2*cHl322 + 
4*cHl333 + cll1221)*MTA**2 - 16*cHe33**2*(MTA**2 - MZ**2) - (4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)**2*(MTA**2 - MZ**2) + 6*cHDD*(4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)*(3*MTA**2 + MZ**2) + 8*cHDD*cHe33*(5*MTA**2 + 4*MZ**2) + cHDD**2*(47*MTA**2 + 25*MZ**2))*vevhat**4 + 16*cHWB*(7*cHDD + 4*cHe33 + 4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)*cth*ee**2*(2*MTA**2 + MZ**2)*sth*vevhat**4 - 4*(7*cHDD**2 - 8*cHl133*cHl311 + 4*cHl311**2 - 8*cHl133*cHl322 + 8*cHl311*cHl322 + 4*cHl322**2 - 8*cHl311*cHl333 - 8*cHl322*cHl333 - 32*cHWB**2 + 4*cHl133*cll1221 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + 4*cHl333*cll1221 + cll1221**2 + 4*cHDD*(cHe33 + cHl133 - 4*cHl311 - 4*cHl322 + cHl333 + 2*cll1221) + cHe33*(-8*cHl311 - 8*cHl322 + 4*cll1221))*ee**2*(2*MTA**2 + MZ**2)*sth**2*vevhat**4 - 64*cHWB*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MTA**2 + MZ**2)*sth**3*vevhat**4 + 8*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MTA**2 + MZ**2)*sth**4*vevhat**4 - 192*ceWRe33*ee*LambdaSMEFT**2*MTA*MZ**2*sth*vevhat*cmath.sqrt(2) - 192*ceBRe33*cth*ee*LambdaSMEFT**2*MTA*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 960*ceWRe33*ee*LambdaSMEFT**2*MTA*MZ**2*sth**3*vevhat*cmath.sqrt(2) + 768*ceBRe33*cth*ee*LambdaSMEFT**2*MTA*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 768*ceWRe33*ee*LambdaSMEFT**2*MTA*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 48*ceWRe33*(7*cHDD + 4*cHe33 + 4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221)*ee*MTA*MZ**2*sth*vevhat**3*cmath.sqrt(2) - 48*(16*ceWRe33*cHWB + ceBRe33*(7*cHDD + 4*cHe33 + 4*cHl133 - 2*cHl311 - 2*cHl322 + 4*cHl333 + cll1221))*cth*ee*MTA*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) + 48*(-16*ceBRe33*cHWB + ceWRe33*(11*cHDD + 4*cHe33 + 4*cHl133 - 10*cHl311 - 10*cHl322 + 4*cHl333 + 5*cll1221))*ee*MTA*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) + 192*(4*ceWRe33*cHWB + ceBRe33*(cHDD - 2*cHl311 - 2*cHl322 + 
cll1221))*cth*ee*MTA*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 192*(-4*ceBRe33*cHWB + ceWRe33*(cHDD - 2*cHl311 - 2*cHl322 + cll1221))*ee*MTA*MZ**2*sth**5*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*MTA**2 + MZ**2))/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.t,P.t__tilde__):'((-144*ee**2*LambdaSMEFT**4*MT**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 384*ee**2*LambdaSMEFT**4*(2*MT**2 + MZ**2)*sth**2 + 512*ee**2*LambdaSMEFT**4*(2*MT**2 + MZ**2)*sth**4 + 24*ee**2*LambdaSMEFT**2*(cHDD*(19*MT**2 + 5*MZ**2) + 3*(2*cHl322*MT**2 + 4*cHQ1*MT**2 - 4*cHQ3*MT**2 - 12*cHt*MT**2 - cll1221*MT**2 - 2*cHl322*MZ**2 - 4*cHQ1*MZ**2 + 4*cHQ3*MZ**2 + cll1221*MZ**2 + 2*cHl311*(MT**2 - MZ**2)))*vevhat**2 + 384*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2)*sth*vevhat**2 - 64*(8*cHDD*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2) - 3*(4*cHQ1*ee**2*LambdaSMEFT**2*MT**2 - 4*cHQ3*ee**2*LambdaSMEFT**2*MT**2 + 4*cHt*ee**2*LambdaSMEFT**2*MT**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MT**2 + 2*cHQ1*ee**2*LambdaSMEFT**2*MZ**2 - 2*cHQ3*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHt*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 24*ctWIm**2*MT**2*MZ**2 + 48*ctWRe**2*MT**2*MZ**2 + 6*ctWIm**2*MZ**4 + 6*ctWRe**2*MZ**4 + 2*cHl311*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2) + 2*cHl322*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2)))*sth**2*vevhat**2 - 256*cth*(4*cHWB*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2) + 9*MZ**2*(ctBIm*ctWIm*(-4*MT**2 + MZ**2) + ctBRe*ctWRe*(8*MT**2 + MZ**2)))*sth**3*vevhat**2 + 128*(-8*cHl322*ee**2*LambdaSMEFT**2*MT**2 + 4*cll1221*ee**2*LambdaSMEFT**2*MT**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MZ**2 - 36*ctBIm**2*MT**2*MZ**2 + 72*ctBRe**2*MT**2*MZ**2 + 72*ctWIm**2*MT**2*MZ**2 - 144*ctWRe**2*MT**2*MZ**2 + 9*ctBIm**2*MZ**4 + 9*ctBRe**2*MZ**4 - 18*ctWIm**2*MZ**4 - 18*ctWRe**2*MZ**4 + 2*cHDD*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2) - 4*cHl311*ee**2*LambdaSMEFT**2*(2*MT**2 + MZ**2))*sth**4*vevhat**2 + 2304*cth*MZ**2*(ctBIm*ctWIm*(-4*MT**2 + MZ**2) + ctBRe*ctWRe*(8*MT**2 + MZ**2))*sth**5*vevhat**2 + 1152*MZ**2*(-4*ctWIm**2*MT**2 + 8*ctWRe**2*MT**2 + ctWIm**2*MZ**2 + ctWRe**2*MZ**2 + ctBIm**2*(4*MT**2 - MZ**2) - ctBRe**2*(8*MT**2 + MZ**2))*sth**6*vevhat**2 + ee**2*(cHDD**2*(151*MT**2 + 89*MZ**2) - 6*cHDD*(38*cHl311*MT**2 + 
38*cHl322*MT**2 + 76*cHQ1*MT**2 - 76*cHQ3*MT**2 + 28*cHt*MT**2 - 19*cll1221*MT**2 + 10*cHl311*MZ**2 + 10*cHl322*MZ**2 + 20*cHQ1*MZ**2 - 20*cHQ3*MZ**2 + 32*cHt*MZ**2 - 5*cll1221*MZ**2) - 9*(16*cHQ1**2*MT**2 - 32*cHQ1*cHQ3*MT**2 + 16*cHQ3**2*MT**2 - 96*cHQ1*cHt*MT**2 + 96*cHQ3*cHt*MT**2 + 16*cHt**2*MT**2 - 8*cHQ1*cll1221*MT**2 + 8*cHQ3*cll1221*MT**2 + 24*cHt*cll1221*MT**2 + cll1221**2*MT**2 - 16*cHQ1**2*MZ**2 + 32*cHQ1*cHQ3*MZ**2 - 16*cHQ3**2*MZ**2 - 16*cHt**2*MZ**2 + 8*cHQ1*cll1221*MZ**2 - 8*cHQ3*cll1221*MZ**2 - cll1221**2*MZ**2 + 4*cHl311**2*(MT**2 - MZ**2) + 4*cHl322**2*(MT**2 - MZ**2) + 4*cHl322*(4*cHQ1*MT**2 - 4*cHQ3*MT**2 - 12*cHt*MT**2 - cll1221*MT**2 - 4*cHQ1*MZ**2 + 4*cHQ3*MZ**2 + cll1221*MZ**2) + 4*cHl311*(4*cHQ1*MT**2 - 4*cHQ3*MT**2 - 12*cHt*MT**2 - cll1221*MT**2 - 4*cHQ1*MZ**2 + 4*cHQ3*MZ**2 + cll1221*MZ**2 + 2*cHl322*(MT**2 - MZ**2))))*vevhat**4 + 32*cHWB*(13*cHDD - 3*(2*cHl311 + 2*cHl322 + 4*cHQ1 - 4*cHQ3 + 4*cHt - cll1221))*cth*ee**2*(2*MT**2 + MZ**2)*sth*vevhat**4 - 8*(13*cHDD**2 + 12*cHl311**2 + 12*cHl322**2 + 24*cHl322*cHQ1 - 24*cHl322*cHQ3 + 24*cHl322*cHt - 64*cHWB**2 - 4*cHDD*(8*cHl311 + 8*cHl322 + 3*cHQ1 - 3*cHQ3 + 3*cHt - 4*cll1221) + 12*cHl311*(2*cHl322 + 2*cHQ1 - 2*cHQ3 + 2*cHt - cll1221) - 12*cHl322*cll1221 - 12*cHQ1*cll1221 + 12*cHQ3*cll1221 - 12*cHt*cll1221 + 3*cll1221**2)*ee**2*(2*MT**2 + MZ**2)*sth**2*vevhat**4 - 256*cHWB*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MT**2 + MZ**2)*sth**3*vevhat**4 + 32*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MT**2 + MZ**2)*sth**4*vevhat**4 - 1728*ctWRe*ee*LambdaSMEFT**2*MT*MZ**2*sth*vevhat*cmath.sqrt(2) + 1728*ctBRe*cth*ee*LambdaSMEFT**2*MT*MZ**2*sth**2*vevhat*cmath.sqrt(2) + 6336*ctWRe*ee*LambdaSMEFT**2*MT*MZ**2*sth**3*vevhat*cmath.sqrt(2) - 4608*ctBRe*cth*ee*LambdaSMEFT**2*MT*MZ**2*sth**4*vevhat*cmath.sqrt(2) - 
4608*ctWRe*ee*LambdaSMEFT**2*MT*MZ**2*sth**5*vevhat*cmath.sqrt(2) - 144*(13*cHDD - 3*(2*cHl311 + 2*cHl322 + 4*cHQ1 - 4*cHQ3 + 4*cHt - cll1221))*ctWRe*ee*MT*MZ**2*sth*vevhat**3*cmath.sqrt(2) + 144*cth*(13*cHDD*ctBRe - 6*cHl311*ctBRe - 6*cHl322*ctBRe - 12*cHQ1*ctBRe + 12*cHQ3*ctBRe - 12*cHt*ctBRe + 3*cll1221*ctBRe - 32*cHWB*ctWRe)*ee*MT*MZ**2*sth**2*vevhat**3*cmath.sqrt(2) + 144*(32*cHWB*ctBRe + (21*cHDD - 22*cHl311 - 22*cHl322 - 12*cHQ1 + 12*cHQ3 - 12*cHt + 11*cll1221)*ctWRe)*ee*MT*MZ**2*sth**3*vevhat**3*cmath.sqrt(2) - 1152*cth*(cHDD*ctBRe - 2*cHl311*ctBRe - 2*cHl322*ctBRe + cll1221*ctBRe - 4*cHWB*ctWRe)*ee*MT*MZ**2*sth**4*vevhat**3*cmath.sqrt(2) - 1152*(4*cHWB*ctBRe + (cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ctWRe)*ee*MT*MZ**2*sth**5*vevhat**3*cmath.sqrt(2))*cmath.sqrt(-4*MT**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.u,P.u__tilde__):'((-144*ee**2*LambdaSMEFT**4*MU**2 + 144*ee**2*LambdaSMEFT**4*MZ**2 - 384*ee**2*LambdaSMEFT**4*(2*MU**2 + MZ**2)*sth**2 + 512*ee**2*LambdaSMEFT**4*(2*MU**2 + MZ**2)*sth**4 + 24*ee**2*LambdaSMEFT**2*(19*cHDD*MU**2 + 5*cHDD*MZ**2 + 3*(-4*cHj3*MU**2 + 2*cHl311*MU**2 + 2*cHl322*MU**2 - 12*cHu*MU**2 - cll1221*MU**2 + 4*cHj3*MZ**2 - 2*cHl311*MZ**2 - 2*cHl322*MZ**2 + cll1221*MZ**2 + 4*cHj1*(MU**2 - MZ**2)))*vevhat**2 + 384*cHWB*cth*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2)*sth*vevhat**2 + ee**2*(cHDD**2*(151*MU**2 + 89*MZ**2) - 6*cHDD*(76*cHj1*MU**2 - 76*cHj3*MU**2 + 38*cHl311*MU**2 + 38*cHl322*MU**2 + 28*cHu*MU**2 - 19*cll1221*MU**2 + 20*cHj1*MZ**2 - 20*cHj3*MZ**2 + 10*cHl311*MZ**2 + 10*cHl322*MZ**2 + 32*cHu*MZ**2 - 5*cll1221*MZ**2) - 9*(4*cHl311**2*MU**2 + 8*cHl311*cHl322*MU**2 + 4*cHl322**2*MU**2 - 48*cHl311*cHu*MU**2 - 48*cHl322*cHu*MU**2 + 16*cHu**2*MU**2 - 4*cHl311*cll1221*MU**2 - 4*cHl322*cll1221*MU**2 + 24*cHu*cll1221*MU**2 + cll1221**2*MU**2 - 4*cHl311**2*MZ**2 - 8*cHl311*cHl322*MZ**2 - 4*cHl322**2*MZ**2 - 16*cHu**2*MZ**2 + 4*cHl311*cll1221*MZ**2 + 4*cHl322*cll1221*MZ**2 - cll1221**2*MZ**2 + 16*cHj1**2*(MU**2 - MZ**2) + 16*cHj3**2*(MU**2 - MZ**2) + 8*cHj3*(-2*cHl311*MU**2 - 2*cHl322*MU**2 + 12*cHu*MU**2 + cll1221*MU**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2) - 8*cHj1*(-2*cHl311*MU**2 - 2*cHl322*MU**2 + 12*cHu*MU**2 + cll1221*MU**2 + 2*cHl311*MZ**2 + 2*cHl322*MZ**2 - cll1221*MZ**2 + 4*cHj3*(MU**2 - MZ**2))))*vevhat**4 + 32*cHWB*(13*cHDD - 3*(4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 + 4*cHu - cll1221))*cth*ee**2*(2*MU**2 + MZ**2)*sth*vevhat**4 - 8*(13*cHDD**2 - 24*cHj3*cHl311 + 12*cHl311**2 - 24*cHj3*cHl322 + 24*cHl311*cHl322 + 12*cHl322**2 + 24*cHl311*cHu + 24*cHl322*cHu - 64*cHWB**2 - 4*cHDD*(3*cHj1 - 3*cHj3 + 8*cHl311 + 8*cHl322 + 3*cHu - 4*cll1221) + 12*cHj1*(2*cHl311 + 2*cHl322 - cll1221) + 12*cHj3*cll1221 - 12*cHl311*cll1221 - 12*cHl322*cll1221 - 12*cHu*cll1221 + 3*cll1221**2)*ee**2*(2*MU**2 + MZ**2)*sth**2*vevhat**4 - 256*cHWB*(cHDD - 
2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*(2*MU**2 + MZ**2)*sth**3*vevhat**4 + 32*(cHDD**2 + 4*cHl311**2 + 8*cHl311*cHl322 + 4*cHl322**2 - 16*cHWB**2 - 4*cHl311*cll1221 - 4*cHl322*cll1221 + cll1221**2 + cHDD*(-4*cHl311 - 4*cHl322 + 2*cll1221))*ee**2*(2*MU**2 + MZ**2)*sth**4*vevhat**4 + 2304*cth*MZ**2*(cuBIm*cuWIm*(-4*MU**2 + MZ**2) + cuBRe*cuWRe*(8*MU**2 + MZ**2))*sth**5*vevhat**2*yup**2 + 1152*MZ**2*(-4*cuWIm**2*MU**2 + 8*cuWRe**2*MU**2 + cuWIm**2*MZ**2 + cuWRe**2*MZ**2 + cuBIm**2*(4*MU**2 - MZ**2) - cuBRe**2*(8*MU**2 + MZ**2))*sth**6*vevhat**2*yup**2 + 128*sth**4*vevhat**2*(-8*cHl322*ee**2*LambdaSMEFT**2*MU**2 + 4*cll1221*ee**2*LambdaSMEFT**2*MU**2 - 4*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 2*cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHDD*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 4*cHl311*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 36*cuBIm**2*MU**2*MZ**2*yup**2 + 72*cuBRe**2*MU**2*MZ**2*yup**2 + 72*cuWIm**2*MU**2*MZ**2*yup**2 - 144*cuWRe**2*MU**2*MZ**2*yup**2 + 9*cuBIm**2*MZ**4*yup**2 + 9*cuBRe**2*MZ**4*yup**2 - 18*cuWIm**2*MZ**4*yup**2 - 18*cuWRe**2*MZ**4*yup**2) - 256*cth*sth**3*vevhat**2*(4*cHWB*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) + 9*MZ**2*(cuBIm*cuWIm*(-4*MU**2 + MZ**2) + cuBRe*cuWRe*(8*MU**2 + MZ**2))*yup**2) - 64*sth**2*vevhat**2*(8*cHDD*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 3*(4*cHl311*ee**2*LambdaSMEFT**2*MU**2 + 4*cHl322*ee**2*LambdaSMEFT**2*MU**2 + 4*cHu*ee**2*LambdaSMEFT**2*MU**2 - 2*cll1221*ee**2*LambdaSMEFT**2*MU**2 + 2*cHl311*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHl322*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHu*ee**2*LambdaSMEFT**2*MZ**2 - cll1221*ee**2*LambdaSMEFT**2*MZ**2 + 2*cHj1*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 2*cHj3*ee**2*LambdaSMEFT**2*(2*MU**2 + MZ**2) - 24*cuWIm**2*MU**2*MZ**2*yup**2 + 48*cuWRe**2*MU**2*MZ**2*yup**2 + 6*cuWIm**2*MZ**4*yup**2 + 6*cuWRe**2*MZ**4*yup**2)) - 1728*cuWRe*ee*LambdaSMEFT**2*MU*MZ**2*sth*vevhat*yup*cmath.sqrt(2) + 1728*cth*cuBRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**2*vevhat*yup*cmath.sqrt(2) + 
6336*cuWRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**3*vevhat*yup*cmath.sqrt(2) - 4608*cth*cuBRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**4*vevhat*yup*cmath.sqrt(2) - 4608*cuWRe*ee*LambdaSMEFT**2*MU*MZ**2*sth**5*vevhat*yup*cmath.sqrt(2) - 144*(13*cHDD - 3*(4*cHj1 - 4*cHj3 + 2*cHl311 + 2*cHl322 + 4*cHu - cll1221))*cuWRe*ee*MU*MZ**2*sth*vevhat**3*yup*cmath.sqrt(2) + 144*cth*(13*cHDD*cuBRe - 12*cHj1*cuBRe + 12*cHj3*cuBRe - 6*cHl311*cuBRe - 6*cHl322*cuBRe - 12*cHu*cuBRe + 3*cll1221*cuBRe - 32*cHWB*cuWRe)*ee*MU*MZ**2*sth**2*vevhat**3*yup*cmath.sqrt(2) + 144*(32*cHWB*cuBRe + (21*cHDD - 12*cHj1 + 12*cHj3 - 22*cHl311 - 22*cHl322 - 12*cHu + 11*cll1221)*cuWRe)*ee*MU*MZ**2*sth**3*vevhat**3*yup*cmath.sqrt(2) - 1152*cth*(cHDD*cuBRe - 2*cHl311*cuBRe - 2*cHl322*cuBRe + cll1221*cuBRe - 4*cHWB*cuWRe)*ee*MU*MZ**2*sth**4*vevhat**3*yup*cmath.sqrt(2) - 1152*(4*cHWB*cuBRe + (cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cuWRe)*ee*MU*MZ**2*sth**5*vevhat**3*yup*cmath.sqrt(2))*cmath.sqrt(-4*MU**2 + MZ**2))/(4608.*cth**2*cmath.pi*LambdaSMEFT**4*MZ**2*sth**2)',
(P.ve,P.ve__tilde__):'(MZ*(4*ee*LambdaSMEFT**2 + (-cHDD - 4*cHl111 + 2*cHl311 - 2*cHl322 + cll1221)*ee*vevhat**2)**2)/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*sth**2)',
(P.vm,P.vm__tilde__):'(MZ*(4*ee*LambdaSMEFT**2 + (-cHDD - 4*cHl122 - 2*cHl311 + 2*cHl322 + cll1221)*ee*vevhat**2)**2)/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*sth**2)',
(P.vt,P.vt__tilde__):'(MZ*(-4*ee*LambdaSMEFT**2 + (cHDD + 4*cHl133 + 2*cHl311 + 2*cHl322 - 4*cHl333 - cll1221)*ee*vevhat**2)**2)/(1536.*cth**2*cmath.pi*LambdaSMEFT**4*sth**2)',
(P.W__minus__,P.W__plus__):'((-768*ee**2*LambdaSMEFT**4*MW**6 - 1088*ee**2*LambdaSMEFT**4*MW**4*MZ**2 + 256*ee**2*LambdaSMEFT**4*MW**2*MZ**4 + 16*ee**2*LambdaSMEFT**4*MZ**6 + 1920*cW*ee*LambdaSMEFT**2*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth + 16*(ee**2*LambdaSMEFT**4*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) - 8*MW**2*MZ**2*(9*cW**2*(8*MW**6 - 6*MW**4*MZ**2 + 9*MW**2*MZ**4 - 2*MZ**6) - cWtil**2*(36*MW**6 + 6*MW**4*MZ**2 - 11*MW**2*MZ**4 + 2*MZ**6)))*sth**2 - 1920*cW*ee*LambdaSMEFT**2*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth**3 + 128*MW**2*MZ**2*(9*cW**2*(8*MW**6 - 6*MW**4*MZ**2 + 9*MW**2*MZ**4 - 2*MZ**6) - cWtil**2*(36*MW**6 + 6*MW**4*MZ**2 - 11*MW**2*MZ**4 + 2*MZ**6))*sth**4 - 8*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*LambdaSMEFT**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6)*vevhat**2 + 480*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cW*ee*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth*vevhat**2 - 64*cHWB*cth*ee**2*LambdaSMEFT**2*MW**2*(24*MW**4 + 14*MW**2*MZ**2 - 5*MZ**4)*sth*vevhat**2 + 8*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*ee**2*LambdaSMEFT**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6)*sth**2*vevhat**2 + 256*cth*ee*MW**2*MZ**2*(3*cHWB*cW*(4*MW**4 + 3*MW**2*MZ**2 - MZ**4) - cHWBtil*cWtil*(6*MW**4 - 4*MW**2*MZ**2 + MZ**4))*sth**2*vevhat**2 - 480*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cW*ee*MW**2*MZ**4*(4*MW**2 - MZ**2)*sth**3*vevhat**2 - (cHDD - 2*cHl311 - 2*cHl322 + cll1221)**2*ee**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6)*vevhat**4 - 16*cHWB*(cHDD - 2*cHl311 - 2*cHl322 + cll1221)*cth*ee**2*MW**2*(24*MW**4 + 14*MW**2*MZ**2 - 5*MZ**4)*sth*vevhat**4 + ee**2*(192*cHl322**2*MW**6 - 768*cHWB**2*MW**6 - 192*cHl322*cll1221*MW**6 + 48*cll1221**2*MW**6 + 272*cHl322**2*MW**4*MZ**2 - 64*cHWB**2*MW**4*MZ**2 + 128*cHWBtil**2*MW**4*MZ**2 - 272*cHl322*cll1221*MW**4*MZ**2 + 68*cll1221**2*MW**4*MZ**2 - 64*cHl322**2*MW**2*MZ**4 + 64*cHWB**2*MW**2*MZ**4 + 64*cHWBtil**2*MW**2*MZ**4 + 64*cHl322*cll1221*MW**2*MZ**4 - 16*cll1221**2*MW**2*MZ**4 - 4*cHl322**2*MZ**6 
+ 4*cHl322*cll1221*MZ**6 - cll1221**2*MZ**6 + cHDD**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) + 4*cHl311**2*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) + 4*cHl311*(2*cHl322 - cll1221)*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6) - 2*cHDD*(2*cHl311 + 2*cHl322 - cll1221)*(48*MW**6 + 68*MW**4*MZ**2 - 16*MW**2*MZ**4 - MZ**6))*sth**2*vevhat**4)*cmath.sqrt(-4*MW**2 + MZ**2))/(3072.*cmath.pi*LambdaSMEFT**4*MW**4*MZ**2*sth**2)'})
| 1,033.674157
| 5,020
| 0.571736
| 20,354
| 91,997
| 2.570895
| 0.011103
| 0.039329
| 0.037532
| 0.056757
| 0.918227
| 0.89455
| 0.843086
| 0.816905
| 0.774079
| 0.733393
| 0
| 0.207189
| 0.105493
| 91,997
| 88
| 5,021
| 1,045.420455
| 0.428693
| 0.001663
| 0
| 0
| 0
| 0.623188
| 0.962118
| 0.494986
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028986
| 0
| 0.028986
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
11b3b5c3111958e1739f1463f6b1fceea178e2d0
| 36
|
py
|
Python
|
python-project/pythonproject/__init__.py
|
hgrif/oozie-pyspark-workflow
|
5351e24800766e9836d5c022d9ad8769d9d24faf
|
[
"CNRI-Python"
] | 14
|
2017-08-11T12:53:16.000Z
|
2021-02-16T16:11:37.000Z
|
python-project/pythonproject/__init__.py
|
hgrif/oozie-pyspark-workflow
|
5351e24800766e9836d5c022d9ad8769d9d24faf
|
[
"CNRI-Python"
] | null | null | null |
python-project/pythonproject/__init__.py
|
hgrif/oozie-pyspark-workflow
|
5351e24800766e9836d5c022d9ad8769d9d24faf
|
[
"CNRI-Python"
] | 6
|
2017-05-23T08:00:03.000Z
|
2020-07-16T15:20:44.000Z
|
from . import foo
from . import bar
| 12
| 17
| 0.722222
| 6
| 36
| 4.333333
| 0.666667
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 36
| 2
| 18
| 18
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
11c5bfc2fd939fc32c636689831b43afbf71f165
| 7,106
|
py
|
Python
|
models.py
|
Eudialyte/SepGAT
|
6ea77714d1b2f2f5d0857cddcc9f1f5f9c0bcf50
|
[
"MIT"
] | null | null | null |
models.py
|
Eudialyte/SepGAT
|
6ea77714d1b2f2f5d0857cddcc9f1f5f9c0bcf50
|
[
"MIT"
] | null | null | null |
models.py
|
Eudialyte/SepGAT
|
6ea77714d1b2f2f5d0857cddcc9f1f5f9c0bcf50
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import torch.nn.functional as F
from layers import GraphConvolution, SGATLayer, SGATMultiLayer # , SGAT1pLayer, SGATMultiLayer
class GCN(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout, node_dropout, edge_dropout):
super(GCN, self).__init__()
self.gc1 = GraphConvolution(nfeat, nhid)
self.gc2 = GraphConvolution(nhid, nclass)
self.dropout = dropout
self.node_dropout = node_dropout
self.edge_dropout = edge_dropout
def forward(self, x, adj):
x = F.dropout(x, self.dropout, training=self.training)
x = F.relu(self.gc1(x, adj, self.node_dropout, self.edge_dropout))
x = F.dropout(x, self.dropout, training=self.training)
x = self.gc2(x, adj, self.node_dropout, self.edge_dropout)
return x
class GCN1(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout):
super(GCN1, self).__init__()
self.gc1 = GraphConvolution(nfeat, nclass)
self.dropout = dropout
def forward(self, x, adj):
x = F.dropout(x, self.dropout, training=self.training)
x = self.gc1(x, adj)
return x
class GCN_Linear(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout):
super(GCN_Linear, self).__init__()
self.gc1 = GraphConvolution(nfeat, nhid)
self.linear2 = nn.Linear(nhid, nclass, bias=True)
self.dropout = dropout
def forward(self, x, adj):
x = F.dropout(x, self.dropout, training=self.training)
x = F.relu(self.gc1(x, adj))
x = F.dropout(x, self.dropout, training=self.training)
x = self.linear2(x)
return x
class Linear_GCN(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout):
super(Linear_GCN, self).__init__()
self.linear1 = nn.Linear(nfeat, nhid, bias=True)
self.gc2 = GraphConvolution(nhid, nclass)
self.dropout = dropout
def forward(self, x, adj):
x = F.dropout(x, self.dropout, training=self.training)
x = F.relu(self.linear1(x))
x = F.dropout(x, self.dropout, training=self.training)
x = self.gc2(x)
return x
class Linear(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout):
super(Linear, self).__init__()
self.linear1 = nn.Linear(nfeat, nclass, bias=True)
self.dropout = dropout
def forward(self, x, adj=None):
x = F.dropout(x, self.dropout, training=self.training)
x = self.linear1(x)
return x
class Linear2(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout):
super(Linear2, self).__init__()
self.linear1 = nn.Linear(nfeat, nhid, bias=True)
self.linear2 = nn.Linear(nhid, nclass, bias=True)
self.dropout = dropout
def forward(self, x, adj=None):
x = F.dropout(x, self.dropout, training=self.training)
x = F.relu(self.linear1(x))
x = F.dropout(x, self.dropout, training=self.training)
x = self.linear2(x)
return x
class SGAT(nn.Module):
def __init__(self, nfeat, nhid, nhead, nhead2, nclass, dropout=0.0,
node_dropout=0.0, edge_dropout=0.0,
pre_attn_order=1, post_attn_order=1,
pre_attn_appnp=False, pre_appnp_alpha=0.1,
post_attn_appnp=False, post_appnp_alpha=0.1, device='cpu'):
super(SGAT, self).__init__()
self.layer1 = SGATLayer(nfeat, nhid, nhead,
node_dropout=node_dropout,
edge_dropout=edge_dropout,
pre_attn_order=pre_attn_order,
post_attn_order=post_attn_order,
pre_attn_appnp=pre_attn_appnp,
pre_appnp_alpha=pre_appnp_alpha,
post_attn_appnp=post_attn_appnp,
post_appnp_alpha=post_appnp_alpha,
bias=True, mean=False, device=device)
self.layer2 = SGATLayer(nhid * nhead, nclass, nhead2,
node_dropout=node_dropout,
edge_dropout=edge_dropout,
pre_attn_order=pre_attn_order,
post_attn_order=post_attn_order,
pre_attn_appnp=pre_attn_appnp,
pre_appnp_alpha=pre_appnp_alpha,
post_attn_appnp=post_attn_appnp,
post_appnp_alpha=post_appnp_alpha,
bias=False, mean=True, device=device)
self.dropout = dropout
def forward(self, x, adj):
x = F.dropout(x, self.dropout, training=self.training)
x = F.elu(self.layer1(x, adj))
x = F.dropout(x, self.dropout, training=self.training)
x = self.layer2(x, adj)
return x
class SGAT_multi(nn.Module):
    """Two-layer SGAT variant built from basis-decomposed (multi) layers.

    Layer 1 maps ``nfeat`` -> ``nhid`` with ``nhead`` heads (``mean=False``,
    so the next layer receives ``nhid * nhead`` features); layer 2 maps
    that onto ``nclass`` outputs with ``nhead2`` heads averaged
    (``mean=True``).  ``dropout`` is applied to node features before each
    layer in ``forward``.
    """

    def __init__(self, nfeat, nhid, nhead, nhead2, nbase, nclass, dropout=0.0,
                 node_dropout=0.0, edge_dropout=0.0,
                 pre_attn_order=1, post_attn_order=1,
                 pre_attn_appnp=False, pre_appnp_alpha=0.1,
                 post_attn_appnp=False, post_appnp_alpha=0.1, device='cpu'):
        super(SGAT_multi, self).__init__()
        # Options identical for both attention layers — build them once.
        shared = dict(
            node_dropout=node_dropout,
            edge_dropout=edge_dropout,
            pre_attn_order=pre_attn_order,
            post_attn_order=post_attn_order,
            pre_attn_appnp=pre_attn_appnp,
            pre_appnp_alpha=pre_appnp_alpha,
            post_attn_appnp=post_attn_appnp,
            post_appnp_alpha=post_appnp_alpha,
            device=device,
        )
        # Hidden layer: biased, per-head outputs kept separate (mean=False),
        # hence the nhid * nhead input width of the next layer.
        self.layer1 = SGATMultiLayer(nfeat, nhid, nhead, nbase,
                                     bias=True, mean=False, **shared)
        # Output layer: unbiased, heads averaged down to nclass outputs.
        self.layer2 = SGATMultiLayer(nhid * nhead, nclass, nhead2, nbase,
                                     bias=False, mean=True, **shared)
        self.dropout = dropout

    def forward(self, x, adj):
        """Apply dropout -> layer1 -> ELU -> dropout -> layer2 over ``adj``."""
        h = F.dropout(x, self.dropout, training=self.training)
        h = F.elu(self.layer1(h, adj))
        h = F.dropout(h, self.dropout, training=self.training)
        return self.layer2(h, adj)
| 41.555556
| 94
| 0.548269
| 829
| 7,106
| 4.434258
| 0.072376
| 0.065832
| 0.034276
| 0.038085
| 0.890642
| 0.866975
| 0.857182
| 0.848477
| 0.789445
| 0.768226
| 0
| 0.013383
| 0.35857
| 7,106
| 171
| 95
| 41.555556
| 0.793111
| 0.004081
| 0
| 0.741007
| 0
| 0
| 0.000848
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.115108
| false
| 0
| 0.021583
| 0
| 0.251799
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eed51d3bcc0e2c930f386d4e33e0ad45f445c570
| 6,269
|
py
|
Python
|
loldib/getratings/models/NA/na_sion/na_sion_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_sion/na_sion_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_sion/na_sion_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Placeholder per-matchup rating models for Sion (Top lane, NA region).
# Each class pairs Sion against one opposing champion; all behaviour comes
# from the shared ``Ratings`` base, so every body was an empty ``pass``.
# The loop below creates the exact same module-level class names as the
# original 276 lines of ``class NA_Sion_Top_X(Ratings): pass`` boilerplate
# (same bases, same names, importable the same way), just without the
# hand-written repetition.
_OPPONENTS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie",
    "Ashe", "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum",
    "Caitlyn", "Camille", "Cassiopeia", "Chogath", "Corki", "Darius",
    "Diana", "Draven", "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal",
    "Fiddlesticks", "Fiora", "Fizz", "Galio", "Gangplank", "Garen", "Gnar",
    "Gragas", "Graves", "Hecarim", "Heimerdinger", "Illaoi", "Irelia",
    "Ivern", "Janna", "JarvanIV", "Jax", "Jayce", "Jhin", "Jinx",
    "Kalista", "Karma", "Karthus", "Kassadin", "Katarina", "Kayle", "Kayn",
    "Kennen", "Khazix", "Kindred", "Kled", "KogMaw", "Leblanc", "LeeSin",
    "Leona", "Lissandra", "Lucian", "Lulu", "Lux", "Malphite", "Malzahar",
    "Maokai", "MasterYi", "MissFortune", "MonkeyKing", "Mordekaiser",
    "Morgana", "Nami", "Nasus", "Nautilus", "Nidalee", "Nocturne", "Nunu",
    "Olaf", "Orianna", "Ornn", "Pantheon", "Poppy", "Quinn", "Rakan",
    "Rammus", "RekSai", "Renekton", "Rengar", "Riven", "Rumble", "Ryze",
    "Sejuani", "Shaco", "Shen", "Shyvana", "Singed", "Sion", "Sivir",
    "Skarner", "Sona", "Soraka", "Swain", "Syndra", "TahmKench", "Taliyah",
    "Talon", "Taric", "Teemo", "Thresh", "Tristana", "Trundle",
    "Tryndamere", "TwistedFate", "Twitch", "Udyr", "Urgot", "Varus",
    "Vayne", "Veigar", "Velkoz", "Vi", "Viktor", "Vladimir", "Volibear",
    "Warwick", "Xayah", "Xerath", "XinZhao", "Yasuo", "Yorick", "Zac",
    "Zed", "Ziggs", "Zilean", "Zyra",
)

for _opponent in _OPPONENTS:
    _cls_name = "NA_Sion_Top_" + _opponent
    # type() picks up __module__ from this module, so the generated classes
    # are pickled/introspected the same way as statement-defined ones.
    globals()[_cls_name] = type(_cls_name, (Ratings,), {})
del _opponent, _cls_name
| 15.033573
| 46
| 0.75642
| 972
| 6,269
| 4.452675
| 0.151235
| 0.223198
| 0.350739
| 0.446396
| 0.791359
| 0.791359
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177221
| 6,269
| 416
| 47
| 15.069712
| 0.839085
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
e10d0ac1bf1de0691c74b3b3c2872cf529d34a51
| 137
|
py
|
Python
|
can_tools/scrapers/official/WI/__init__.py
|
jrybacek/can-scrapers
|
1a32a45be6aa6630de4d100c56c2a8659a1b1025
|
[
"MIT"
] | null | null | null |
can_tools/scrapers/official/WI/__init__.py
|
jrybacek/can-scrapers
|
1a32a45be6aa6630de4d100c56c2a8659a1b1025
|
[
"MIT"
] | null | null | null |
can_tools/scrapers/official/WI/__init__.py
|
jrybacek/can-scrapers
|
1a32a45be6aa6630de4d100c56c2a8659a1b1025
|
[
"MIT"
] | null | null | null |
from can_tools.scrapers.official.WI.wi_state import WisconsinCounties
from can_tools.scrapers.official.WI.wi_state import WisconsinState
| 45.666667
| 69
| 0.883212
| 20
| 137
| 5.85
| 0.5
| 0.119658
| 0.205128
| 0.34188
| 0.735043
| 0.735043
| 0.735043
| 0.735043
| 0.735043
| 0
| 0
| 0
| 0.058394
| 137
| 2
| 70
| 68.5
| 0.906977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.