Schema (column name and dtype):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
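The rows below can be loaded and filtered on these columns. A minimal sketch, assuming the records are exported as JSON Lines under the hypothetical name `stack_sample.jsonl`; the column names come from the schema above, everything else is illustrative:

```python
import pandas as pd

# Hypothetical export of the rows below; any JSONL file with this schema works.
df = pd.read_json("stack_sample.jsonl", lines=True)

# Keep files that parse as Python (cate_ast == 1), contain no detected
# encoded-data blob, and are not dominated by duplicated 5-grams.
mask = (
    (df["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (df["qsc_code_cate_encoded_data_quality_signal"] == 0)
    & (df["qsc_code_frac_chars_dupe_5grams_quality_signal"] < 0.5)
)
print(df.loc[mask, ["hexsha", "size", "max_stars_repo_name"]])
```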
---

**hexsha:** `be48f270c8145112d75ff74becde39378ff59bb5` · **size:** 90 · **ext:** py · **lang:** Python

| | path | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|---|
| max_stars | IPython/external/simplegeneric/__init__.py | dchichkov/ipython | 8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4 | ["BSD-3-Clause-Clear"] | 26 | 2018-02-14T23:52:58.000Z | 2021-08-16T13:50:03.000Z |
| max_issues | IPython/external/simplegeneric/__init__.py | dchichkov/ipython | 8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4 | ["BSD-3-Clause-Clear"] | 3 | 2015-04-01T13:14:57.000Z | 2015-05-26T16:01:37.000Z |
| max_forks | IPython/external/simplegeneric/__init__.py | dchichkov/ipython | 8096bb8640ee7e7c5ebdf3f428fe69cd390e1cd4 | ["BSD-3-Clause-Clear"] | 10 | 2018-08-13T19:38:39.000Z | 2020-04-19T03:02:00.000Z |

**content:**

```python
try:
    from simplegeneric import *
except ImportError:
    from _simplegeneric import *
```
**signals:**

- `avg_line_length` 18 · `max_line_length` 32 · `alphanum_fraction` 0.744444
- `qsc_code_*_quality_signal`: num_words 9, num_chars 90, mean_word_length 7.333333, frac_words_unique 0.666667, frac_chars_top_2grams 0.515152, frac_chars_top_3grams 0.69697, frac_chars_top_4grams 0, frac_chars_dupe_5grams through dupe_10grams 0, frac_chars_replacement_symbols 0, frac_chars_digital 0, frac_chars_whitespace 0.211111, size_file_byte 90, num_lines 4, num_chars_line_max 33, num_chars_line_mean 22.5, frac_chars_alphabet 0.929577, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0, frac_chars_long_word_length 0, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*_quality_signal`: cate_ast 1, frac_lines_func_ratio 0, cate_var_zero true, frac_lines_pass 0, frac_lines_import 0.75, frac_lines_simplefunc 0, score_lines_no_logic 0.75, frac_lines_print 0
- `qsc_code_*` flags: num_words 1, num_chars 0, mean_word_length 0, frac_words_unique null, frac_chars_top_2grams 1, frac_chars_top_3grams 1, frac_chars_top_4grams 0, frac_chars_dupe_5grams through dupe_10grams 0, frac_chars_replacement_symbols 0, frac_chars_digital 0, frac_chars_whitespace 0, size_file_byte 0, num_lines 1, num_chars_line_max 0, num_chars_line_mean 0, frac_chars_alphabet 0, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0, frac_chars_long_word_length 0, frac_lines_string_concat null, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*` flags: cate_ast 0, frac_lines_func_ratio 0, cate_var_zero 1, frac_lines_pass 0, frac_lines_import 1, frac_lines_simplefunc 0, score_lines_no_logic 1, frac_lines_print 0
- `effective` 0 · `hits` 7
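The record's top-level statistics can be re-derived from `content`. A sketch against this 90-byte file; the exact upstream definitions (whether the trailing empty segment counts, which characters count as alphanumeric) are assumptions here, so the last figure lands near, not exactly on, the stored 0.744444:

```python
CONTENT = (
    "try:\n"
    "    from simplegeneric import *\n"
    "except ImportError:\n"
    "    from _simplegeneric import *\n"
)

size = len(CONTENT)                              # 90, matches the record's size
segments = CONTENT.split("\n")                   # 4 lines plus a trailing empty segment
print(size / len(segments))                      # 18.0  -> avg_line_length
print(max(len(s) for s in segments))             # 32    -> max_line_length
print(sum(c.isalnum() for c in CONTENT) / size)  # ~0.73 -> alphanum_fraction (record: 0.744444)
```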
---

**hexsha:** `be5407ccc64b3c3eb8f4a30be8b1af67d9e09376` · **size:** 58,661 · **ext:** py · **lang:** Python

| | path | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|---|
| max_stars | tests/language/test_visitor.py | dfee/graphql-core-next | 1ada7146bd0510171ae931b68f6c77dbdf5d5c63 | ["MIT"] | null | null | null |
| max_issues | tests/language/test_visitor.py | dfee/graphql-core-next | 1ada7146bd0510171ae931b68f6c77dbdf5d5c63 | ["MIT"] | null | null | null |
| max_forks | tests/language/test_visitor.py | dfee/graphql-core-next | 1ada7146bd0510171ae931b68f6c77dbdf5d5c63 | ["MIT"] | null | null | null |

**content:**

```python
from copy import copy
from pytest import fail
from graphql.language import (
Node, FieldNode, NameNode, SelectionSetNode, parse, print_ast,
visit, BREAK, REMOVE, SKIP, ParallelVisitor, TypeInfoVisitor, Visitor)
from graphql.type import get_named_type, is_composite_type
from graphql.utilities import TypeInfo
from ..validation.harness import test_schema
# noinspection PyUnresolvedReferences
from . import kitchen_sink # noqa: F401
def get_node_by_path(ast, path):
result = ast
for key in path:
if isinstance(key, int):
assert isinstance(result, list)
try:
result = result[key]
except IndexError:
fail(f'invalid index {key} in node list {result}')
elif isinstance(key, str):
assert isinstance(result, Node)
try:
result = getattr(result, key)
except AttributeError:
fail(f'invalid key {key} in node {result}')
else:
fail(f'invalid key {key!r} in path {path}')
return result
def check_visitor_fn_args(
ast, node, key, parent, path, ancestors, is_edited=False):
assert isinstance(node, Node)
is_root = key is None
if is_root:
if not is_edited:
assert node is ast
assert parent is None
assert path == []
assert ancestors == []
return
assert isinstance(key, (int, str))
assert get_node_by_path(parent, [key]) is not None
assert isinstance(path, list)
assert path[-1] == key
assert isinstance(ancestors, list)
assert len(ancestors) == len(path) - 1
if not is_edited:
assert get_node_by_path(parent, [key]) is node
assert get_node_by_path(ast, path) is node
for i, ancestor in enumerate(ancestors):
ancestor_path = path[:i]
assert ancestor == get_node_by_path(ast, ancestor_path)
def describe_visitor():
def validates_path_argument():
ast = parse('{ a }', no_location=True)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
visited.append(['enter', *args[3]])
def leave(self, *args):
check_visitor_fn_args(ast, *args)
visited.append(['leave', *args[3]])
visit(ast, TestVisitor())
assert visited == [
['enter'],
['enter', 'definitions', 0],
['enter', 'definitions', 0, 'selection_set'],
['enter', 'definitions', 0, 'selection_set', 'selections', 0],
['enter',
'definitions', 0, 'selection_set', 'selections', 0, 'name'],
['leave',
'definitions', 0, 'selection_set', 'selections', 0, 'name'],
['leave', 'definitions', 0, 'selection_set', 'selections', 0],
['leave', 'definitions', 0, 'selection_set'],
['leave', 'definitions', 0],
['leave']]
def validates_ancestors_argument():
ast = parse('{ a }', no_location=True)
visited_nodes = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, node, key, parent, path, ancestors):
in_array = isinstance(key, int)
if in_array:
visited_nodes.append(parent)
visited_nodes.append(node)
expected_ancestors = visited_nodes[0:-2]
assert ancestors == expected_ancestors
def leave(self, node, key, parent, path, ancestors):
expected_ancestors = visited_nodes[0:-2]
assert ancestors == expected_ancestors
in_array = isinstance(key, int)
if in_array:
visited_nodes.pop()
visited_nodes.pop()
visit(ast, TestVisitor())
def allows_editing_a_node_both_on_enter_and_on_leave():
ast = parse('{ a, b, c { a, b, c } }', no_location=True)
visited = []
class TestVisitor(Visitor):
selection_set = None
def enter_operation_definition(self, *args):
check_visitor_fn_args(ast, *args)
node = copy(args[0])
assert len(node.selection_set.selections) == 3
self.selection_set = node.selection_set
node.selection_set = SelectionSetNode(selections=[])
visited.append('enter')
return node
def leave_operation_definition(self, *args):
check_visitor_fn_args(ast, *args, is_edited=True)
node = copy(args[0])
assert not node.selection_set.selections
node.selection_set = self.selection_set
visited.append('leave')
return node
edited_ast = visit(ast, TestVisitor())
assert edited_ast == ast
assert visited == ['enter', 'leave']
def allows_for_editing_on_enter():
ast = parse('{ a, b, c { a, b, c } }', no_location=True)
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
if isinstance(node, FieldNode) and node.name.value == 'b':
return REMOVE
edited_ast = visit(ast, TestVisitor())
assert ast == parse('{ a, b, c { a, b, c } }', no_location=True)
assert edited_ast == parse('{ a, c { a, c } }', no_location=True)
def allows_for_editing_on_leave():
ast = parse('{ a, b, c { a, b, c } }', no_location=True)
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def leave(self, *args):
check_visitor_fn_args(ast, *args, is_edited=True)
node = args[0]
if isinstance(node, FieldNode) and node.name.value == 'b':
return REMOVE
edited_ast = visit(ast, TestVisitor())
assert ast == parse('{ a, b, c { a, b, c } }', no_location=True)
assert edited_ast == parse('{ a, c { a, c } }', no_location=True)
def visits_edited_node():
ast = parse('{ a { x } }', no_location=True)
added_field = FieldNode(name=NameNode(value='__typename'))
class TestVisitor(Visitor):
did_visit_added_field = False
def enter(self, *args):
check_visitor_fn_args(ast, *args, is_edited=True)
node = args[0]
if isinstance(node, FieldNode) and node.name.value == 'a':
node = copy(node)
# noinspection PyTypeChecker
node.selection_set.selections = [
added_field] + node.selection_set.selections
return node
if node == added_field:
self.did_visit_added_field = True
visitor = TestVisitor()
visit(ast, visitor)
assert visitor.did_visit_added_field
def allows_skipping_a_sub_tree():
ast = parse('{ a, b { x }, c }', no_location=True)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
if kind == 'field' and node.name.value == 'b':
return SKIP
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
visit(ast, TestVisitor())
assert visited == [
['enter', 'document', None],
['enter', 'operation_definition', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'field', None],
['enter', 'name', 'c'],
['leave', 'name', 'c'],
['leave', 'field', None],
['leave', 'selection_set', None],
['leave', 'operation_definition', None],
['leave', 'document', None]]
def allows_early_exit_while_visiting():
ast = parse('{ a, b { x }, c }', no_location=True)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
if kind == 'name' and node.value == 'x':
return BREAK
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
visit(ast, TestVisitor())
assert visited == [
['enter', 'document', None],
['enter', 'operation_definition', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'name', 'b'],
['leave', 'name', 'b'],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'x']]
def allows_early_exit_while_leaving():
ast = parse('{ a, b { x }, c }', no_location=True)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
if kind == 'name' and node.value == 'x':
return BREAK
visit(ast, TestVisitor())
assert visited == [
['enter', 'document', None],
['enter', 'operation_definition', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'name', 'b'],
['leave', 'name', 'b'],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'x'],
['leave', 'name', 'x']]
def allows_a_named_functions_visitor_api():
ast = parse('{ a, b { x }, c }', no_location=True)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter_name(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
def enter_selection_set(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
def leave_selection_set(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
visit(ast, TestVisitor())
assert visited == [
['enter', 'selection_set', None],
['enter', 'name', 'a'],
['enter', 'name', 'b'],
['enter', 'selection_set', None],
['enter', 'name', 'x'],
['leave', 'selection_set', None],
['enter', 'name', 'c'],
['leave', 'selection_set', None]]
def experimental_visits_variables_defined_in_fragments():
ast = parse('fragment a($v: Boolean = false) on t { f }',
no_location=True, experimental_fragment_variables=True)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
visit(ast, TestVisitor())
assert visited == [
['enter', 'document', None],
['enter', 'fragment_definition', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['enter', 'variable_definition', None],
['enter', 'variable', None],
['enter', 'name', 'v'],
['leave', 'name', 'v'],
['leave', 'variable', None],
['enter', 'named_type', None],
['enter', 'name', 'Boolean'],
['leave', 'name', 'Boolean'],
['leave', 'named_type', None],
['enter', 'boolean_value', False],
['leave', 'boolean_value', False],
['leave', 'variable_definition', None],
['enter', 'named_type', None],
['enter', 'name', 't'],
['leave', 'name', 't'],
['leave', 'named_type', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'f'],
['leave', 'name', 'f'],
['leave', 'field', None],
['leave', 'selection_set', None],
['leave', 'fragment_definition', None],
['leave', 'document', None]]
# noinspection PyShadowingNames
def visits_kitchen_sink(kitchen_sink): # noqa: F811
ast = parse(kitchen_sink)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node, key, parent = args[:3]
parent_kind = parent.kind if isinstance(parent, Node) else None
visited.append(['enter', node.kind, key, parent_kind])
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node, key, parent = args[:3]
parent_kind = parent.kind if isinstance(parent, Node) else None
visited.append(['leave', node.kind, key, parent_kind])
visit(ast, TestVisitor())
assert visited == [
['enter', 'document', None, None],
['enter', 'operation_definition', 0, None],
['enter', 'name', 'name', 'operation_definition'],
['leave', 'name', 'name', 'operation_definition'],
['enter', 'variable_definition', 0, None],
['enter', 'variable', 'variable', 'variable_definition'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'variable', 'variable_definition'],
['enter', 'named_type', 'type', 'variable_definition'],
['enter', 'name', 'name', 'named_type'],
['leave', 'name', 'name', 'named_type'],
['leave', 'named_type', 'type', 'variable_definition'],
['leave', 'variable_definition', 0, None],
['enter', 'variable_definition', 1, None],
['enter', 'variable', 'variable', 'variable_definition'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'variable', 'variable_definition'],
['enter', 'named_type', 'type', 'variable_definition'],
['enter', 'name', 'name', 'named_type'],
['leave', 'name', 'name', 'named_type'],
['leave', 'named_type', 'type', 'variable_definition'],
['enter', 'enum_value', 'default_value', 'variable_definition'],
['leave', 'enum_value', 'default_value', 'variable_definition'],
['leave', 'variable_definition', 1, None],
['enter', 'selection_set', 'selection_set',
'operation_definition'],
['enter', 'field', 0, None],
['enter', 'name', 'alias', 'field'],
['leave', 'name', 'alias', 'field'],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'argument', 0, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'list_value', 'value', 'argument'],
['enter', 'int_value', 0, None],
['leave', 'int_value', 0, None],
['enter', 'int_value', 1, None],
['leave', 'int_value', 1, None],
['leave', 'list_value', 'value', 'argument'],
['leave', 'argument', 0, None],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 0, None],
['enter', 'inline_fragment', 1, None],
['enter', 'named_type', 'type_condition', 'inline_fragment'],
['enter', 'name', 'name', 'named_type'],
['leave', 'name', 'name', 'named_type'],
['leave', 'named_type', 'type_condition', 'inline_fragment'],
['enter', 'directive', 0, None],
['enter', 'name', 'name', 'directive'],
['leave', 'name', 'name', 'directive'],
['leave', 'directive', 0, None],
['enter', 'selection_set', 'selection_set', 'inline_fragment'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 0, None],
['enter', 'field', 1, None],
['enter', 'name', 'alias', 'field'],
['leave', 'name', 'alias', 'field'],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'argument', 0, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'int_value', 'value', 'argument'],
['leave', 'int_value', 'value', 'argument'],
['leave', 'argument', 0, None],
['enter', 'argument', 1, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'variable', 'value', 'argument'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'value', 'argument'],
['leave', 'argument', 1, None],
['enter', 'directive', 0, None],
['enter', 'name', 'name', 'directive'],
['leave', 'name', 'name', 'directive'],
['enter', 'argument', 0, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'variable', 'value', 'argument'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'value', 'argument'],
['leave', 'argument', 0, None],
['leave', 'directive', 0, None],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 0, None],
['enter', 'fragment_spread', 1, None],
['enter', 'name', 'name', 'fragment_spread'],
['leave', 'name', 'name', 'fragment_spread'],
['leave', 'fragment_spread', 1, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 1, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set', 'inline_fragment'],
['leave', 'inline_fragment', 1, None],
['enter', 'inline_fragment', 2, None],
['enter', 'directive', 0, None],
['enter', 'name', 'name', 'directive'],
['leave', 'name', 'name', 'directive'],
['enter', 'argument', 0, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'variable', 'value', 'argument'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'value', 'argument'],
['leave', 'argument', 0, None],
['leave', 'directive', 0, None],
['enter', 'selection_set', 'selection_set', 'inline_fragment'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set', 'inline_fragment'],
['leave', 'inline_fragment', 2, None],
['enter', 'inline_fragment', 3, None],
['enter', 'selection_set', 'selection_set', 'inline_fragment'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set', 'inline_fragment'],
['leave', 'inline_fragment', 3, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set',
'operation_definition'],
['leave', 'operation_definition', 0, None],
['enter', 'operation_definition', 1, None],
['enter', 'name', 'name', 'operation_definition'],
['leave', 'name', 'name', 'operation_definition'],
['enter', 'selection_set', 'selection_set',
'operation_definition'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'argument', 0, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'int_value', 'value', 'argument'],
['leave', 'int_value', 'value', 'argument'],
['leave', 'argument', 0, None],
['enter', 'directive', 0, None],
['enter', 'name', 'name', 'directive'],
['leave', 'name', 'name', 'directive'],
['leave', 'directive', 0, None],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set',
'operation_definition'],
['leave', 'operation_definition', 1, None],
['enter', 'operation_definition', 2, None],
['enter', 'name', 'name', 'operation_definition'],
['leave', 'name', 'name', 'operation_definition'],
['enter', 'variable_definition', 0, None],
['enter', 'variable', 'variable', 'variable_definition'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'variable', 'variable_definition'],
['enter', 'named_type', 'type', 'variable_definition'],
['enter', 'name', 'name', 'named_type'],
['leave', 'name', 'name', 'named_type'],
['leave', 'named_type', 'type', 'variable_definition'],
['leave', 'variable_definition', 0, None],
['enter', 'selection_set', 'selection_set',
'operation_definition'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'argument', 0, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'variable', 'value', 'argument'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'value', 'argument'],
['leave', 'argument', 0, None],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 0, None],
['enter', 'field', 1, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'selection_set', 'selection_set', 'field'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 1, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set', 'field'],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set',
'operation_definition'],
['leave', 'operation_definition', 2, None],
['enter', 'fragment_definition', 3, None],
['enter', 'name', 'name', 'fragment_definition'],
['leave', 'name', 'name', 'fragment_definition'],
['enter', 'named_type', 'type_condition',
'fragment_definition'],
['enter', 'name', 'name', 'named_type'],
['leave', 'name', 'name', 'named_type'],
['leave', 'named_type', 'type_condition',
'fragment_definition'],
['enter', 'selection_set', 'selection_set',
'fragment_definition'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'argument', 0, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'variable', 'value', 'argument'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'value', 'argument'],
['leave', 'argument', 0, None],
['enter', 'argument', 1, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'variable', 'value', 'argument'],
['enter', 'name', 'name', 'variable'],
['leave', 'name', 'name', 'variable'],
['leave', 'variable', 'value', 'argument'],
['leave', 'argument', 1, None],
['enter', 'argument', 2, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'object_value', 'value', 'argument'],
['enter', 'object_field', 0, None],
['enter', 'name', 'name', 'object_field'],
['leave', 'name', 'name', 'object_field'],
['enter', 'string_value', 'value', 'object_field'],
['leave', 'string_value', 'value', 'object_field'],
['leave', 'object_field', 0, None],
['enter', 'object_field', 1, None],
['enter', 'name', 'name', 'object_field'],
['leave', 'name', 'name', 'object_field'],
['enter', 'string_value', 'value', 'object_field'],
['leave', 'string_value', 'value', 'object_field'],
['leave', 'object_field', 1, None],
['leave', 'object_value', 'value', 'argument'],
['leave', 'argument', 2, None],
['leave', 'field', 0, None],
['leave', 'selection_set', 'selection_set',
'fragment_definition'],
['leave', 'fragment_definition', 3, None],
['enter', 'operation_definition', 4, None],
['enter', 'selection_set', 'selection_set',
'operation_definition'],
['enter', 'field', 0, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['enter', 'argument', 0, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'boolean_value', 'value', 'argument'],
['leave', 'boolean_value', 'value', 'argument'],
['leave', 'argument', 0, None],
['enter', 'argument', 1, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'boolean_value', 'value', 'argument'],
['leave', 'boolean_value', 'value', 'argument'],
['leave', 'argument', 1, None],
['enter', 'argument', 2, None],
['enter', 'name', 'name', 'argument'],
['leave', 'name', 'name', 'argument'],
['enter', 'null_value', 'value', 'argument'],
['leave', 'null_value', 'value', 'argument'],
['leave', 'argument', 2, None],
['leave', 'field', 0, None],
['enter', 'field', 1, None],
['enter', 'name', 'name', 'field'],
['leave', 'name', 'name', 'field'],
['leave', 'field', 1, None],
['leave', 'selection_set', 'selection_set',
'operation_definition'],
['leave', 'operation_definition', 4, None],
['leave', 'document', None, None]]
def describe_visit_in_parallel():
def allows_skipping_a_sub_tree():
# Note: nearly identical to the above test but using ParallelVisitor
ast = parse('{ a, b { x }, c }')
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
if kind == 'field' and node.name.value == 'b':
return SKIP
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
visit(ast, ParallelVisitor([TestVisitor()]))
assert visited == [
['enter', 'document', None],
['enter', 'operation_definition', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'field', None],
['enter', 'name', 'c'],
['leave', 'name', 'c'],
['leave', 'field', None],
['leave', 'selection_set', None],
['leave', 'operation_definition', None],
['leave', 'document', None]]
def allows_skipping_different_sub_trees():
ast = parse('{ a { x }, b { y} }')
visited = []
class TestVisitor(Visitor):
def __init__(self, name):
self.name = name
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
name = self.name
visited.append([f'no-{name}', 'enter', kind, value])
if kind == 'field' and node.name.value == name:
return SKIP
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
name = self.name
visited.append([f'no-{name}', 'leave', kind, value])
visit(ast, ParallelVisitor([TestVisitor('a'), TestVisitor('b')]))
assert visited == [
['no-a', 'enter', 'document', None],
['no-b', 'enter', 'document', None],
['no-a', 'enter', 'operation_definition', None],
['no-b', 'enter', 'operation_definition', None],
['no-a', 'enter', 'selection_set', None],
['no-b', 'enter', 'selection_set', None],
['no-a', 'enter', 'field', None],
['no-b', 'enter', 'field', None],
['no-b', 'enter', 'name', 'a'],
['no-b', 'leave', 'name', 'a'],
['no-b', 'enter', 'selection_set', None],
['no-b', 'enter', 'field', None],
['no-b', 'enter', 'name', 'x'],
['no-b', 'leave', 'name', 'x'],
['no-b', 'leave', 'field', None],
['no-b', 'leave', 'selection_set', None],
['no-b', 'leave', 'field', None],
['no-a', 'enter', 'field', None],
['no-b', 'enter', 'field', None],
['no-a', 'enter', 'name', 'b'],
['no-a', 'leave', 'name', 'b'],
['no-a', 'enter', 'selection_set', None],
['no-a', 'enter', 'field', None],
['no-a', 'enter', 'name', 'y'],
['no-a', 'leave', 'name', 'y'],
['no-a', 'leave', 'field', None],
['no-a', 'leave', 'selection_set', None],
['no-a', 'leave', 'field', None],
['no-a', 'leave', 'selection_set', None],
['no-b', 'leave', 'selection_set', None],
['no-a', 'leave', 'operation_definition', None],
['no-b', 'leave', 'operation_definition', None],
['no-a', 'leave', 'document', None],
['no-b', 'leave', 'document', None]]
def allows_early_exit_while_visiting():
# Note: nearly identical to the above test but using ParallelVisitor.
ast = parse('{ a, b { x }, c }')
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
if kind == 'name' and node.value == 'x':
return BREAK
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
visit(ast, ParallelVisitor([TestVisitor()]))
assert visited == [
['enter', 'document', None],
['enter', 'operation_definition', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'name', 'b'],
['leave', 'name', 'b'],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'x']]
def allows_early_exit_from_different_points():
ast = parse('{ a { y }, b { x } }')
visited = []
class TestVisitor(Visitor):
def __init__(self, name):
self.name = name
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
name = self.name
visited.append([f'break-{name}', 'enter', kind, value])
if kind == 'name' and node.value == name:
return BREAK
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
name = self.name
visited.append([f'break-{name}', 'leave', kind, value])
visit(ast, ParallelVisitor([TestVisitor('a'), TestVisitor('b')]))
assert visited == [
['break-a', 'enter', 'document', None],
['break-b', 'enter', 'document', None],
['break-a', 'enter', 'operation_definition', None],
['break-b', 'enter', 'operation_definition', None],
['break-a', 'enter', 'selection_set', None],
['break-b', 'enter', 'selection_set', None],
['break-a', 'enter', 'field', None],
['break-b', 'enter', 'field', None],
['break-a', 'enter', 'name', 'a'],
['break-b', 'enter', 'name', 'a'],
['break-b', 'leave', 'name', 'a'],
['break-b', 'enter', 'selection_set', None],
['break-b', 'enter', 'field', None],
['break-b', 'enter', 'name', 'y'],
['break-b', 'leave', 'name', 'y'],
['break-b', 'leave', 'field', None],
['break-b', 'leave', 'selection_set', None],
['break-b', 'leave', 'field', None],
['break-b', 'enter', 'field', None],
['break-b', 'enter', 'name', 'b']]
def allows_early_exit_while_leaving():
# Note: nearly identical to the above test but using ParallelVisitor.
ast = parse('{ a, b { x }, c }')
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
if kind == 'name' and node.value == 'x':
return BREAK
visit(ast, ParallelVisitor([TestVisitor()]))
assert visited == [
['enter', 'document', None],
['enter', 'operation_definition', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'name', 'b'],
['leave', 'name', 'b'],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'x'],
['leave', 'name', 'x']]
def allows_early_exit_from_leaving_different_points():
ast = parse('{ a { y }, b { x } }')
visited = []
class TestVisitor(Visitor):
def __init__(self, name):
self.name = name
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
name = self.name
visited.append([f'break-{name}', 'enter', kind, value])
def leave(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
name = self.name
visited.append([f'break-{name}', 'leave', kind, value])
if kind == 'field' and node.name.value == name:
return BREAK
visit(ast, ParallelVisitor([TestVisitor('a'), TestVisitor('b')]))
assert visited == [
['break-a', 'enter', 'document', None],
['break-b', 'enter', 'document', None],
['break-a', 'enter', 'operation_definition', None],
['break-b', 'enter', 'operation_definition', None],
['break-a', 'enter', 'selection_set', None],
['break-b', 'enter', 'selection_set', None],
['break-a', 'enter', 'field', None],
['break-b', 'enter', 'field', None],
['break-a', 'enter', 'name', 'a'],
['break-b', 'enter', 'name', 'a'],
['break-a', 'leave', 'name', 'a'],
['break-b', 'leave', 'name', 'a'],
['break-a', 'enter', 'selection_set', None],
['break-b', 'enter', 'selection_set', None],
['break-a', 'enter', 'field', None],
['break-b', 'enter', 'field', None],
['break-a', 'enter', 'name', 'y'],
['break-b', 'enter', 'name', 'y'],
['break-a', 'leave', 'name', 'y'],
['break-b', 'leave', 'name', 'y'],
['break-a', 'leave', 'field', None],
['break-b', 'leave', 'field', None],
['break-a', 'leave', 'selection_set', None],
['break-b', 'leave', 'selection_set', None],
['break-a', 'leave', 'field', None],
['break-b', 'leave', 'field', None],
['break-b', 'enter', 'field', None],
['break-b', 'enter', 'name', 'b'],
['break-b', 'leave', 'name', 'b'],
['break-b', 'enter', 'selection_set', None],
['break-b', 'enter', 'field', None],
['break-b', 'enter', 'name', 'x'],
['break-b', 'leave', 'name', 'x'],
['break-b', 'leave', 'field', None],
['break-b', 'leave', 'selection_set', None],
['break-b', 'leave', 'field', None]]
def allows_for_editing_on_enter():
ast = parse('{ a, b, c { a, b, c } }', no_location=True)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor1(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
if node.kind == 'field' and node.name.value == 'b':
return REMOVE
# noinspection PyMethodMayBeStatic
class TestVisitor2(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
def leave(self, *args):
check_visitor_fn_args(ast, *args, is_edited=True)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
edited_ast = visit(
ast, ParallelVisitor([TestVisitor1(), TestVisitor2()]))
assert ast == parse('{ a, b, c { a, b, c } }', no_location=True)
assert edited_ast == parse('{ a, c { a, c } }', no_location=True)
assert visited == [
['enter', 'document', None],
['enter', 'operation_definition', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'name', 'c'],
['leave', 'name', 'c'],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'name', 'c'],
['leave', 'name', 'c'],
['leave', 'field', None],
['leave', 'selection_set', None],
['leave', 'field', None],
['leave', 'selection_set', None],
['leave', 'operation_definition', None],
['leave', 'document', None]]
def allows_for_editing_on_leave():
ast = parse('{ a, b, c { a, b, c } }', no_location=True)
visited = []
# noinspection PyMethodMayBeStatic
class TestVisitor1(Visitor):
def leave(self, *args):
check_visitor_fn_args(ast, *args, is_edited=True)
node = args[0]
if node.kind == 'field' and node.name.value == 'b':
return REMOVE
# noinspection PyMethodMayBeStatic
class TestVisitor2(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['enter', kind, value])
def leave(self, *args):
check_visitor_fn_args(ast, *args, is_edited=True)
node = args[0]
kind, value = node.kind, getattr(node, 'value', None)
visited.append(['leave', kind, value])
edited_ast = visit(
ast, ParallelVisitor([TestVisitor1(), TestVisitor2()]))
assert ast == parse('{ a, b, c { a, b, c } }', no_location=True)
assert edited_ast == parse('{ a, c { a, c } }', no_location=True)
assert visited == [
['enter', 'document', None],
['enter', 'operation_definition', None],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'name', 'b'],
['leave', 'name', 'b'],
['enter', 'field', None],
['enter', 'name', 'c'],
['leave', 'name', 'c'],
['enter', 'selection_set', None],
['enter', 'field', None],
['enter', 'name', 'a'],
['leave', 'name', 'a'],
['leave', 'field', None],
['enter', 'field', None],
['enter', 'name', 'b'],
['leave', 'name', 'b'],
['enter', 'field', None],
['enter', 'name', 'c'],
['leave', 'name', 'c'],
['leave', 'field', None],
['leave', 'selection_set', None],
['leave', 'field', None],
['leave', 'selection_set', None],
['leave', 'operation_definition', None],
['leave', 'document', None]]
def describe_visit_with_type_info():
def maintains_type_info_during_visit():
visited = []
ast = parse(
'{ human(id: 4) { name, pets { ... { name } }, unknown } }')
type_info = TypeInfo(test_schema)
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args)
parent_type = type_info.get_parent_type()
type_ = type_info.get_type()
input_type = type_info.get_input_type()
node = args[0]
visited.append([
'enter', node.kind,
node.value if node.kind == 'name' else None,
str(parent_type) if parent_type else None,
str(type_) if type_ else None,
str(input_type) if input_type else None])
def leave(self, *args):
check_visitor_fn_args(ast, *args)
parent_type = type_info.get_parent_type()
type_ = type_info.get_type()
input_type = type_info.get_input_type()
node = args[0]
visited.append([
'leave', node.kind,
node.value if node.kind == 'name' else None,
str(parent_type) if parent_type else None,
str(type_) if type_ else None,
str(input_type) if input_type else None])
visit(ast, TypeInfoVisitor(type_info, TestVisitor()))
assert visited == [
['enter', 'document', None, None, None, None],
['enter', 'operation_definition', None, None, 'QueryRoot', None],
['enter', 'selection_set', None, 'QueryRoot', 'QueryRoot', None],
['enter', 'field', None, 'QueryRoot', 'Human', None],
['enter', 'name', 'human', 'QueryRoot', 'Human', None],
['leave', 'name', 'human', 'QueryRoot', 'Human', None],
['enter', 'argument', None, 'QueryRoot', 'Human', 'ID'],
['enter', 'name', 'id', 'QueryRoot', 'Human', 'ID'],
['leave', 'name', 'id', 'QueryRoot', 'Human', 'ID'],
['enter', 'int_value', None, 'QueryRoot', 'Human', 'ID'],
['leave', 'int_value', None, 'QueryRoot', 'Human', 'ID'],
['leave', 'argument', None, 'QueryRoot', 'Human', 'ID'],
['enter', 'selection_set', None, 'Human', 'Human', None],
['enter', 'field', None, 'Human', 'String', None],
['enter', 'name', 'name', 'Human', 'String', None],
['leave', 'name', 'name', 'Human', 'String', None],
['leave', 'field', None, 'Human', 'String', None],
['enter', 'field', None, 'Human', '[Pet]', None],
['enter', 'name', 'pets', 'Human', '[Pet]', None],
['leave', 'name', 'pets', 'Human', '[Pet]', None],
['enter', 'selection_set', None, 'Pet', '[Pet]', None],
['enter', 'inline_fragment', None, 'Pet', 'Pet', None],
['enter', 'selection_set', None, 'Pet', 'Pet', None],
['enter', 'field', None, 'Pet', 'String', None],
['enter', 'name', 'name', 'Pet', 'String', None],
['leave', 'name', 'name', 'Pet', 'String', None],
['leave', 'field', None, 'Pet', 'String', None],
['leave', 'selection_set', None, 'Pet', 'Pet', None],
['leave', 'inline_fragment', None, 'Pet', 'Pet', None],
['leave', 'selection_set', None, 'Pet', '[Pet]', None],
['leave', 'field', None, 'Human', '[Pet]', None],
['enter', 'field', None, 'Human', None, None],
['enter', 'name', 'unknown', 'Human', None, None],
['leave', 'name', 'unknown', 'Human', None, None],
['leave', 'field', None, 'Human', None, None],
['leave', 'selection_set', None, 'Human', 'Human', None],
['leave', 'field', None, 'QueryRoot', 'Human', None],
['leave', 'selection_set', None, 'QueryRoot', 'QueryRoot', None],
['leave', 'operation_definition', None, None, 'QueryRoot', None],
['leave', 'document', None, None, None, None],
]
def maintains_type_info_during_edit():
visited = []
type_info = TypeInfo(test_schema)
ast = parse('{ human(id: 4) { name, pets }, alien }')
# noinspection PyMethodMayBeStatic
class TestVisitor(Visitor):
def enter(self, *args):
check_visitor_fn_args(ast, *args, is_edited=True)
parent_type = type_info.get_parent_type()
type_ = type_info.get_type()
input_type = type_info.get_input_type()
node = args[0]
visited.append([
'enter', node.kind,
node.value if node.kind == 'name' else None,
str(parent_type) if parent_type else None,
str(type_) if type_ else None,
str(input_type) if input_type else None])
# Make a query valid by adding missing selection sets.
if (node.kind == 'field' and not node.selection_set and
is_composite_type(get_named_type(type_))):
return FieldNode(
alias=node.alias,
name=node.name,
arguments=node.arguments,
directives=node.directives,
selection_set=SelectionSetNode(selections=[
FieldNode(name=NameNode(value='__typename'))]))
def leave(self, *args):
check_visitor_fn_args(ast, *args, is_edited=True)
parent_type = type_info.get_parent_type()
type_ = type_info.get_type()
input_type = type_info.get_input_type()
node = args[0]
visited.append([
'leave', node.kind,
node.value if node.kind == 'name' else None,
str(parent_type) if parent_type else None,
str(type_) if type_ else None,
str(input_type) if input_type else None])
edited_ast = visit(ast, TypeInfoVisitor(type_info, TestVisitor()))
assert ast == parse('{ human(id: 4) { name, pets }, alien }')
assert print_ast(edited_ast) == print_ast(parse(
'{ human(id: 4) { name, pets { __typename } },'
' alien { __typename } }'))
assert visited == [
['enter', 'document', None, None, None, None],
['enter', 'operation_definition', None, None, 'QueryRoot', None],
['enter', 'selection_set', None, 'QueryRoot', 'QueryRoot', None],
['enter', 'field', None, 'QueryRoot', 'Human', None],
['enter', 'name', 'human', 'QueryRoot', 'Human', None],
['leave', 'name', 'human', 'QueryRoot', 'Human', None],
['enter', 'argument', None, 'QueryRoot', 'Human', 'ID'],
['enter', 'name', 'id', 'QueryRoot', 'Human', 'ID'],
['leave', 'name', 'id', 'QueryRoot', 'Human', 'ID'],
['enter', 'int_value', None, 'QueryRoot', 'Human', 'ID'],
['leave', 'int_value', None, 'QueryRoot', 'Human', 'ID'],
['leave', 'argument', None, 'QueryRoot', 'Human', 'ID'],
['enter', 'selection_set', None, 'Human', 'Human', None],
['enter', 'field', None, 'Human', 'String', None],
['enter', 'name', 'name', 'Human', 'String', None],
['leave', 'name', 'name', 'Human', 'String', None],
['leave', 'field', None, 'Human', 'String', None],
['enter', 'field', None, 'Human', '[Pet]', None],
['enter', 'name', 'pets', 'Human', '[Pet]', None],
['leave', 'name', 'pets', 'Human', '[Pet]', None],
['enter', 'selection_set', None, 'Pet', '[Pet]', None],
['enter', 'field', None, 'Pet', 'String!', None],
['enter', 'name', '__typename', 'Pet', 'String!', None],
['leave', 'name', '__typename', 'Pet', 'String!', None],
['leave', 'field', None, 'Pet', 'String!', None],
['leave', 'selection_set', None, 'Pet', '[Pet]', None],
['leave', 'field', None, 'Human', '[Pet]', None],
['leave', 'selection_set', None, 'Human', 'Human', None],
['leave', 'field', None, 'QueryRoot', 'Human', None],
['enter', 'field', None, 'QueryRoot', 'Alien', None],
['enter', 'name', 'alien', 'QueryRoot', 'Alien', None],
['leave', 'name', 'alien', 'QueryRoot', 'Alien', None],
['enter', 'selection_set', None, 'Alien', 'Alien', None],
['enter', 'field', None, 'Alien', 'String!', None],
['enter', 'name', '__typename', 'Alien', 'String!', None],
['leave', 'name', '__typename', 'Alien', 'String!', None],
['leave', 'field', None, 'Alien', 'String!', None],
['leave', 'selection_set', None, 'Alien', 'Alien', None],
['leave', 'field', None, 'QueryRoot', 'Alien', None],
['leave', 'selection_set', None, 'QueryRoot', 'QueryRoot', None],
['leave', 'operation_definition', None, None, 'QueryRoot', None],
['leave', 'document', None, None, None, None],
]
```

**signals:**

- `avg_line_length` 43.484804 · `max_line_length` 79 · `alphanum_fraction` 0.477779
- `qsc_code_*_quality_signal`: num_words 5,657, num_chars 58,661, mean_word_length 4.818632, frac_words_unique 0.034117, frac_chars_top_2grams 0.065043, frac_chars_top_3grams 0.042445, frac_chars_top_4grams 0.028064, frac_chars_dupe_5grams 0.891889, dupe_6grams 0.838806, dupe_7grams 0.811182, dupe_8grams 0.788767, dupe_9grams 0.770865, dupe_10grams 0.766279, frac_chars_replacement_symbols 0, frac_chars_digital 0.004738, frac_chars_whitespace 0.334396, size_file_byte 58,661, num_lines 1,348, num_chars_line_max 80, num_chars_line_mean 43.517062, frac_chars_alphabet 0.693405, frac_chars_comments 0.016996, cate_xml_start 0, frac_lines_dupe_lines 0.789518, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0.25724, frac_chars_long_word_length 0, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0.042265
- `qsc_codepython_*_quality_signal`: cate_ast 1, frac_lines_func_ratio 0.062553, cate_var_zero false, frac_lines_pass 0, frac_lines_import 0.005917, frac_lines_simplefunc 0, score_lines_no_logic 0.106509, frac_lines_print 0.001691
- `qsc_code_*` flags: num_words 0, num_chars 0, mean_word_length 0, frac_words_unique null, frac_chars_top_2grams 0, frac_chars_top_3grams 0, frac_chars_top_4grams 0, frac_chars_dupe_5grams 1, dupe_6grams 1, dupe_7grams 1, dupe_8grams 1, dupe_9grams 1, dupe_10grams 1, frac_chars_replacement_symbols 0, frac_chars_digital 0, frac_chars_whitespace 0, size_file_byte 0, num_lines 0, num_chars_line_max 0, num_chars_line_mean 0, frac_chars_alphabet 0, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 1, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0, frac_chars_long_word_length 0, frac_lines_string_concat null, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*` flags: cate_ast 0, frac_lines_func_ratio 0, cate_var_zero 0, frac_lines_pass 0, frac_lines_import 0, frac_lines_simplefunc 0, score_lines_no_logic 0, frac_lines_print 0
- `effective` 0 · `hits` 7
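The high `frac_chars_dupe_*grams_quality_signal` values on this row come from the heavily repeated `['enter', ...]` / `['leave', ...]` assertion lines in the test file. A sketch of one common definition of this signal, the fraction of word characters covered by word n-grams that occur more than once; this is an assumed RedPajama-style formulation, not necessarily the dataset's exact implementation:

```python
from collections import Counter

def frac_chars_dupe_ngrams(text: str, n: int) -> float:
    # Split into whitespace-delimited words and enumerate all word n-grams.
    words = text.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)
    # Mark every word position covered by an n-gram that appears more than once.
    covered = [False] * len(words)
    for i, gram in enumerate(ngrams):
        if counts[gram] > 1:
            covered[i:i + n] = [True] * n
    total = sum(len(w) for w in words)
    dupe = sum(len(w) for w, hit in zip(words, covered) if hit)
    return dupe / total if total else 0.0
```

Applied to the `content` above, repeated five-word runs such as `['leave', 'name', 'name', 'field'],` cover most of the file, which is consistent with the stored value of 0.891889 for 5-grams.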
---

**hexsha:** `be5fe5c3c91232e99c8d8a4015095e70e6307ac1` · **size:** 86 · **ext:** py · **lang:** Python

| | path | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|---|
| max_stars | dndtools/django_filters2/__init__.py | dndtools/dndtools | aeafebb78f35249c9b2a941dac9a9541c6e7b190 | ["MIT"] | 55 | 2015-01-05T12:47:14.000Z | 2021-12-19T21:46:35.000Z |
| max_issues | dndtools/django_filters2/__init__.py | dndtools/dndtools | aeafebb78f35249c9b2a941dac9a9541c6e7b190 | ["MIT"] | 18 | 2015-01-04T12:11:01.000Z | 2018-04-05T04:17:06.000Z |
| max_forks | dndtools/django_filters2/__init__.py | dndtools/dndtools | aeafebb78f35249c9b2a941dac9a9541c6e7b190 | ["MIT"] | 34 | 2015-01-04T11:39:47.000Z | 2020-03-14T00:17:49.000Z |

**content:**

```python
from django_filters2.filterset import FilterSet
from django_filters2.filters import *
```

**signals:**

- `avg_line_length` 28.666667 · `max_line_length` 47 · `alphanum_fraction` 0.872093
- `qsc_code_*_quality_signal`: num_words 11, num_chars 86, mean_word_length 6.636364, frac_words_unique 0.545455, frac_chars_top_2grams 0.273973, frac_chars_top_3grams 0.493151, frac_chars_top_4grams 0, frac_chars_dupe_5grams through dupe_10grams 0, frac_chars_replacement_symbols 0, frac_chars_digital 0.025641, frac_chars_whitespace 0.093023, size_file_byte 86, num_lines 2, num_chars_line_max 48, num_chars_line_mean 43, frac_chars_alphabet 0.910256, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0, frac_chars_long_word_length 0, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*_quality_signal`: cate_ast 1, frac_lines_func_ratio 0, cate_var_zero true, frac_lines_pass 0, frac_lines_import 1, frac_lines_simplefunc 0, score_lines_no_logic 1, frac_lines_print 0
- `qsc_code_*` flags: num_words 0, num_chars 0, mean_word_length 0, frac_words_unique null, frac_chars_top_2grams 1, frac_chars_top_3grams 1, frac_chars_top_4grams 0, frac_chars_dupe_5grams through dupe_10grams 0, frac_chars_replacement_symbols 0, frac_chars_digital 0, frac_chars_whitespace 0, size_file_byte 0, num_lines 1, num_chars_line_max 0, num_chars_line_mean 0, frac_chars_alphabet 0, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0, frac_chars_long_word_length 0, frac_lines_string_concat null, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*` flags: cate_ast 0, frac_lines_func_ratio 0, cate_var_zero 1, frac_lines_pass 0, frac_lines_import 1, frac_lines_simplefunc 0, score_lines_no_logic 1, frac_lines_print 0
- `effective` 0 · `hits` 7
---

**hexsha:** `be62fd64111e2167f2ee698d1be7e8a62cad7295` · **size:** 118 · **ext:** py · **lang:** Python

| | path | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|---|
| max_stars | IntroducingPython/Unit/2/2/4/__init__.py | GitHubToMaster/Python-Learn | 396a7d7525744afe3f4fba29e15b28e8c07f2329 | ["MIT"] | null | null | null |
| max_issues | IntroducingPython/Unit/2/2/4/__init__.py | GitHubToMaster/Python-Learn | 396a7d7525744afe3f4fba29e15b28e8c07f2329 | ["MIT"] | null | null | null |
| max_forks | IntroducingPython/Unit/2/2/4/__init__.py | GitHubToMaster/Python-Learn | 396a7d7525744afe3f4fba29e15b28e8c07f2329 | ["MIT"] | null | null | null |

**content** (comments translated from Chinese):

```python
# Type conversion
print(int(True))
print(int(False))
print(int(10.5))  # only the integer part is kept
# int() cannot accept a string containing a decimal point or an exponent
print(int("10.5"))
```

**signals:**

- `avg_line_length` 16.857143 · `max_line_length` 32 · `alphanum_fraction` 0.652542
- `qsc_code_*_quality_signal`: num_words 18, num_chars 118, mean_word_length 4.277778, frac_words_unique 0.555556, frac_chars_top_2grams 0.415584, frac_chars_top_3grams 0.25974, frac_chars_top_4grams 0.285714, frac_chars_dupe_5grams through dupe_10grams 0, frac_chars_replacement_symbols 0, frac_chars_digital 0.06, frac_chars_whitespace 0.152542, size_file_byte 118, num_lines 7, num_chars_line_max 33, num_chars_line_mean 16.857143, frac_chars_alphabet 0.71, frac_chars_comments 0.279661, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0.04878, frac_chars_long_word_length 0, frac_lines_string_concat 0, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*_quality_signal`: cate_ast 1, frac_lines_func_ratio 0, cate_var_zero true, frac_lines_pass 0, frac_lines_import 0, frac_lines_simplefunc 0, score_lines_no_logic 0, frac_lines_print 1
- `qsc_code_*` flags: num_words 1, num_chars 0, mean_word_length 0, frac_words_unique null, frac_chars_top_2grams 1, frac_chars_top_3grams 1, frac_chars_top_4grams 1, frac_chars_dupe_5grams through dupe_10grams 0, frac_chars_replacement_symbols 0, frac_chars_digital 0, frac_chars_whitespace 0, size_file_byte 0, num_lines 1, num_chars_line_max 0, num_chars_line_mean 0, frac_chars_alphabet 0, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0, frac_chars_long_word_length 0, frac_lines_string_concat null, cate_encoded_data 0, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*` flags: cate_ast 0, frac_lines_func_ratio 0, cate_var_zero 1, frac_lines_pass 0, frac_lines_import 0, frac_lines_simplefunc 0, score_lines_no_logic 0, frac_lines_print 1
- `effective` 0 · `hits` 7
---

**hexsha:** `be9c27c3061de3f80f535ebcb9610ac037c18ea5` · **size:** 284 · **ext:** py · **lang:** Python

| | path | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|---|
| max_stars | dec_me/decme_last.py | Alpha-Demon404/RE-14 | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | ["MIT"] | 39 | 2020-02-26T09:44:36.000Z | 2022-03-23T00:18:25.000Z |
| max_issues | dec_me/decme_last.py | B4BY-DG/reverse-enginnering | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | ["MIT"] | 15 | 2020-05-14T10:07:26.000Z | 2022-01-06T02:55:32.000Z |
| max_forks | dec_me/decme_last.py | B4BY-DG/reverse-enginnering | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | ["MIT"] | 41 | 2020-03-16T22:36:38.000Z | 2022-03-17T14:47:19.000Z |

**content:**

```python
import base64
print(base64.b64decode('=oQKikXYq5WYigCIvt2Vvt2VKogCpQHelRHKg8GdulmcwBCIgACIgACIKoTK0hXZ0xiZsV2coAyXfRXaul2XfBiZlRGIgACIKozbrd1brdFIzNXYsNmCKQHelRHIuJXd0VmcgACIgoQK0hXZ0hCI05WayBHIgACIKoTK0hXZ0hCIvRnbpJHcgYWZkpgbvlGdj5Wdm9FdulmcwBCdy9GctlGIf9VZyVHd1Z2XfBSbvJnZ'[::-1]))
```

**signals:**

- `avg_line_length` 94.666667 · `max_line_length` 269 · `alphanum_fraction` 0.943662
- `qsc_code_*_quality_signal`: num_words 7, num_chars 284, mean_word_length 38.285714, frac_words_unique 0.857143, frac_chars_top_2grams 0, frac_chars_top_3grams 0, frac_chars_top_4grams 0, frac_chars_dupe_5grams through dupe_10grams 0, frac_chars_replacement_symbols 0, frac_chars_digital 0.103203, frac_chars_whitespace 0.010563, size_file_byte 284, num_lines 2, num_chars_line_max 270, num_chars_line_mean 142, frac_chars_alphabet 0.850534, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 0.830986, frac_chars_long_word_length 0.830986, frac_lines_string_concat 0, cate_encoded_data 1, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*_quality_signal`: cate_ast 1, frac_lines_func_ratio 0, cate_var_zero true, frac_lines_pass 0, frac_lines_import 0.5, frac_lines_simplefunc 0, score_lines_no_logic 0.5, frac_lines_print 0.5
- `qsc_code_*` flags: num_words 1, num_chars 0, mean_word_length 1, frac_words_unique null, frac_chars_top_2grams 0, frac_chars_top_3grams 0, frac_chars_top_4grams 0, frac_chars_dupe_5grams through dupe_10grams 0, frac_chars_replacement_symbols 0, frac_chars_digital 0, frac_chars_whitespace 0, size_file_byte 0, num_lines 1, num_chars_line_max 0, num_chars_line_mean 1, frac_chars_alphabet 0, frac_chars_comments 0, cate_xml_start 0, frac_lines_dupe_lines 0, cate_autogen 0, frac_lines_long_string 0, frac_chars_string_length 1, frac_chars_long_word_length 1, frac_lines_string_concat null, cate_encoded_data 1, frac_chars_hex_words 0, frac_lines_prompt_comments 0, frac_lines_assert 0
- `qsc_codepython_*` flags: cate_ast 0, frac_lines_func_ratio 0, cate_var_zero 1, frac_lines_pass 0, frac_lines_import 1, frac_lines_simplefunc 0, score_lines_no_logic 0, frac_lines_print 1
- `effective` 0 · `hits` 10
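This row carries `cate_encoded_data` = 1: almost all of its characters sit in a single reversed base64 literal. A crude illustrative detector for such blobs (an assumption, not the dataset's actual rule) just looks for long base64-alphabet runs:

```python
import re

# Illustrative heuristic: a run of 64+ base64-alphabet characters is treated
# as an encoded-data blob. The threshold is arbitrary, not the dataset's rule.
BASE64_RUN = re.compile(r"[A-Za-z0-9+/=]{64,}")

def looks_encoded(content: str) -> bool:
    return bool(BASE64_RUN.search(content))
```

On this record's `content` the single 269-character literal trips the rule, matching the stored flag.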
---

**hexsha:** `fe86cc506990dbc9de535de9bfa7989c2a17b659` · **size:** 9,732 · **ext:** py · **lang:** Python

| | path | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|---|
| max_stars | shared_stuff.py | siilmari/gw_research | eb6995be0a65f059306cf3e3b9341f0501e0f1f2 | ["MIT"] | 1 | 2019-03-22T08:59:18.000Z | 2019-03-22T08:59:18.000Z |
| max_issues | shared_stuff.py | siilmari/gw_research | eb6995be0a65f059306cf3e3b9341f0501e0f1f2 | ["MIT"] | null | null | null |
| max_forks | shared_stuff.py | siilmari/gw_research | eb6995be0a65f059306cf3e3b9341f0501e0f1f2 | ["MIT"] | null | null | null |

**content:**

```python
# This file contains the key variables and functions used in all other code files.
# All the variables, functions, etc. will be imported into all the other code files
# at the beginning.
import numpy as np
import math as m
from astropy.cosmology import Planck15
# Number of parameters to be estimated (A and alpha in this case):
ndim = 2
# Number of datasets to be generated and/or analysed:
n_data = 15
# Fix the parameter p_1:
p019 = -0.15
# Mass will be expressed in terms of 3 * 10^6 solar masses - define the scaling factor:
mass_scale_f = 3e6
# -----
# Now we will define the parameter ranges, bin widths, etc. of the log mass (lnM) and
# redshift (z) for the binned analysis:
# -----
lnM_lower = m.log(1e4/mass_scale_f) # lower end of the parameter range (approx 9.3)
lnM_upper = m.log(1e7/mass_scale_f) # upper end of the parameter range (approx 16.1)
lnM_nbins = 20 # number of bins
dlnM = (lnM_upper - lnM_lower)/lnM_nbins # bin width
# Similarly for z:
z_lower = 0.05
z_upper = 1.2
z_nbins = 20
dz = (z_upper - z_lower)/z_nbins
# Total number of bins:
n_bins = lnM_nbins*z_nbins
# -----
# Here we will define some functions and variables for defining the likelihood for our model:
# -----
# The main EMRI rate function, which calculates the intrinsic EMRI rate of a given bin.
# --Input--
# muu - vector of parameters
# lnM, z - lower ends of a particular bin
# --Output--
# The expected EMRI rate for the bin
def rate(muu, lnM, z):
return r0(muu, lnM+dlnM/2) * muu[0] * m.exp(muu[1] * (lnM+dlnM/2)) * dlnM * dz
# note that we add half of the bin width to lnM to calculate the rate for the midpoint of the bin
# we will multiply with the comoving volume and observable lifetime separately (see volume_obslife_factors below)
# The R_0 factor for the EMRI rate function.
# --Input--
# muuu - vector of parameters
# lnM - log mass
# --Output--
# R_0
def r0(muuu, lnM):
return 400*(3**p019)*m.exp(p019*lnM)
# here, we should divide by 10e9 to get the rate in y^(-1) instead of Gyr^(-1);
# however, we divide the differential comoving volume by that instead (and get it in Gpc instead of Mpc),
# so it cancels out!
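# Equivalently, r0 = 400 * (M / 1e6 Msun)**p019 per Gyr: the 3**p019 factor
# undoes the mass_scale_f = 3e6 scaling, so the rate is normalised to 400 at 1e6 solar masses.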
# The observable lifetime function - the result will be multiplied with the EMRI rate vector separately each time
# (see volume_obslife_factors below)
# --Input--
# lnM, z - log mass, redshift
# --Output--
# The observable lifetime (float)
def ObsLifeSpin4(lnM, z):
zlist = [0.05,0.1,0.15,0.2,0.25,0.3,0.35,0.4,0.45,0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9,0.95,1,1.05,1.1,1.15,1.2,1.25,1.3,1.35,1.4,1.45,1.5,1.55,1.6,1.65,1.7,1.75,1.8,1.85,1.9,1.95,2,2.05,2.1,2.15,2.2,2.25,2.3,2.35,2.4,2.45,2.5,2.55,2.6,2.65,2.7,2.75,2.8,2.85,2.9,2.95,3]
Mlist = [10000,12996.6,16891.1,21952.6,28530.8,37080.3,48191.6,62632.6,81400.9,105793,137495,178696,232243,301837,392284,509835,662610,861165,1.11922e+06,1.4546e+06,1.89048e+06,2.45698e+06,3.19323e+06,4.1501e+06,5.3937e+06,7.00996e+06,9.11054e+06,1.18406e+07,1.53887e+07,2e+07]
obslife = [[23.8889,12.5758,6.66667,3.68056,1.93333,0.769231,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[22.1429,12.8788,7.3913,4.51389,2.73333,1.53846,0.740741,0.0595238,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[20.3968,12.5758,7.97101,5.13889,3.4,2.24359,1.48148,0.77381,0.287356,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[18.8095,11.9697,8.11594,5.625,3.86667,2.75641,1.97531,1.36905,0.862069,0.5,0.0537634,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[17.381,11.1364,8.04348,5.76389,4.2,3.14103,2.40741,1.84524,1.32184,1,0.645161,0.3125,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[16.1905,10.3788,7.6087,5.83333,4.46667,3.46154,2.71605,2.14286,1.72414,1.38889,1.07527,0.78125,0.505051,0.245098,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[15.1587,9.62121,7.17391,5.625,4.46667,3.58974,2.90123,2.38095,1.95402,1.61111,1.34409,1.09375,0.909091,0.686275,0.47619,0.277778,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[14.3651,8.93939,6.73913,5.34722,4.33333,3.52564,2.96296,2.5,2.12644,1.77778,1.55914,1.35417,1.16162,0.931373,0.809524,0.648148,0.45045,0.263158,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[13.8095,8.25758,6.23188,5,4.13333,3.46154,2.96296,2.5,2.18391,1.88889,1.66667,1.45833,1.31313,1.12745,1,0.87963,0.720721,0.614035,0.42735,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[13.3333,7.80303,5.7971,4.65278,3.86667,3.33333,2.83951,2.5,2.12644,1.88889,1.72043,1.51042,1.36364,1.22549,1.14286,1.01852,0.900901,0.789474,0.683761,0.541667,0.406504,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[12.8571,7.34848,5.36232,4.30556,3.6,3.14103,2.71605,2.38095,2.12644,1.88889,1.66667,1.51042,1.36364,1.27451,1.19048,1.06481,0.945946,0.877193,0.811966,0.666667,0.569106,0.436508,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[12.4603,7.04545,5.07246,4.02778,3.4,2.88462,2.53086,2.2619,2.01149,1.83333,1.6129,1.51042,1.36364,1.27451,1.14286,1.06481,0.990991,0.921053,0.854701,0.75,0.650407,0.515873,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[11.9841,6.74242,4.78261,3.75,3.13333,2.69231,2.40741,2.14286,1.89655,1.77778,1.6129,1.45833,1.31313,1.22549,1.19048,1.06481,0.990991,0.921053,0.811966,0.75,0.650407,0.0396825,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[11.5873,6.43939,4.49275,3.54167,2.93333,2.5,2.22222,1.96429,1.78161,1.66667,1.50538,1.35417,1.26263,1.17647,1.14286,1.01852,0.990991,0.877193,0.769231,0.583333,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[11.1905,6.06061,4.27536,3.26389,2.73333,2.30769,2.03704,1.84524,1.66667,1.55556,1.45161,1.30208,1.21212,1.12745,1.04762,0.972222,0.855856,0.526316,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[10.7937,5.60606,3.91304,3.05556,2.46667,2.11538,1.85185,1.66667,1.55172,1.44444,1.29032,1.19792,1.11111,1.02941,0.809524,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[10.2381,5.15152,3.55072,2.77778,2.26667,1.92308,1.7284,1.54762,1.43678,1.27778,1.12903,0.885417
,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[9.60317,4.62121,3.18841,2.43056,2,1.73077,1.54321,1.36905,1.03448,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[8.88889,4.09091,2.68116,2.08333,1.66667,1.34615,0.246914,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[7.93651,3.40909,2.24638,1.59722,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[6.8254,2.65152,1.30435,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[5.39683,1.21212,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[3.65079,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[1.34921,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]]
NM=30
Nz=60
M = mass_scale_f * m.exp(lnM)
i=0
if M < Mlist[0]:
i = 1
print "Warning - mass below minimal mass in function!"
elif M > Mlist[NM-1]:
i = NM - 1
print "Warning - mass above maximal mass in function!"
else:
while Mlist[i] <= M and i < NM-1:
i += 1
j = 0
if z < zlist[0]:
j = 1
print "Warning - redshift below minimal redshift in function!"
elif z > zlist[Nz-1]:
j = Nz - 1
print "Warning - redshift above maximal redshift in function!"
else:
while zlist[j] <= z and j < Nz-1:
j += 1
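    # Bilinear interpolation: weight the four surrounding grid values by the
    # opposite rectangle areas, then normalise by the full cell area below.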
life = obslife[i-1][j-1] * (Mlist[i] - M) * (zlist[j] - z)
life += obslife[i][j-1] * (M-Mlist[i-1]) * (zlist[j] - z)
life += obslife[i-1][j] * (Mlist[i]-M) * (z - zlist[j-1])
life += obslife[i][j] * (M-Mlist[i-1]) * (z - zlist[j-1])
life /= (Mlist[i] - Mlist[i-1]) * (zlist[j] - zlist[j-1])
return life
# Generate a vector of values which will be multiplied with the corresponding EMRI rate in each bin.
# These factors represent the differential comoving volumes and observable lifetimes.
volume_obslife_factors = list()
for k in range(lnM_nbins):
for j in range(z_nbins):
lnM_eff = lnM_lower + k*dlnM + dlnM/2 # add half of the bin width to get the midpoint
z_eff = z_lower + j*dz + dz/2
volume_obslife_factors.append(ObsLifeSpin4(lnM_eff, z_eff) * Planck15.differential_comoving_volume(z_eff).value / 10e9 * 4 * m.pi)
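# A minimal usage sketch (hypothetical, not part of the original file): the
# expected number of events in bin (k, j) for a trial parameter vector
# mu = (A, alpha) is the binned rate times the matching precomputed factor.
mu_example = [1.0, -0.15]  # hypothetical values of A and alpha
expected_counts = []
for k in range(lnM_nbins):
    for j in range(z_nbins):
        idx = k * z_nbins + j  # same ordering as the loop that built the factors
        expected_counts.append(rate(mu_example, lnM_lower + k*dlnM, z_lower + j*dz)
                               * volume_obslife_factors[idx])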
| 81.1
| 5,398
| 0.638923
| 2,936
| 9,732
| 2.10218
| 0.157357
| 0.480233
| 0.708684
| 0.929358
| 0.338302
| 0.282242
| 0.267012
| 0.246922
| 0.246922
| 0.246922
| 0
| 0.408833
| 0.076346
| 9,732
| 119
| 5,399
| 81.781513
| 0.277784
| 0.204172
| 0
| 0.033898
| 0
| 0
| 0.025984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.050847
| null | null | 0.067797
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
feae02da5ca7299592abb6736725af423beec9d0
| 141,854
|
py
|
Python
|
pmagpy/gufm.py
|
schwehr/PmagPy
|
5e9edc5dc9a7a243b8e7f237fa156e0cd782076b
|
[
"BSD-3-Clause"
] | 2
|
2020-07-05T01:11:33.000Z
|
2020-07-05T01:11:39.000Z
|
pmagpy/gufm.py
|
schwehr/PmagPy
|
5e9edc5dc9a7a243b8e7f237fa156e0cd782076b
|
[
"BSD-3-Clause"
] | null | null | null |
pmagpy/gufm.py
|
schwehr/PmagPy
|
5e9edc5dc9a7a243b8e7f237fa156e0cd782076b
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import print_function
import sys
def coeffs(date):
if date < 1600:
print("date too early")
sys.exit()
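    # The branches below step through the model's five-year epochs (1600,
    # 1605, 1610, ...), each returning the full hard-coded coefficient list
    # valid for dates up to the next epoch boundary.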
elif date < 1605:
return [ -35862, -2821, 1515, 664, 2117, 2425, -2024, -3206, 1369, -828, -828, 831, -275, -105, -451, 389, 287, -14, 322, -727, 221, -185, 236, 91, 26, 124, -12, 111, -21, -225, -31, 86, -62, 330, 135, 43, -11, -14, 72, 4, -0, -16, -90, -16, 57, 0, 43, 91, 15, 0, -0, 7, -7, 12, -8, -2, -12, -18, 28, 21, 1, -31, -9, 1, 1, 1, -1, -2, 3, -1, -0, -5, -3, 2, -7, 8, 3, -2, -1, -18, 0, 0, 0, -0, -0, 0, -1, 0, 0, 0, -2, -3, 1, 1, 2, -1, -1, 2, -2, 0, 0, -0, 0, 0, -0, -0, 0, 0, -0, -0, 0, -0, -0, -0, 0, 1, -1, -1, 0, 0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, -0, -0, 0, -0, 0, -0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, -0]
elif date < 1610:
return [ -35786, -2918, 1669, 556, 2086, 2353, -2135, -3113, 1297, -799, -825, 798, -268, -121, -425, 379, 303, -0, 327, -737, 227, -186, 243, 72, 33, 128, -14, 115, -18, -233, -18, 83, -57, 337, 119, 43, -10, -14, 72, 5, -9, -19, -87, -8, 49, -1, 45, 88, 14, 1, -0, 6, -6, 11, -8, -3, -13, -14, 29, 18, 1, -31, -5, 1, 1, 1, -1, -2, 3, -1, -0, -4, -3, 2, -7, 7, 2, -1, -2, -17, 0, 0, 0, -0, -0, 0, -1, -0, 0, 0, -1, -2, 1, 1, 1, -1, -1, 2, -2, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, -0, 0, -0, -0, -0, 0, 1, -1, -1, 0, 0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, -0, -0, 0, -0, 0, -0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, -0]
elif date < 1615:
return [ -35709, -3001, 1817, 454, 2050, 2281, -2223, -3016, 1226, -772, -821, 766, -261, -136, -396, 369, 318, 13, 332, -748, 233, -188, 249, 52, 40, 132, -16, 120, -17, -241, -6, 79, -52, 343, 103, 43, -9, -14, 71, 5, -17, -23, -85, 0, 40, -2, 47, 85, 13, 1, -0, 5, -6, 11, -8, -4, -14, -10, 30, 15, 0, -31, -1, 1, 1, 1, -1, -2, 3, -1, -0, -4, -2, 2, -6, 7, 2, -0, -3, -16, 0, 0, 0, -0, -0, 0, -1, -0, 0, 0, -1, -2, 1, 1, 1, -2, -1, 2, -2, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, -0, 0, -0, -0, -0, 0, 1, -1, -1, 0, 0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, -0, -0, 0, -0, 0, -0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, -0]
elif date < 1620:
return [ -35632, -3057, 1950, 359, 2013, 2210, -2280, -2912, 1158, -744, -816, 737, -255, -148, -363, 359, 333, 27, 336, -758, 238, -192, 251, 31, 47, 136, -18, 126, -16, -248, 4, 76, -48, 348, 86, 43, -7, -13, 70, 6, -26, -27, -82, 8, 31, -3, 49, 82, 12, 2, -0, 4, -6, 11, -8, -5, -15, -6, 31, 11, -0, -31, 3, 1, 1, 1, -1, -1, 3, -1, -0, -3, -2, 2, -5, 7, 1, 0, -4, -14, -0, 0, 0, -0, -0, 0, -1, -0, 0, 0, -1, -2, 1, 1, 1, -2, -1, 2, -2, -0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, -0, -0, 0, 1, -0, -1, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, 0, 0, 0, -0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, -0, -0, 0, -0, 0, -0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, -0]
elif date < 1625:
return [ -35556, -3083, 2080, 270, 1978, 2143, -2304, -2802, 1093, -715, -809, 712, -249, -158, -326, 350, 348, 41, 340, -767, 243, -197, 251, 10, 54, 140, -20, 132, -15, -254, 14, 72, -44, 352, 69, 43, -6, -13, 69, 7, -35, -31, -79, 16, 23, -4, 50, 79, 11, 2, -0, 3, -6, 10, -8, -6, -16, -2, 31, 8, -1, -31, 8, 0, 2, 0, -1, -1, 3, -1, -0, -3, -2, 1, -5, 6, 1, 1, -4, -13, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -1, -2, 1, 1, 1, -2, -1, 1, -2, -0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, -0, -0, -0, 0, 1, -0, -1, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, 0, 0, 0, -0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, 0, -0, 0, -0, 0, -0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, -0]
elif date < 1630:
return [ -35479, -3095, 2210, 190, 1946, 2085, -2298, -2691, 1032, -686, -801, 691, -242, -163, -287, 341, 362, 55, 344, -777, 249, -203, 249, -11, 61, 144, -21, 140, -15, -259, 23, 68, -41, 353, 52, 42, -5, -13, 68, 7, -43, -35, -75, 23, 14, -5, 51, 76, 10, 3, -0, 2, -6, 10, -8, -8, -17, 2, 32, 5, -1, -31, 12, 0, 2, 0, -1, -1, 3, -1, -0, -2, -1, 1, -4, 6, 0, 2, -5, -12, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -1, -2, 1, 1, 0, -2, -1, 1, -2, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, 0, 0, -0, 0, 1, -0, -1, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, 0, -0, 0, -0, -0]
elif date < 1635:
return [ -35403, -3110, 2338, 118, 1919, 2034, -2270, -2581, 975, -655, -792, 675, -233, -163, -247, 333, 376, 68, 347, -785, 254, -211, 244, -32, 67, 148, -22, 149, -16, -262, 31, 64, -39, 353, 35, 42, -4, -13, 67, 8, -51, -40, -72, 31, 5, -7, 51, 73, 9, 3, -1, 1, -6, 9, -7, -9, -17, 5, 32, 1, -1, -31, 16, 0, 2, 0, -1, -1, 3, -1, -0, -2, -1, 1, -3, 6, -0, 3, -6, -10, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -0, -1, 0, 1, 0, -2, -0, 1, -2, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, 0, 0, -0, 0, 0, -0, -1, 0, 0, 0, -0, -0, 0, 0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, 0, -0, 0, -0, -0]
elif date < 1640:
return [ -35327, -3138, 2460, 55, 1899, 1988, -2231, -2473, 921, -624, -782, 665, -225, -161, -207, 326, 390, 81, 351, -792, 258, -219, 236, -53, 73, 152, -23, 158, -17, -265, 38, 60, -37, 351, 17, 42, -2, -13, 66, 10, -59, -44, -68, 38, -3, -7, 50, 69, 8, 3, -1, -1, -5, 9, -6, -10, -18, 9, 33, -2, -1, -31, 21, 0, 2, 0, -1, -1, 3, -1, -1, -1, -1, 0, -3, 5, -1, 4, -6, -9, -0, 0, 0, 0, 0, 1, -0, -1, 0, -0, -0, -1, 0, 1, -0, -2, -0, 1, -1, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0, -0, -1, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1645:
return [ -35250, -3179, 2579, 3, 1887, 1946, -2193, -2366, 873, -592, -770, 660, -215, -159, -169, 319, 405, 93, 353, -797, 264, -227, 225, -74, 79, 156, -24, 169, -19, -267, 44, 56, -36, 346, 0, 42, -1, -12, 64, 11, -67, -49, -64, 45, -11, -8, 49, 66, 7, 4, -1, -2, -5, 8, -6, -12, -19, 13, 33, -6, -1, -31, 25, 1, 2, 0, -1, -1, 3, -1, -1, -1, -1, 0, -2, 5, -1, 5, -7, -8, -0, 0, 0, 0, 0, 1, -0, -1, 0, -0, -0, -1, 0, 1, -1, -2, -0, 1, -1, -0, -0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, 0, 0, -0, -1, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, 0, 0, -0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1650:
return [ -35174, -3225, 2701, -38, 1883, 1909, -2160, -2259, 829, -560, -757, 660, -204, -156, -134, 313, 420, 104, 355, -802, 268, -235, 211, -94, 84, 160, -25, 181, -21, -267, 48, 52, -35, 340, -17, 42, -0, -12, 63, 12, -75, -53, -60, 51, -19, -8, 47, 62, 7, 4, -1, -3, -5, 7, -5, -13, -20, 16, 32, -9, -1, -30, 29, 1, 2, 0, -0, -1, 3, -1, -1, -0, -0, -0, -2, 4, -2, 6, -7, -7, -0, 0, 0, 0, 0, 1, 0, -1, 0, -1, 0, -1, 0, 1, -1, -2, -0, 0, -1, -0, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, 0, -0, -1, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1655:
return [ -35097, -3266, 2824, -66, 1886, 1876, -2129, -2151, 790, -528, -743, 663, -193, -153, -101, 308, 435, 114, 356, -805, 272, -243, 195, -114, 89, 165, -26, 193, -23, -266, 52, 47, -34, 332, -34, 42, 1, -12, 61, 13, -83, -58, -56, 58, -27, -8, 45, 58, 6, 4, -1, -5, -4, 7, -3, -15, -20, 20, 32, -12, 0, -30, 33, 1, 2, 0, -0, -1, 2, -1, -1, 0, -0, -0, -2, 4, -2, 7, -8, -5, -1, 0, 0, 0, 0, 1, 0, -1, 0, -1, 0, -0, 0, 1, -1, -2, 0, 0, -1, -0, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, 0, -0, -1, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1660:
return [ -35021, -3303, 2933, -80, 1895, 1847, -2100, -2044, 756, -495, -727, 669, -179, -149, -68, 304, 450, 123, 357, -808, 274, -250, 175, -135, 93, 170, -27, 207, -26, -264, 54, 42, -33, 322, -50, 42, 2, -11, 60, 15, -90, -63, -52, 64, -34, -8, 42, 55, 6, 4, -1, -6, -4, 6, -2, -17, -21, 23, 31, -16, 1, -29, 37, 1, 2, 0, 0, -1, 2, -0, -1, 1, 0, -0, -1, 3, -2, 8, -8, -4, -1, 1, 0, 1, 0, 1, 0, -1, 0, -1, 1, 0, 0, 1, -2, -2, 0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, 0, -0, 0, -0, -1, 1, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1665:
return [ -34945, -3342, 3017, -81, 1911, 1821, -2074, -1940, 729, -462, -712, 676, -162, -146, -37, 300, 467, 132, 358, -809, 274, -255, 154, -154, 97, 175, -27, 222, -29, -261, 55, 37, -32, 311, -66, 42, 4, -11, 58, 17, -97, -67, -48, 70, -41, -7, 39, 51, 6, 4, -1, -7, -4, 6, -1, -18, -21, 26, 30, -19, 2, -28, 40, 1, 2, 0, 0, -1, 2, -0, -1, 1, 0, -1, -1, 2, -3, 9, -9, -3, -1, 1, 0, 1, 0, 1, 1, -2, 0, -1, 1, 0, 0, 1, -2, -2, 0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, 0, -1, 0, -0, 0, 1, 0, -0, -0, -0, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1670:
return [ -34868, -3387, 3092, -69, 1933, 1794, -2051, -1835, 706, -428, -697, 687, -142, -144, -4, 298, 483, 139, 359, -810, 273, -259, 130, -173, 100, 181, -29, 238, -32, -256, 55, 32, -32, 298, -82, 42, 5, -10, 57, 18, -103, -71, -44, 75, -47, -6, 35, 47, 6, 4, -2, -9, -3, 5, 1, -20, -21, 29, 29, -22, 3, -28, 44, 2, 2, 0, 0, -1, 2, 0, -1, 2, 0, -1, -0, 2, -3, 10, -9, -2, -1, 1, 0, 1, 0, 1, 1, -2, 0, -1, 1, 1, 0, 0, -3, -2, 0, -1, 0, -0, -0, -0, 0, -0, 0, 0, -0, 0, -1, 0, -0, 0, 1, 0, -0, -0, 0, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1675:
return [ -34792, -3432, 3176, -45, 1961, 1760, -2032, -1718, 690, -395, -683, 700, -121, -145, 33, 295, 500, 146, 359, -810, 270, -263, 103, -190, 102, 187, -30, 255, -35, -251, 55, 26, -31, 285, -96, 42, 6, -10, 55, 20, -110, -75, -40, 80, -53, -4, 32, 44, 5, 4, -2, -10, -3, 5, 3, -22, -22, 32, 28, -25, 5, -26, 47, 2, 2, 0, 1, -1, 1, 0, -1, 2, 0, -1, -0, 1, -3, 12, -9, -1, -1, 1, -0, 1, 0, 1, 1, -2, 0, -1, 1, 1, 0, 0, -3, -2, 1, -1, 0, -0, -0, -0, 0, -0, 0, 0, -0, 0, -1, 0, -0, 0, 1, 0, -0, -0, 0, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1680:
return [ -34716, -3484, 3267, -7, 1997, 1719, -2018, -1588, 679, -363, -671, 717, -98, -151, 75, 294, 517, 151, 360, -809, 264, -267, 76, -206, 103, 194, -31, 272, -38, -245, 53, 20, -32, 270, -109, 41, 7, -9, 53, 23, -116, -79, -36, 84, -58, -2, 28, 40, 6, 4, -2, -12, -2, 5, 5, -24, -22, 35, 26, -27, 6, -25, 50, 2, 2, 0, 1, -1, 1, 1, -1, 3, 1, -1, -0, 1, -3, 13, -9, -1, -1, 1, -0, 1, 0, 1, 1, -2, 0, -1, 1, 1, 0, 0, -3, -2, 1, -1, 1, -0, -0, -0, 0, -0, 0, 0, -0, 0, -1, 0, -0, 0, 1, 0, -0, -0, 0, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1685:
return [ -34639, -3548, 3343, 43, 2040, 1673, -2009, -1450, 673, -330, -660, 737, -72, -162, 124, 294, 534, 156, 362, -808, 257, -269, 46, -218, 103, 202, -33, 289, -41, -237, 50, 14, -33, 255, -121, 41, 9, -8, 52, 25, -121, -83, -32, 88, -62, -0, 24, 37, 6, 3, -3, -13, -1, 4, 7, -25, -22, 37, 25, -29, 8, -24, 53, 3, 1, 0, 1, -1, 1, 1, -1, 3, 1, -1, 0, -0, -4, 14, -9, 0, -1, 1, -0, 0, 0, 1, 1, -2, 0, -1, 2, 2, 0, 0, -4, -2, 1, -1, 1, -0, -0, -0, 0, -0, 0, 0, -0, 0, -1, 0, -0, 0, 1, 0, -0, -1, 0, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1690:
return [ -34563, -3604, 3399, 103, 2090, 1623, -2000, -1307, 672, -298, -653, 762, -41, -177, 180, 295, 550, 159, 364, -807, 247, -271, 15, -227, 103, 211, -35, 307, -44, -229, 47, 8, -34, 239, -130, 41, 10, -8, 50, 27, -127, -86, -28, 91, -65, 2, 19, 35, 6, 3, -3, -14, -1, 4, 10, -27, -21, 39, 23, -31, 10, -22, 55, 3, 1, 0, 1, -2, 0, 1, -1, 4, 1, -1, 0, -1, -4, 15, -9, 1, -1, 1, -0, 0, 0, 2, 2, -3, 0, -1, 2, 2, 0, 0, -4, -2, 1, -1, 1, -0, -0, -0, 0, -0, 0, 0, -0, 0, -1, 0, -0, 1, 1, 0, -0, -1, 0, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -0, -0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1695:
return [ -34487, -3630, 3455, 169, 2146, 1572, -1984, -1158, 675, -268, -650, 791, -7, -196, 244, 297, 565, 161, 367, -806, 235, -271, -16, -233, 101, 220, -37, 324, -46, -219, 43, 1, -37, 223, -137, 41, 11, -7, 48, 30, -132, -88, -24, 94, -68, 4, 15, 32, 7, 2, -4, -16, -0, 4, 12, -29, -21, 41, 21, -33, 12, -20, 57, 4, 1, 0, 2, -2, -0, 2, -2, 4, 1, -0, 0, -1, -4, 16, -8, 1, -1, 1, -0, 0, -0, 2, 2, -3, 0, -2, 2, 3, 0, 0, -5, -2, 1, -1, 2, -0, -0, -0, 0, -0, 0, 0, -0, 0, -1, 0, -0, 1, 1, 0, -0, -1, 0, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -0, -0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1700:
return [ -34411, -3620, 3517, 237, 2205, 1520, -1959, -1010, 681, -242, -651, 824, 28, -218, 310, 301, 578, 160, 370, -805, 222, -272, -47, -236, 98, 230, -39, 340, -48, -209, 38, -5, -40, 207, -141, 41, 12, -5, 47, 33, -137, -91, -21, 97, -70, 7, 12, 31, 7, 2, -4, -17, 1, 3, 15, -30, -21, 43, 19, -35, 14, -18, 59, 4, 1, 0, 2, -2, -1, 2, -2, 5, 1, -0, 0, -2, -4, 17, -8, 1, -1, 2, -1, 0, -0, 2, 2, -3, 0, -2, 2, 3, 0, 0, -5, -2, 1, -2, 2, -0, -0, -0, 0, -0, -0, 0, -0, 0, -1, 0, -0, 1, 1, 1, -0, -1, 1, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -0, -0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1705:
return [ -34335, -3584, 3583, 304, 2265, 1463, -1929, -871, 689, -221, -656, 862, 60, -242, 378, 305, 589, 157, 372, -803, 209, -273, -76, -234, 95, 240, -42, 355, -50, -198, 33, -12, -43, 191, -143, 41, 14, -4, 45, 35, -142, -92, -17, 99, -72, 9, 8, 30, 8, 1, -5, -18, 1, 3, 18, -32, -20, 45, 17, -36, 16, -15, 60, 5, 0, 0, 2, -3, -1, 3, -2, 5, 1, 0, 0, -2, -3, 18, -7, 1, -2, 2, -1, 0, -0, 2, 3, -4, 0, -2, 3, 3, 0, 0, -6, -2, 1, -2, 2, -0, -0, -0, 0, -0, -0, 0, -0, 0, -1, 0, -0, 1, 1, 1, -0, -1, 1, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1710:
return [ -34259, -3534, 3660, 368, 2324, 1404, -1895, -748, 699, -205, -667, 904, 86, -267, 444, 311, 597, 151, 372, -801, 194, -276, -103, -229, 91, 251, -46, 369, -51, -186, 27, -19, -47, 174, -141, 41, 15, -2, 43, 38, -147, -94, -14, 100, -72, 12, 5, 29, 9, 0, -5, -20, 2, 3, 20, -34, -19, 46, 15, -37, 17, -12, 61, 5, -0, 0, 2, -3, -2, 3, -1, 6, 1, 0, 0, -3, -3, 19, -7, 1, -2, 2, -1, 0, -0, 2, 3, -4, 0, -2, 3, 4, 0, 0, -6, -2, 1, -2, 3, -1, -0, -0, -0, -0, -0, 0, -0, 0, -1, 0, -0, 1, 2, 1, -0, -1, 1, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1715:
return [ -34183, -3493, 3776, 427, 2379, 1346, -1858, -641, 710, -193, -684, 946, 104, -289, 508, 317, 604, 141, 371, -799, 178, -280, -125, -220, 86, 262, -50, 381, -52, -174, 21, -26, -50, 158, -137, 42, 16, 0, 42, 40, -151, -94, -12, 101, -72, 14, 2, 30, 11, -0, -6, -21, 3, 3, 23, -35, -18, 47, 12, -37, 19, -9, 62, 6, -0, 0, 3, -3, -2, 4, -1, 6, 1, 1, -0, -3, -3, 20, -6, 1, -2, 2, -1, 0, -1, 2, 3, -4, 0, -2, 3, 4, 1, 1, -7, -2, 1, -2, 3, -1, -0, -0, -0, -1, -0, 0, 0, 0, -1, 1, -0, 1, 2, 1, -0, -1, 1, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, -0, 0, 1, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0]
elif date < 1720:
return [ -34106, -3467, 3945, 480, 2425, 1290, -1812, -537, 721, -187, -707, 987, 115, -303, 573, 323, 609, 128, 368, -796, 161, -284, -142, -210, 80, 273, -55, 392, -52, -162, 15, -33, -53, 143, -131, 43, 18, 3, 40, 43, -156, -94, -9, 101, -71, 17, 0, 30, 12, -1, -6, -22, 4, 3, 26, -37, -18, 48, 10, -37, 21, -5, 62, 7, -1, 0, 3, -4, -3, 4, -1, 6, 0, 2, -0, -4, -3, 20, -5, 1, -2, 2, -1, -0, -1, 3, 4, -4, 0, -2, 3, 4, 1, 1, -7, -2, 0, -2, 3, -1, -0, -0, -0, -1, -0, 0, 0, 0, -1, 1, -0, 1, 2, 1, 0, -1, 1, -1, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, -0, 0, 1, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1725:
return [ -34030, -3441, 4143, 529, 2458, 1240, -1753, -429, 734, -185, -733, 1025, 122, -310, 637, 330, 612, 111, 364, -791, 144, -288, -153, -199, 74, 284, -61, 403, -52, -150, 9, -40, -54, 130, -125, 44, 19, 6, 38, 45, -161, -94, -7, 100, -70, 21, -1, 31, 14, -2, -7, -23, 5, 3, 29, -39, -17, 48, 8, -37, 22, -1, 62, 7, -1, 0, 3, -4, -3, 5, -1, 7, 0, 2, -0, -4, -2, 21, -4, 0, -2, 2, -1, -0, -1, 3, 4, -5, -0, -2, 4, 5, 1, 1, -7, -2, 0, -2, 4, -1, -0, -0, -0, -1, -0, 0, 0, 0, -2, 1, -0, 1, 2, 1, 0, -1, 1, -2, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 0, -0, 0, 1, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, -0, -0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1730:
return [ -33954, -3400, 4316, 573, 2472, 1194, -1686, -327, 747, -188, -761, 1058, 127, -312, 701, 337, 615, 91, 358, -785, 126, -291, -159, -190, 67, 295, -68, 412, -51, -138, 3, -46, -54, 118, -120, 45, 20, 9, 35, 46, -165, -93, -6, 99, -68, 25, -2, 31, 16, -3, -7, -24, 6, 4, 32, -41, -16, 49, 5, -37, 23, 3, 61, 8, -1, 0, 3, -5, -4, 5, -1, 7, 0, 3, -1, -4, -2, 21, -3, -0, -2, 3, -1, -0, -1, 3, 5, -5, -0, -2, 4, 5, 1, 1, -8, -2, 0, -2, 4, -1, -0, -0, -0, -1, -1, 0, 0, 0, -2, 1, 0, 1, 2, 1, 0, -2, 1, -2, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 0, -0, 0, 1, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1735:
return [ -33878, -3342, 4419, 609, 2464, 1149, -1614, -233, 761, -196, -790, 1084, 132, -313, 762, 344, 616, 69, 351, -777, 109, -292, -163, -183, 60, 305, -75, 420, -51, -127, -2, -50, -52, 107, -115, 47, 21, 13, 33, 48, -169, -92, -5, 97, -65, 30, -1, 31, 18, -4, -8, -25, 7, 4, 35, -42, -15, 49, 3, -36, 25, 7, 59, 9, -2, 0, 4, -6, -5, 6, -1, 7, 0, 4, -1, -4, -1, 21, -2, -1, -2, 3, -2, -0, -1, 3, 5, -5, -0, -2, 5, 5, 1, 1, -8, -2, -0, -3, 4, -1, -0, -0, -0, -1, -1, 0, 0, 0, -2, 1, 0, 1, 2, 1, 0, -2, 1, -2, 1, -0, 0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 1, -0, 0, 1, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1740:
return [ -33803, -3292, 4484, 633, 2442, 1099, -1551, -136, 776, -212, -818, 1104, 138, -315, 822, 352, 617, 47, 341, -766, 92, -291, -164, -178, 52, 315, -84, 427, -51, -117, -7, -54, -49, 99, -111, 49, 22, 16, 30, 49, -173, -91, -4, 95, -61, 35, 0, 30, 20, -5, -8, -26, 8, 4, 37, -44, -14, 48, 1, -34, 26, 11, 57, 9, -2, 0, 4, -6, -5, 7, -1, 7, -0, 5, -1, -4, -0, 21, -1, -2, -2, 3, -2, -1, -1, 3, 5, -6, -0, -2, 5, 6, 1, 1, -8, -2, -0, -3, 5, -1, -0, -0, -0, -1, -1, 0, 0, 0, -2, 1, 0, 1, 3, 1, 0, -2, 1, -2, 1, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 1, 0, 0, 1, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1745:
return [ -33727, -3255, 4538, 640, 2411, 1043, -1497, -29, 791, -236, -844, 1119, 145, -318, 880, 360, 617, 23, 330, -753, 78, -290, -165, -176, 43, 324, -93, 434, -50, -108, -12, -57, -46, 92, -108, 51, 24, 20, 27, 50, -176, -89, -3, 92, -57, 40, 2, 29, 23, -5, -8, -27, 9, 4, 40, -46, -13, 47, -2, -33, 27, 16, 55, 10, -3, -0, 4, -7, -6, 7, -1, 8, -0, 5, -1, -4, 0, 20, -0, -3, -3, 3, -2, -1, -2, 3, 6, -6, -1, -2, 5, 6, 2, 1, -9, -2, -0, -3, 5, -1, -0, -0, -0, -1, -1, 0, 0, 1, -2, 1, 0, 1, 3, 1, 0, -2, 1, -2, 1, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, 0, 0, -0, 1, 0, 0, 1, -0, -0, -0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1750:
return [ -33651, -3213, 4604, 634, 2376, 988, -1441, 92, 808, -267, -866, 1132, 154, -318, 935, 369, 617, 1, 317, -737, 67, -287, -165, -176, 35, 333, -104, 440, -49, -99, -16, -58, -42, 86, -105, 53, 25, 24, 23, 51, -179, -88, -3, 89, -52, 45, 4, 28, 25, -6, -8, -28, 10, 5, 43, -48, -13, 46, -4, -31, 27, 20, 52, 11, -3, -0, 5, -7, -7, 8, -1, 8, -1, 6, -2, -4, 1, 20, 0, -4, -3, 3, -2, -1, -2, 4, 6, -6, -1, -2, 6, 6, 2, 2, -9, -2, -1, -3, 5, -1, -0, -0, -0, -1, -1, 0, 0, 1, -2, 1, 0, 1, 3, 1, 0, -2, 1, -2, 1, -0, 0, -0, 0, 0, -0, -1, 0, 0, 0, 0, 0, -0, 1, 0, 0, 1, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, -0, 0, -0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1755:
return [ -33575, -3172, 4671, 617, 2341, 939, -1381, 215, 826, -305, -884, 1143, 165, -314, 981, 379, 616, -20, 305, -719, 59, -285, -164, -178, 26, 341, -115, 446, -48, -92, -19, -59, -39, 82, -101, 56, 26, 27, 19, 51, -182, -86, -3, 85, -47, 51, 7, 27, 28, -7, -9, -29, 11, 5, 45, -50, -12, 45, -7, -28, 27, 24, 49, 12, -3, -0, 5, -8, -8, 9, -0, 8, -1, 7, -2, -4, 2, 19, 1, -5, -3, 4, -2, -1, -2, 4, 7, -6, -1, -2, 6, 6, 2, 2, -9, -2, -1, -3, 5, -1, -0, -0, -0, -1, -1, 0, 0, 1, -2, 1, 0, 1, 3, 1, 0, -2, 1, -2, 1, -0, 0, -0, 0, 0, -0, -1, 0, 0, 0, 0, 0, -0, 1, 0, 0, 1, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, -0, 0, -0, 0, -0, -0, -0, -0, 0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1760:
return [ -33499, -3160, 4726, 590, 2312, 901, -1319, 326, 844, -347, -897, 1153, 181, -304, 1015, 390, 615, -40, 293, -699, 52, -281, -163, -181, 18, 348, -126, 450, -47, -85, -22, -60, -37, 79, -97, 59, 27, 31, 15, 52, -184, -83, -3, 80, -41, 55, 8, 27, 31, -7, -9, -30, 12, 5, 48, -51, -12, 43, -9, -26, 27, 27, 45, 12, -3, -1, 5, -9, -9, 9, -0, 8, -1, 8, -2, -4, 3, 18, 1, -6, -3, 4, -3, -1, -2, 4, 7, -7, -1, -2, 7, 7, 3, 2, -9, -2, -1, -4, 6, -1, -0, -0, -1, -1, -1, -0, 1, 0, -2, 1, 0, 2, 3, 1, 0, -2, 1, -1, 1, -0, 0, -0, 0, 0, -0, -1, 0, 1, 0, 0, -0, -0, 1, 0, 0, 1, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1765:
return [ -33424, -3194, 4797, 556, 2295, 880, -1253, 417, 863, -392, -906, 1163, 203, -283, 1033, 403, 613, -56, 283, -679, 48, -275, -160, -186, 10, 355, -138, 454, -46, -78, -24, -60, -36, 76, -91, 61, 28, 34, 10, 53, -185, -81, -3, 75, -36, 59, 10, 26, 34, -8, -9, -31, 13, 5, 50, -53, -12, 41, -11, -23, 27, 30, 41, 13, -4, -1, 5, -9, -9, 10, -0, 8, -1, 9, -3, -4, 4, 17, 1, -7, -3, 4, -3, -1, -3, 4, 8, -7, -1, -2, 7, 7, 3, 2, -9, -2, -1, -4, 6, -1, -0, -0, -1, -1, -2, -0, 1, 0, -2, 1, 0, 2, 3, 1, 1, -2, 1, -1, 1, -0, 0, -1, 0, 0, -0, -1, 0, 1, 0, 0, -0, -0, 1, 0, 0, 1, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0]
elif date < 1770:
return [ -33348, -3242, 4889, 516, 2288, 870, -1176, 498, 881, -441, -907, 1172, 228, -254, 1038, 418, 610, -68, 275, -658, 43, -268, -155, -193, 3, 360, -150, 457, -44, -72, -26, -61, -37, 75, -84, 64, 30, 37, 5, 54, -185, -78, -3, 70, -31, 62, 10, 27, 37, -8, -9, -32, 13, 6, 53, -54, -13, 39, -13, -21, 26, 33, 36, 14, -4, -1, 6, -10, -10, 11, -1, 7, -2, 10, -3, -4, 5, 16, 1, -8, -3, 4, -3, -1, -3, 4, 8, -7, -2, -2, 8, 7, 3, 2, -10, -2, -2, -4, 6, -1, -0, -0, -1, -1, -2, -0, 1, 0, -2, 2, 1, 2, 3, 1, 1, -2, 1, -1, 1, -0, 0, -1, 0, 0, -0, -1, 0, 1, 0, 0, -0, -0, 1, 0, 0, 1, -0, -0, -0, -1, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, -0, -0, -0, -0, -0, -0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0]
elif date < 1775:
return [ -33272, -3293, 4996, 465, 2296, 864, -1093, 576, 900, -495, -899, 1180, 249, -226, 1030, 436, 607, -75, 269, -636, 39, -262, -152, -200, -4, 365, -163, 459, -42, -67, -28, -63, -41, 76, -75, 66, 31, 39, -0, 55, -184, -75, -4, 64, -26, 64, 9, 27, 40, -9, -9, -32, 14, 6, 55, -55, -13, 36, -15, -18, 25, 34, 32, 14, -4, -1, 6, -11, -11, 12, -1, 7, -2, 11, -4, -4, 6, 14, 1, -8, -3, 4, -3, -2, -3, 4, 8, -7, -2, -2, 8, 7, 4, 3, -10, -2, -2, -4, 7, -1, -0, -0, -1, -1, -2, -0, 1, 0, -2, 2, 1, 2, 4, 1, 1, -2, 2, -1, 2, -0, 0, -1, 0, 1, -0, -1, 0, 1, 0, 0, -0, -0, 1, 0, 0, 1, -0, -0, -0, -1, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, -0, -0, 0, -0, -0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0]
elif date < 1780:
return [ -33197, -3326, 5086, 407, 2318, 855, -1013, 653, 921, -555, -881, 1187, 261, -205, 1016, 456, 605, -76, 265, -614, 34, -258, -151, -208, -10, 369, -175, 461, -40, -63, -29, -64, -45, 77, -64, 69, 33, 40, -6, 57, -182, -71, -4, 58, -20, 66, 8, 29, 43, -9, -9, -33, 15, 6, 58, -56, -14, 33, -16, -15, 24, 35, 27, 15, -4, -1, 6, -11, -12, 13, -1, 7, -2, 12, -4, -4, 7, 13, -0, -9, -4, 4, -3, -2, -3, 5, 9, -7, -2, -2, 9, 7, 4, 3, -10, -2, -2, -5, 7, -1, -0, -0, -1, -1, -2, -0, 1, 0, -2, 2, 1, 2, 4, 1, 1, -2, 2, -1, 2, 0, 0, -1, 0, 1, -0, -1, 0, 1, 0, 0, -0, -0, 1, 0, 0, 1, -0, -0, -0, -1, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, -0, -0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0]
elif date < 1785:
return [ -33121, -3312, 5167, 350, 2349, 838, -951, 721, 942, -618, -854, 1198, 263, -188, 1003, 477, 603, -72, 262, -592, 28, -259, -153, -215, -16, 372, -188, 463, -37, -60, -29, -65, -52, 78, -52, 71, 35, 41, -13, 59, -179, -66, -5, 51, -16, 66, 6, 31, 46, -10, -8, -33, 16, 6, 60, -57, -14, 29, -17, -12, 22, 36, 22, 15, -4, -2, 6, -12, -13, 14, -1, 6, -2, 13, -4, -4, 8, 11, -1, -10, -4, 4, -4, -2, -4, 5, 9, -7, -2, -2, 9, 7, 5, 3, -10, -1, -3, -5, 7, -1, -1, -0, -1, -1, -2, -0, 1, 0, -2, 2, 1, 2, 4, 2, 1, -2, 2, -1, 2, 0, 0, -1, 0, 1, -0, -1, 0, 1, 0, 0, -0, -0, 1, 0, 0, 1, -1, 0, -0, -1, 0, 0, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, -0, 0, -0, 0, -0, -0, -0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, -0, -0, 0, 0, -0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, -0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, -0]
elif date < 1790:
return [ -33046, -3279, 5291, 298, 2387, 804, -898, 791, 957, -680, -822, 1213, 260, -167, 996, 499, 601, -64, 259, -569, 21, -262, -156, -218, -20, 374, -199, 466, -34, -58, -28, -65, -58, 78, -38, 72, 37, 41, -19, 61, -174, -61, -6, 44, -11, 66, 4, 33, 49, -10, -8, -33, 16, 6, 63, -57, -15, 25, -18, -10, 19, 35, 16, 16, -4, -2, 6, -13, -13, 15, -2, 6, -2, 14, -5, -4, 9, 9, -2, -10, -4, 5, -4, -2, -4, 5, 9, -7, -2, -1, 10, 7, 5, 3, -10, -1, -3, -6, 8, -2, -1, -0, -1, -1, -3, -1, 1, 0, -2, 2, 1, 2, 4, 2, 1, -2, 2, -1, 2, 0, 0, -1, 0, 1, 0, -1, 0, 1, 0, 0, -0, -0, 1, 0, 0, 1, -1, 0, -0, -1, 0, 0, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, 0]
elif date < 1795:
return [ -32970, -3235, 5460, 260, 2433, 746, -828, 891, 965, -740, -793, 1234, 254, -135, 995, 520, 600, -52, 256, -547, 12, -264, -155, -218, -23, 375, -211, 469, -32, -56, -26, -62, -66, 79, -25, 73, 40, 41, -26, 64, -168, -56, -7, 35, -6, 65, 2, 34, 52, -10, -8, -33, 17, 6, 65, -57, -16, 21, -19, -8, 16, 35, 11, 16, -4, -2, 6, -14, -14, 16, -2, 5, -2, 15, -5, -4, 9, 7, -4, -10, -4, 5, -4, -2, -4, 5, 10, -7, -2, -1, 11, 7, 6, 4, -10, -1, -3, -6, 8, -2, -1, -0, -1, -1, -3, -1, 2, 0, -2, 2, 1, 2, 4, 1, 1, -2, 2, -1, 1, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, 0, -0, -0, 1, 1, 0, 1, -1, 0, -0, -1, 1, 0, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, -0, 0, -0, 0]
elif date < 1800:
return [ -32894, -3181, 5582, 241, 2474, 668, -750, 997, 966, -794, -766, 1258, 250, -97, 994, 540, 599, -39, 256, -525, -0, -264, -151, -215, -26, 376, -221, 472, -30, -54, -23, -59, -73, 79, -11, 74, 43, 40, -33, 67, -161, -50, -9, 26, -1, 64, 1, 36, 54, -11, -8, -33, 17, 7, 67, -57, -18, 16, -19, -6, 13, 34, 6, 17, -4, -2, 6, -14, -15, 17, -2, 4, -2, 15, -6, -3, 10, 4, -6, -10, -4, 5, -4, -2, -4, 5, 10, -7, -3, -1, 11, 7, 6, 4, -10, -1, -4, -6, 8, -2, -1, -0, -1, -1, -3, -1, 2, -0, -2, 3, 1, 2, 4, 1, 1, -2, 2, -0, 2, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, 1, -0, -0, 1, 1, 0, 1, -1, 0, -0, -1, 1, 0, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0]
elif date < 1805:
return [ -32819, -3144, 5651, 236, 2510, 583, -676, 1088, 963, -843, -741, 1281, 250, -56, 988, 560, 599, -22, 259, -502, -17, -260, -143, -209, -29, 377, -231, 475, -27, -53, -20, -55, -79, 78, 2, 73, 45, 38, -40, 71, -155, -44, -10, 16, 3, 63, -1, 36, 57, -12, -8, -33, 17, 7, 69, -56, -19, 12, -19, -4, 10, 32, 1, 17, -4, -2, 6, -15, -16, 18, -2, 4, -2, 16, -6, -3, 10, 2, -7, -9, -4, 5, -4, -2, -4, 5, 10, -7, -3, -1, 12, 7, 6, 4, -9, -1, -4, -7, 9, -2, -1, -0, -1, -1, -3, -1, 2, -0, -2, 3, 1, 2, 4, 1, 1, -2, 2, -0, 2, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, 1, -0, -0, 2, 1, 0, 1, -1, 0, -0, -1, 1, 0, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0]
elif date < 1810:
return [ -32744, -3133, 5695, 238, 2550, 500, -609, 1159, 959, -888, -715, 1301, 253, -14, 976, 579, 599, -2, 264, -480, -36, -255, -134, -203, -32, 378, -240, 478, -25, -52, -16, -52, -85, 78, 15, 72, 48, 36, -46, 74, -148, -38, -12, 7, 7, 63, -2, 36, 59, -13, -8, -33, 17, 8, 71, -55, -21, 7, -19, -3, 7, 31, -4, 17, -4, -2, 6, -16, -16, 18, -3, 3, -1, 17, -6, -3, 11, -0, -9, -9, -4, 5, -4, -2, -4, 5, 10, -7, -3, -1, 12, 7, 7, 4, -9, -1, -5, -7, 9, -2, -1, 0, -1, -1, -3, -1, 2, -0, -2, 3, 1, 2, 4, 1, 1, -2, 2, 0, 2, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, 1, -0, -0, 2, 1, 0, 2, -1, 0, -0, -1, 1, 1, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0]
elif date < 1815:
return [ -32668, -3120, 5738, 241, 2596, 425, -545, 1216, 955, -931, -687, 1316, 257, 26, 963, 598, 599, 21, 272, -457, -58, -248, -122, -195, -35, 379, -247, 481, -21, -52, -13, -50, -90, 77, 28, 70, 51, 34, -52, 78, -141, -32, -14, -3, 11, 62, -3, 35, 61, -14, -8, -33, 17, 9, 72, -55, -23, 2, -19, -2, 4, 29, -8, 18, -4, -2, 6, -16, -17, 19, -3, 2, -1, 18, -7, -3, 11, -2, -11, -8, -4, 5, -4, -2, -4, 5, 10, -6, -3, -0, 13, 7, 7, 4, -9, -2, -5, -7, 10, -2, -1, 0, -1, -1, -3, -1, 2, -0, -2, 3, 1, 2, 5, 1, 1, -2, 2, 0, 2, 2, 0, -1, 0, 1, 0, -1, 0, 1, 0, 1, -0, -0, 2, 1, 0, 2, -1, 0, -0, -1, 1, 1, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0]
elif date < 1820:
return [ -32593, -3100, 5765, 237, 2644, 355, -474, 1260, 952, -972, -657, 1326, 257, 62, 952, 615, 598, 44, 282, -433, -80, -241, -109, -187, -38, 379, -254, 483, -18, -52, -10, -50, -94, 78, 40, 67, 54, 31, -58, 81, -136, -27, -16, -12, 13, 62, -4, 33, 63, -15, -8, -32, 16, 11, 73, -53, -25, -2, -18, -1, 1, 28, -12, 18, -4, -2, 5, -17, -18, 20, -3, 2, -1, 18, -7, -3, 11, -4, -12, -7, -4, 5, -4, -3, -4, 5, 10, -6, -3, -0, 13, 7, 8, 4, -9, -2, -5, -7, 10, -2, -1, 0, -1, -1, -3, -1, 2, -1, -2, 3, 2, 2, 5, 1, 1, -2, 2, 1, 2, 2, 0, -1, 0, 1, 0, -1, 0, 1, 0, 1, -0, -0, 2, 1, 0, 2, -1, 0, -0, -1, 1, 1, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0]
elif date < 1825:
return [ -32518, -3070, 5757, 223, 2687, 286, -385, 1286, 950, -1012, -624, 1330, 251, 92, 944, 631, 595, 69, 294, -410, -104, -236, -95, -179, -43, 378, -261, 484, -13, -54, -8, -51, -97, 79, 50, 63, 56, 28, -64, 85, -131, -23, -20, -21, 15, 62, -6, 30, 65, -16, -9, -32, 16, 12, 74, -52, -27, -7, -17, -1, -1, 26, -16, 18, -3, -2, 4, -18, -18, 20, -3, 1, -1, 19, -8, -3, 10, -6, -14, -6, -4, 6, -4, -3, -4, 5, 10, -6, -3, 0, 14, 6, 8, 4, -8, -2, -6, -7, 10, -2, -1, 0, -1, -1, -4, -1, 3, -1, -1, 3, 2, 2, 5, 1, 1, -2, 2, 1, 2, 2, 0, -1, 0, 1, 0, -1, 0, 1, -0, 1, -0, -0, 2, 1, 0, 2, -1, 1, -0, -1, 1, 1, -0, 0, -0, 0, -0, -0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1830:
return [ -32443, -3031, 5744, 197, 2722, 217, -295, 1289, 953, -1049, -591, 1332, 237, 116, 939, 646, 591, 92, 309, -388, -126, -232, -82, -172, -48, 377, -266, 484, -9, -57, -7, -53, -99, 82, 60, 59, 58, 26, -68, 88, -127, -19, -24, -29, 16, 63, -9, 26, 66, -18, -10, -32, 15, 14, 73, -51, -29, -11, -16, -2, -3, 25, -20, 18, -3, -2, 4, -18, -19, 20, -3, 0, -1, 20, -8, -3, 10, -8, -15, -4, -4, 6, -4, -3, -4, 5, 10, -5, -3, 1, 14, 6, 8, 3, -8, -2, -6, -7, 11, -2, -1, 0, -1, -1, -4, -1, 3, -1, -1, 4, 2, 2, 5, 1, 0, -2, 2, 1, 2, 3, 0, -1, 0, 1, 1, -1, 0, 1, -0, 1, -0, -0, 2, 1, 0, 2, -1, 1, -0, -1, 1, 1, -0, 0, -0, 0, -0, 0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1835:
return [ -32367, -2944, 5750, 161, 2743, 143, -217, 1291, 966, -1079, -558, 1334, 223, 147, 936, 660, 588, 114, 328, -367, -146, -230, -71, -164, -54, 375, -271, 482, -6, -59, -8, -55, -101, 86, 66, 53, 60, 23, -73, 90, -125, -17, -28, -37, 17, 63, -13, 21, 67, -20, -11, -31, 15, 16, 72, -49, -30, -15, -15, -2, -4, 24, -23, 18, -3, -1, 3, -19, -20, 20, -3, -0, -1, 20, -9, -3, 9, -9, -16, -3, -4, 6, -4, -3, -4, 4, 10, -5, -3, 1, 14, 6, 8, 3, -7, -2, -6, -7, 11, -2, -0, 0, -1, -0, -4, -1, 3, -1, -1, 4, 2, 2, 4, 1, 0, -2, 2, 1, 2, 3, 0, -1, 0, 1, 1, -1, 0, 1, -0, 1, -0, -0, 2, 1, 0, 2, -1, 1, -0, -1, 1, 1, -0, 0, -0, 0, -0, 0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1840:
return [ -32290, -2847, 5784, 117, 2748, 85, -127, 1318, 993, -1103, -520, 1338, 207, 199, 939, 674, 587, 131, 352, -348, -161, -231, -58, -156, -61, 373, -275, 477, -5, -58, -10, -56, -102, 90, 69, 48, 62, 22, -77, 92, -125, -17, -32, -44, 18, 64, -17, 15, 68, -23, -12, -31, 14, 17, 71, -48, -32, -19, -14, -3, -4, 23, -25, 18, -2, -1, 2, -19, -20, 20, -2, -1, -0, 20, -9, -2, 8, -10, -16, -1, -4, 6, -4, -2, -3, 4, 9, -5, -3, 1, 15, 5, 9, 2, -7, -3, -6, -7, 12, -2, -0, 1, -1, -0, -4, -1, 3, -1, -1, 4, 2, 2, 4, 1, 0, -2, 2, 1, 2, 3, 0, -1, 0, 1, 1, -1, 0, 1, -0, 1, -0, -0, 2, 1, -0, 2, -0, 1, 0, -1, 1, 1, -0, 0, -0, 0, -0, 0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, -0, 0, -0, 0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1845:
return [ -32215, -2804, 5797, 54, 2749, 46, -46, 1357, 1033, -1127, -484, 1353, 185, 262, 936, 690, 588, 144, 375, -331, -169, -234, -45, -145, -68, 371, -278, 469, -8, -54, -11, -56, -104, 94, 69, 43, 65, 20, -81, 92, -126, -17, -35, -50, 19, 66, -21, 9, 68, -25, -13, -30, 13, 19, 68, -46, -34, -23, -13, -4, -4, 22, -27, 17, -2, -1, 1, -19, -20, 20, -2, -1, -0, 20, -10, -2, 7, -10, -16, 0, -3, 6, -4, -2, -3, 4, 9, -4, -3, 2, 15, 5, 9, 2, -6, -3, -6, -7, 12, -2, -0, 1, -1, -0, -3, -1, 3, -1, -0, 4, 2, 2, 4, 0, 0, -1, 2, 1, 2, 3, 0, -1, 0, 1, 1, -1, 0, 1, -0, 1, -1, -0, 2, 1, -0, 2, -0, 1, 0, -1, 1, 1, -0, 0, -0, 0, -0, 0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1850:
return [ -32163, -2745, 5792, -9, 2760, -1, 4, 1366, 1072, -1148, -450, 1375, 158, 317, 917, 709, 590, 153, 398, -316, -173, -235, -30, -131, -75, 369, -281, 457, -12, -47, -10, -55, -106, 97, 66, 38, 67, 20, -83, 91, -129, -19, -38, -57, 21, 67, -24, 2, 68, -27, -15, -29, 11, 21, 65, -45, -36, -26, -12, -5, -4, 22, -28, 17, -1, -0, -0, -20, -21, 19, -1, -2, -0, 20, -11, -2, 6, -10, -15, 2, -3, 7, -4, -2, -2, 3, 8, -4, -3, 2, 15, 4, 8, 1, -5, -3, -6, -6, 13, -3, -0, 1, -1, 0, -3, -1, 3, -1, 0, 4, 2, 2, 4, 0, 0, -1, 2, 2, 2, 3, 0, -1, 0, 1, 1, -1, 0, 1, -0, 1, -1, -0, 2, 1, -0, 2, -0, 1, 0, -1, 1, 1, -0, 0, -0, 0, -0, 0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1855:
return [ -32148, -2700, 5826, -63, 2792, -89, 49, 1379, 1105, -1170, -419, 1403, 133, 363, 887, 729, 595, 161, 420, -301, -180, -232, -14, -117, -83, 368, -283, 442, -16, -37, -9, -53, -108, 102, 61, 35, 69, 21, -84, 90, -133, -21, -42, -62, 23, 68, -26, -4, 68, -29, -17, -28, 10, 23, 61, -43, -38, -29, -11, -6, -3, 23, -30, 17, -0, 0, -1, -20, -21, 19, -1, -2, -0, 19, -12, -2, 5, -10, -14, 3, -3, 7, -4, -2, -1, 2, 8, -3, -2, 2, 15, 4, 8, 0, -5, -4, -7, -6, 13, -3, -0, 1, -1, 0, -3, -1, 3, -1, 1, 4, 2, 2, 4, 0, 0, -1, 2, 2, 2, 3, 0, -1, 0, 1, 1, -0, 0, 1, -0, 1, -1, -0, 2, 1, -0, 2, -0, 1, 0, -1, 1, 1, -0, 0, -0, 0, -0, 0, -0, 0, -0, -0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, 0, 0, -0, -0, 0, -0, 0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1860:
return [ -32139, -2676, 5860, -129, 2832, -193, 121, 1404, 1126, -1197, -383, 1428, 110, 400, 850, 749, 601, 168, 442, -283, -191, -226, 6, -106, -93, 368, -285, 425, -18, -26, -7, -52, -110, 108, 55, 33, 70, 22, -84, 87, -139, -23, -45, -66, 26, 69, -27, -12, 68, -31, -19, -26, 8, 25, 57, -42, -40, -32, -9, -6, -3, 24, -31, 16, 0, 1, -3, -20, -21, 19, -1, -2, 0, 19, -13, -1, 4, -10, -13, 4, -2, 7, -4, -2, -1, 2, 7, -2, -2, 3, 15, 3, 8, -1, -4, -5, -7, -5, 13, -3, 0, 1, -1, 1, -3, -1, 3, -2, 1, 4, 2, 2, 3, -0, 0, -1, 1, 2, 3, 4, 0, -1, 1, 0, 1, -0, 0, 1, -0, 1, -1, -0, 1, 1, -0, 2, -0, 1, 0, -1, 1, 1, 0, 0, -0, 0, -0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1865:
return [ -32118, -2644, 5906, -179, 2861, -304, 222, 1431, 1131, -1224, -354, 1437, 87, 422, 804, 767, 607, 176, 463, -263, -208, -221, 27, -102, -102, 368, -286, 407, -18, -15, -5, -50, -112, 116, 48, 32, 70, 23, -82, 85, -145, -25, -49, -70, 30, 70, -27, -20, 68, -32, -21, -25, 7, 27, 53, -41, -41, -35, -7, -6, -3, 25, -32, 16, 1, 1, -4, -21, -20, 18, -0, -2, 0, 18, -14, -1, 3, -10, -11, 5, -2, 8, -4, -2, -0, 1, 6, -2, -2, 3, 15, 3, 8, -1, -3, -5, -6, -4, 13, -3, 0, 1, -1, 1, -3, -1, 3, -2, 1, 4, 3, 2, 3, -0, -0, -0, 1, 2, 3, 4, 0, -1, 1, 0, 1, -0, 0, 1, -0, 1, -1, -0, 1, 1, -0, 2, -0, 1, 0, -1, 1, 1, 0, 0, -0, 0, -0, 0, -0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0]
elif date < 1870:
return [ -32066, -2581, 5927, -232, 2880, -426, 335, 1468, 1121, -1254, -333, 1432, 62, 429, 758, 783, 612, 185, 484, -241, -228, -215, 50, -103, -112, 368, -286, 389, -17, -6, -4, -49, -114, 123, 40, 32, 70, 24, -78, 82, -151, -27, -53, -71, 34, 70, -26, -28, 68, -34, -24, -23, 5, 29, 48, -40, -42, -38, -5, -6, -3, 26, -33, 15, 2, 1, -5, -21, -20, 17, 0, -2, 0, 16, -15, -1, 1, -9, -9, 5, -1, 8, -5, -1, 1, -0, 6, -1, -2, 3, 15, 2, 8, -2, -2, -6, -6, -3, 14, -3, 0, 1, -1, 1, -3, -1, 2, -2, 2, 3, 3, 2, 3, -1, -0, -0, 1, 2, 3, 4, 0, -1, 1, 0, 1, -0, 0, 1, -0, 1, -1, -0, 1, 1, -0, 2, -0, 1, 0, -1, 1, 1, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, 0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0]
elif date < 1875:
return [ -31996, -2532, 5946, -290, 2888, -548, 445, 1495, 1103, -1286, -318, 1420, 37, 432, 710, 798, 617, 195, 506, -218, -250, -209, 74, -107, -121, 367, -287, 372, -13, 1, -5, -49, -115, 129, 33, 33, 70, 26, -72, 79, -159, -29, -57, -72, 38, 69, -25, -37, 68, -36, -27, -21, 3, 31, 43, -40, -42, -41, -3, -6, -2, 27, -34, 15, 3, 2, -6, -21, -19, 17, 1, -2, 1, 15, -16, -0, 0, -9, -7, 5, -0, 8, -5, -1, 2, -1, 5, -1, -1, 4, 15, 2, 7, -3, -2, -6, -6, -2, 14, -3, 1, 1, -1, 2, -2, -0, 2, -2, 2, 3, 3, 2, 2, -1, -0, 0, 1, 2, 3, 3, 0, -1, 1, 0, 1, 0, 0, 1, -1, 1, -1, -0, 1, 1, -1, 2, -0, 1, 0, -1, 1, 1, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, -1, 0, -0, 0, -0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0]
elif date < 1880:
return [ -31909, -2471, 5968, -357, 2898, -656, 539, 1493, 1084, -1326, -310, 1403, 13, 433, 669, 812, 619, 207, 526, -196, -270, -203, 94, -113, -131, 366, -287, 354, -8, 6, -6, -49, -117, 129, 25, 35, 68, 27, -66, 76, -166, -31, -61, -70, 42, 69, -25, -45, 68, -38, -30, -18, 2, 33, 38, -40, -42, -43, -0, -5, -2, 28, -35, 14, 3, 2, -7, -21, -18, 16, 1, -1, 1, 14, -17, 1, -1, -8, -5, 5, 0, 8, -5, -1, 2, -2, 4, 0, -1, 4, 15, 1, 7, -4, -1, -7, -6, -2, 13, -3, 1, 1, -1, 2, -2, -0, 2, -2, 2, 3, 3, 1, 2, -1, -0, 0, 1, 2, 3, 3, 0, -1, 1, 0, 1, 0, 0, 1, -1, 1, -1, -0, 1, 1, -1, 2, -0, 1, 0, -1, 1, 1, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, -1, 0, -0, 0, 0, 0, 0, -0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0]
elif date < 1885:
return [ -31809, -2400, 5974, -428, 2912, -749, 604, 1439, 1075, -1373, -314, 1382, -10, 435, 636, 827, 620, 218, 544, -174, -286, -196, 109, -117, -140, 363, -286, 337, -2, 9, -8, -49, -118, 123, 17, 36, 66, 28, -59, 73, -174, -32, -65, -67, 46, 67, -26, -54, 68, -40, -33, -16, 1, 35, 34, -39, -41, -45, 2, -3, -3, 28, -36, 13, 4, 2, -8, -22, -18, 16, 1, -1, 1, 13, -18, 1, -1, -8, -3, 5, 1, 8, -5, -1, 3, -3, 3, 1, -1, 4, 14, 0, 7, -5, -0, -7, -6, -1, 13, -3, 1, 1, -1, 2, -2, -0, 2, -2, 3, 3, 3, 1, 1, -1, -0, 1, 1, 2, 4, 3, 0, -1, 1, 0, 1, 0, 0, 1, -1, 1, -1, -0, 1, 1, -1, 2, -0, 1, 0, -0, 1, 1, 0, 0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, 0, -1, -0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0]
elif date < 1890:
return [ -31712, -2367, 5960, -500, 2930, -827, 667, 1371, 1069, -1415, -322, 1354, -21, 449, 611, 842, 621, 230, 564, -153, -301, -187, 121, -117, -149, 360, -285, 320, 5, 10, -9, -50, -119, 112, 10, 38, 63, 28, -51, 72, -182, -34, -68, -63, 49, 65, -29, -62, 68, -42, -36, -14, -0, 37, 29, -39, -40, -45, 5, -1, -3, 29, -37, 13, 4, 2, -9, -22, -17, 15, 2, -1, 1, 12, -19, 2, -2, -7, -1, 4, 2, 8, -6, -0, 4, -4, 3, 1, -1, 4, 14, -0, 7, -6, 1, -7, -6, 0, 13, -3, 1, 2, -1, 2, -2, -0, 2, -2, 3, 3, 3, 1, 1, -1, -0, 1, 1, 2, 4, 3, 0, -1, 1, -0, 1, 1, 0, 1, -1, 1, -1, -0, 1, 1, -1, 2, -0, 1, 0, -0, 1, 1, 0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -1, -0, -0, 0, 0, 0, 0, -0, 0, -0, 0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, 0, -0, -0, 0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0]
elif date < 1895:
return [ -31625, -2358, 5944, -572, 2948, -892, 738, 1301, 1061, -1446, -333, 1319, -22, 481, 589, 855, 624, 241, 584, -134, -319, -180, 133, -108, -157, 356, -281, 304, 14, 8, -9, -52, -119, 96, 5, 41, 61, 28, -44, 72, -191, -35, -71, -58, 53, 61, -33, -69, 68, -44, -40, -11, -1, 39, 24, -39, -39, -45, 8, 2, -3, 29, -38, 12, 5, 2, -9, -22, -16, 14, 2, -0, 2, 10, -19, 3, -2, -6, 1, 3, 2, 8, -6, -0, 5, -4, 2, 2, -0, 5, 14, -1, 7, -7, 2, -8, -6, 1, 12, -3, 1, 2, -1, 2, -2, -0, 1, -2, 3, 2, 3, 1, 0, -2, -0, 1, 1, 2, 4, 3, 0, -1, 1, -0, 1, 1, 0, 1, -1, 1, -1, -0, 1, 1, -1, 2, -0, 1, 0, -0, 1, 1, 0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -1, -0, -0, 0, 0, 0, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, -0, 0, 0, 0, -0, -0, 0, -0, -0, -0, -0, 0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, -0, 0]
elif date < 1900:
return [ -31559, -2332, 5939, -640, 2954, -959, 828, 1220, 1053, -1467, -354, 1287, -12, 528, 561, 866, 629, 247, 606, -118, -341, -176, 144, -89, -166, 352, -276, 289, 23, 4, -9, -56, -119, 76, 4, 44, 58, 28, -37, 73, -199, -35, -73, -52, 56, 57, -38, -75, 68, -46, -43, -9, -2, 40, 19, -39, -36, -44, 11, 6, -3, 28, -38, 11, 5, 2, -10, -21, -15, 14, 3, -0, 3, 9, -20, 4, -2, -5, 3, 2, 3, 8, -7, 0, 6, -5, 2, 3, -0, 5, 13, -1, 6, -7, 2, -8, -5, 2, 12, -3, 1, 2, -1, 3, -1, -0, 1, -2, 4, 2, 3, 1, -0, -2, -1, 1, 1, 2, 4, 3, 0, -1, 1, -0, 2, 1, 0, 1, -1, 1, -1, -0, 1, 0, -1, 2, -0, 1, 0, -0, 1, 1, 0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -0, -0, -0, -1, -0, -0, 0, 0, 0, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1905:
return [ -31492, -2303, 5930, -702, 2942, -1011, 935, 1143, 1044, -1486, -378, 1260, 15, 588, 525, 874, 636, 253, 625, -107, -364, -174, 155, -66, -175, 348, -265, 275, 33, -1, -8, -63, -121, 53, 8, 47, 55, 27, -30, 76, -208, -35, -74, -47, 59, 52, -45, -79, 68, -48, -46, -6, -2, 41, 14, -38, -34, -42, 13, 10, -4, 28, -38, 11, 5, 2, -10, -21, -14, 13, 4, -0, 3, 8, -20, 5, -2, -4, 4, 0, 4, 8, -8, 0, 6, -6, 1, 3, 0, 5, 12, -1, 6, -8, 3, -8, -5, 2, 11, -3, 1, 2, -1, 3, -1, 0, 1, -1, 4, 2, 3, 1, -1, -2, -1, 2, 1, 2, 4, 2, 0, -1, 1, -0, 2, 1, 0, 1, -1, 1, -1, -0, 1, 0, -1, 2, -0, 1, 0, -0, 1, 1, 1, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, -0, -1, -0, -0, -1, -0, -0, 0, 0, 0, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1910:
return [ -31397, -2299, 5907, -752, 2948, -1051, 1048, 1080, 1047, -1507, -404, 1241, 53, 651, 481, 880, 646, 259, 638, -104, -385, -174, 164, -50, -185, 343, -248, 262, 41, -6, -6, -72, -125, 27, 15, 50, 53, 26, -24, 81, -217, -34, -74, -41, 60, 45, -53, -81, 69, -49, -48, -3, -3, 42, 9, -38, -31, -40, 16, 14, -4, 28, -38, 10, 5, 2, -10, -21, -13, 12, 5, -1, 4, 7, -20, 6, -1, -3, 6, -1, 5, 8, -9, 1, 7, -7, 1, 4, 0, 6, 12, -1, 6, -8, 4, -8, -5, 3, 10, -3, 1, 2, -1, 3, -1, 0, 1, -1, 4, 2, 3, 1, -1, -2, -1, 2, 1, 2, 4, 2, 0, -1, 1, -0, 2, 1, -0, 1, -1, 1, -0, -0, 0, 0, -1, 2, -0, 1, 0, 0, 1, 1, 1, -0, -0, 0, -1, 0, 0, 1, -0, 0, 0, -0, -1, -0, -0, -1, -0, -0, 0, 0, 0, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1915:
return [ -31295, -2312, 5884, -783, 2962, -1093, 1172, 1015, 1062, -1532, -436, 1232, 86, 709, 418, 885, 659, 265, 641, -112, -404, -171, 174, -42, -197, 338, -224, 248, 48, -10, -4, -86, -129, 1, 24, 54, 51, 26, -19, 87, -225, -32, -71, -35, 60, 36, -61, -80, 70, -50, -50, 0, -3, 42, 5, -37, -27, -36, 19, 18, -6, 28, -38, 10, 5, 2, -10, -20, -12, 11, 6, -1, 5, 6, -19, 7, -0, -3, 7, -3, 6, 8, -10, 1, 8, -7, 0, 4, 0, 6, 11, -1, 6, -8, 5, -7, -4, 3, 9, -3, 1, 2, -1, 3, -1, 0, 0, -1, 4, 1, 3, 1, -1, -2, -1, 2, 1, 2, 4, 2, 0, -1, 1, -1, 2, 1, -0, 1, -1, 1, -0, -1, 0, 0, -1, 2, -0, 1, 0, 0, 1, 1, 1, -0, -0, 0, -1, 0, 0, 1, -0, 0, 0, -0, -1, -0, -0, -1, -0, -0, 0, 0, 1, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, 0, -0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1920:
return [ -31165, -2326, 5850, -809, 2962, -1151, 1302, 934, 1082, -1559, -473, 1223, 103, 774, 346, 888, 675, 272, 630, -128, -419, -159, 193, -42, -210, 333, -195, 232, 52, -15, -3, -101, -130, -26, 35, 58, 49, 26, -15, 93, -233, -28, -66, -27, 56, 26, -70, -76, 72, -51, -50, 3, -4, 42, 0, -35, -22, -32, 21, 21, -7, 28, -37, 9, 5, 2, -10, -20, -11, 9, 7, -2, 6, 6, -18, 9, 1, -2, 8, -5, 7, 8, -11, 1, 9, -8, 0, 5, 0, 7, 10, -1, 6, -8, 6, -7, -4, 3, 8, -3, 1, 2, -1, 3, -1, 0, 0, -1, 5, 1, 3, 1, -1, -2, -1, 2, 1, 2, 4, 1, 0, -1, 1, -1, 2, 2, -0, 1, -1, 1, -0, -1, 0, 0, -1, 2, 0, 1, 0, 0, 0, 1, 1, -0, -0, 0, -1, 0, 0, 1, -0, -0, 0, -0, -1, -0, -0, -1, -0, -0, 0, 0, 1, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1925:
return [ -31029, -2336, 5818, -843, 2962, -1215, 1397, 843, 1112, -1596, -498, 1210, 120, 834, 280, 892, 693, 273, 619, -150, -428, -143, 219, -53, -224, 330, -165, 216, 55, -19, -7, -113, -131, -46, 47, 61, 47, 24, -12, 98, -240, -22, -60, -20, 51, 17, -80, -70, 73, -51, -49, 6, -6, 41, -4, -34, -17, -28, 24, 23, -9, 28, -36, 9, 5, 2, -9, -19, -11, 8, 8, -3, 8, 5, -17, 10, 3, -1, 9, -7, 7, 8, -12, 2, 9, -8, 0, 6, 0, 7, 9, -1, 7, -8, 7, -7, -4, 3, 7, -3, 0, 2, -0, 3, -1, 0, -0, -0, 5, 1, 3, 1, -1, -2, -1, 3, 1, 1, 4, 1, 1, -2, 1, -1, 2, 2, -0, 1, -2, 1, -0, -1, 0, 0, -1, 2, 0, 1, 0, 0, 0, 1, 1, -0, -0, 0, -1, 0, 0, 1, -0, -0, 0, -0, -1, -0, -0, -1, -0, 0, 0, 0, 1, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, 0, -0, 0, -0, 0, 0, -0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1930:
return [ -30903, -2326, 5793, -903, 2972, -1291, 1472, 749, 1142, -1641, -514, 1204, 131, 880, 221, 898, 712, 268, 610, -180, -433, -123, 242, -72, -237, 329, -136, 203, 58, -23, -16, -123, -130, -60, 57, 63, 46, 21, -10, 101, -246, -15, -51, -14, 45, 9, -88, -62, 74, -52, -48, 7, -8, 39, -8, -32, -13, -24, 27, 24, -12, 27, -34, 8, 5, 2, -9, -18, -10, 7, 8, -4, 9, 5, -16, 11, 4, -1, 10, -9, 8, 8, -14, 2, 10, -9, 0, 6, 0, 8, 8, -1, 7, -7, 7, -6, -3, 3, 6, -3, 0, 2, -0, 3, -1, 0, -0, 0, 5, 1, 4, 1, -2, -2, -1, 3, 1, 1, 4, 0, 1, -2, 1, -1, 2, 2, -0, 1, -2, 1, -0, -1, 0, 0, -1, 2, 0, 1, -0, 0, 0, 1, 1, -0, -0, 0, -1, 0, 0, 1, -0, -0, 0, -0, -1, -0, -0, -1, -0, 0, 0, 0, 1, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1935:
return [ -30788, -2313, 5786, -963, 2980, -1388, 1526, 663, 1177, -1693, -528, 1199, 145, 913, 162, 906, 733, 257, 594, -214, -431, -106, 255, -93, -248, 330, -106, 192, 64, -26, -29, -132, -128, -69, 65, 64, 46, 18, -8, 104, -252, -6, -42, -11, 38, 2, -95, -55, 75, -52, -47, 8, -10, 36, -10, -31, -9, -20, 29, 24, -14, 27, -32, 8, 5, 3, -8, -18, -10, 6, 8, -6, 9, 4, -15, 13, 5, -0, 10, -11, 9, 8, -15, 2, 11, -9, 0, 7, 0, 8, 6, -1, 7, -7, 8, -5, -3, 3, 5, -3, 0, 2, -0, 3, -1, 0, -1, 1, 5, 0, 4, 1, -2, -2, -1, 3, 1, 1, 3, -0, 1, -2, 1, -1, 2, 2, -1, 1, -2, 1, -0, -1, 0, 0, -1, 2, -0, 0, -0, 0, 0, 1, 1, -0, -0, 0, -1, 0, 0, 1, -0, -0, 0, -0, -1, -0, -1, -1, -0, 0, 0, 0, 1, -0, -0, 0, -0, 0, 0, -0, 0, -0, -0, -0, 0, -0, 0, 0, 0, -0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1940:
return [ -30703, -2300, 5797, -1038, 2979, -1491, 1564, 613, 1209, -1742, -536, 1210, 155, 921, 89, 916, 756, 236, 572, -248, -427, -89, 269, -109, -254, 334, -79, 186, 71, -26, -42, -140, -126, -75, 69, 63, 46, 13, -7, 107, -256, 5, -33, -9, 30, -4, -100, -47, 76, -52, -45, 8, -13, 32, -12, -31, -6, -17, 31, 23, -16, 26, -30, 7, 5, 4, -8, -17, -11, 6, 8, -7, 10, 4, -13, 14, 7, 0, 10, -13, 9, 7, -17, 2, 12, -10, 1, 7, 0, 8, 5, -1, 7, -6, 9, -4, -2, 2, 4, -2, -0, 1, 0, 3, -1, 0, -1, 1, 5, -0, 4, 1, -1, -2, -1, 3, 1, 1, 3, -1, 1, -2, 1, -1, 2, 2, -1, 0, -2, 1, -0, -1, -0, 0, -1, 2, -0, 0, -0, 1, -0, 2, 1, -0, -0, 0, -1, 0, 0, 1, 0, -0, 0, -0, -1, -0, -1, -1, -0, 0, 0, 0, 1, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, -0, 0, -0, 0, 0, -0, -0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
elif date < 1945:
return [ -30649, -2288, 5805, -1123, 2973, -1588, 1579, 550, 1239, -1793, -538, 1230, 172, 917, 30, 930, 775, 207, 555, -273, -418, -67, 291, -133, -256, 339, -55, 186, 78, -24, -57, -146, -124, -78, 71, 61, 47, 9, -5, 108, -259, 18, -24, -10, 21, -8, -104, -39, 75, -52, -44, 7, -16, 28, -12, -31, -3, -14, 31, 22, -17, 24, -28, 7, 5, 5, -7, -16, -11, 6, 7, -8, 10, 3, -12, 16, 9, 0, 10, -14, 10, 7, -18, 3, 12, -10, 1, 8, 0, 7, 4, -1, 8, -5, 10, -3, -2, 2, 4, -2, -0, 1, 0, 3, -2, 0, -1, 1, 6, -1, 4, 1, -1, -2, -1, 3, 2, 1, 3, -1, 1, -2, 1, -1, 2, 2, -1, 0, -2, 1, -0, -1, -0, 0, -1, 2, -0, 0, -1, 1, -0, 2, 1, -0, -0, 0, -1, 0, 0, 1, 0, -0, 0, -0, -1, -0, -1, -1, -0, 0, 0, 0, 1, -1, -0, 0, -0, 0, 0, -0, -0, 0, 0, -0, 0, -0, 0, 0, -0, -0, -0, -0, 0, -0, -0, 0, 0, 0, 0, 0, 0, -0, -0, -0, -0, 0, 0, 0, -0, -0, -0, -0, -0, 0, 0, -0, -0, 0, -0, 0, 0, 0, -0, 0, 0, 0, -0, -0, -0, 0, 0, 0, 0, 0]
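A minimal table-driven sketch of the epoch lookup implemented by the elif ladder above, assuming the same vectors could be stored in epoch order; _EPOCHS, _COEFFS, and coefficients_for are hypothetical names, and the placeholder vectors stand in for the real coefficient lists.
import bisect
# Hypothetical sketch: each entry of _COEFFS would hold the coefficient list
# returned for dates below the matching entry of _EPOCHS.
_EPOCHS = [1860, 1865, 1870, 1875, 1880, 1885, 1890, 1895,
           1900, 1905, 1910, 1915, 1920, 1925, 1930, 1935, 1940, 1945]
_COEFFS = [[0] * 224 for _ in _EPOCHS]  # placeholders; real vectors elided
def coefficients_for(date):
    # bisect_right returns the index of the first epoch strictly greater
    # than date, matching the `date < epoch` tests in the chain above.
    # (Clamping is a sketch simplification; the original chain continues
    # past 1945.)
    i = bisect.bisect_right(_EPOCHS, date)
    return _COEFFS[min(i, len(_COEFFS) - 1)]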
| 985.097222
| 2,031
| 0.165015
| 15,749
| 141,854
| 1.485999
| 0.078926
| 0.580438
| 0.827971
| 1.069436
| 0.453873
| 0.445028
| 0.436824
| 0.423023
| 0.414861
| 0.409947
| 0
| 0.481133
| 0.672311
| 141,854
| 143
| 2,032
| 991.986014
| 0.02233
| 0
| 0
| 0
| 0
| 0
| 0.000099
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006993
| false
| 0
| 0.006993
| 0
| 0.496504
| 0.013986
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
229eb8ba7137e082ab8a37b610bdbbb49b4e514d
| 47
|
py
|
Python
|
main.py
|
StuartSul/RL_Tic-Tac-Toe
|
c1cd7d7760b2a6a9f06684502dc383632979b692
|
[
"MIT"
] | null | null | null |
main.py
|
StuartSul/RL_Tic-Tac-Toe
|
c1cd7d7760b2a6a9f06684502dc383632979b692
|
[
"MIT"
] | null | null | null |
main.py
|
StuartSul/RL_Tic-Tac-Toe
|
c1cd7d7760b2a6a9f06684502dc383632979b692
|
[
"MIT"
] | null | null | null |
from src import tic_tac_toe
tic_tac_toe.run()
| 11.75
| 27
| 0.808511
| 10
| 47
| 3.4
| 0.7
| 0.352941
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 47
| 3
| 28
| 15.666667
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
22c4b19bca20440ca9c78f4e5a1ee729ba4ab06e
| 339
|
py
|
Python
|
tests/exceptions/test_no_columns_exception.py
|
AustinScola/illud
|
a6aca1de38bbe9d5a795aaa084bcbd6731767d18
|
[
"MIT"
] | 1
|
2020-12-05T00:59:15.000Z
|
2020-12-05T00:59:15.000Z
|
tests/exceptions/test_no_columns_exception.py
|
AustinScola/illud
|
a6aca1de38bbe9d5a795aaa084bcbd6731767d18
|
[
"MIT"
] | 112
|
2021-01-15T21:42:27.000Z
|
2021-04-17T19:11:21.000Z
|
tests/exceptions/test_no_columns_exception.py
|
AustinScola/illud
|
a6aca1de38bbe9d5a795aaa084bcbd6731767d18
|
[
"MIT"
] | null | null | null |
"""Test illud.exceptions.no_columns_exception."""
from illud.exception import IlludException
from illud.exceptions.no_columns_exception import NoColumnsException
def test_inheritance() -> None:
"""Test illud.exceptions.no_columns_exception.NoColumnsException inheritance."""
assert issubclass(NoColumnsException, IlludException)
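The inheritance check above follows a one-assert-per-class pattern; a hedged sketch of the same check parametrized with pytest, so further exception classes could be appended to the list as the package grows (only NoColumnsException is taken from the file above):
import pytest
from illud.exception import IlludException
from illud.exceptions.no_columns_exception import NoColumnsException
# Sketch: additional exception classes could be appended to this list.
@pytest.mark.parametrize("exception_type", [NoColumnsException])
def test_inheritance_parametrized(exception_type) -> None:
    assert issubclass(exception_type, IlludException)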
| 37.666667
| 84
| 0.823009
| 35
| 339
| 7.771429
| 0.428571
| 0.165441
| 0.1875
| 0.264706
| 0.393382
| 0.272059
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088496
| 339
| 8
| 85
| 42.375
| 0.880259
| 0.348083
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a3c78a2fc1f872c11dc920a24a6a1190daebb3c3
| 13,955
|
py
|
Python
|
tests/unit/driver/network/test_network_sync_driver.py
|
verbosemode/scrapli
|
b3885169dccf24ac65d0d433eae16bcab8288002
|
[
"MIT"
] | 404
|
2020-02-11T09:05:40.000Z
|
2022-03-31T05:10:03.000Z
|
tests/unit/driver/network/test_network_sync_driver.py
|
verbosemode/scrapli
|
b3885169dccf24ac65d0d433eae16bcab8288002
|
[
"MIT"
] | 155
|
2020-02-18T00:21:43.000Z
|
2022-03-06T16:34:47.000Z
|
tests/unit/driver/network/test_network_sync_driver.py
|
verbosemode/scrapli
|
b3885169dccf24ac65d0d433eae16bcab8288002
|
[
"MIT"
] | 48
|
2020-04-02T00:24:44.000Z
|
2022-03-07T18:24:53.000Z
|
import pytest
from scrapli.exceptions import ScrapliPrivilegeError
def test_escalate(monkeypatch, sync_network_driver):
def _send_input(cls, channel_input, **kwargs):
assert channel_input == "configure terminal"
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["privilege_exec"]
sync_network_driver._escalate(
escalate_priv=sync_network_driver.privilege_levels["configuration"]
)
def test_escalate_auth_secondary(monkeypatch, sync_network_driver):
def _send_inputs_interact(cls, interact_events, **kwargs):
assert interact_events[0][0] == "enable"
return b"raw", b"processed"
monkeypatch.setattr(
"scrapli.channel.sync_channel.Channel.send_inputs_interact", _send_inputs_interact
)
# patching send_inputs_interact means that if this passes, we know we had to do an interactive ("authy") escalation
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["exec"]
sync_network_driver._escalate(
escalate_priv=sync_network_driver.privilege_levels["privilege_exec"]
)
def test_deescalate(monkeypatch, sync_network_driver):
def _send_input(cls, channel_input, **kwargs):
assert channel_input == "disable"
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["privilege_exec"]
sync_network_driver._deescalate(
current_priv=sync_network_driver.privilege_levels["privilege_exec"]
)
def test_acquire_priv_no_action(monkeypatch, sync_network_driver):
monkeypatch.setattr(
"scrapli.channel.sync_channel.Channel.get_prompt",
lambda _, **kwargs: "scrapli#",
)
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["privilege_exec"]
sync_network_driver.acquire_priv(desired_priv="privilege_exec")
def test_acquire_priv_escalate(monkeypatch, sync_network_driver):
_prompt_counter = 0
def _get_prompt(cls):
nonlocal _prompt_counter
if _prompt_counter == 0:
prompt = "scrapli#"
else:
prompt = "scrapli(config)#"
_prompt_counter += 1
return prompt
def _send_input(cls, channel_input, **kwargs):
assert channel_input == "configure terminal"
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.get_prompt", _get_prompt)
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["privilege_exec"]
sync_network_driver.acquire_priv(desired_priv="configuration")
def test_acquire_priv_deescalate(monkeypatch, sync_network_driver):
_prompt_counter = 0
def _get_prompt(cls):
nonlocal _prompt_counter
if _prompt_counter == 0:
prompt = "scrapli(config)#"
else:
prompt = "scrapli#"
_prompt_counter += 1
return prompt
def _send_input(cls, channel_input, **kwargs):
assert channel_input == "end"
return b"scrapli#", b"scrapli#"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.get_prompt", _get_prompt)
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["configuration"]
sync_network_driver.acquire_priv(desired_priv="privilege_exec")
def test_acquire_priv_failure(monkeypatch, sync_network_driver):
def _get_prompt(cls):
return "scrapli(config)#"
def _send_input(cls, channel_input, **kwargs):
assert channel_input == "end"
return b"scrapli(config)#", b"scrapli(config)#"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.get_prompt", _get_prompt)
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["configuration"]
with pytest.raises(ScrapliPrivilegeError):
sync_network_driver.acquire_priv(desired_priv="privilege_exec")
def test_acquire_appropriate_privilege_level(monkeypatch, sync_network_driver):
_acquire_priv_called = False
def _acquire_priv(cls, **kwargs):
nonlocal _acquire_priv_called
_acquire_priv_called = True
return
# patching acquire_priv so we know it's called but don't have to worry about it actually
# trying to happen
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver.acquire_priv", _acquire_priv
)
_validate_privilege_level_name_called = False
def _validate_privilege_level_name(cls, **kwargs):
nonlocal _validate_privilege_level_name_called
_validate_privilege_level_name_called = True
return
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver._validate_privilege_level_name",
_validate_privilege_level_name,
)
def _reset_called_flags():
nonlocal _acquire_priv_called, _validate_privilege_level_name_called
_acquire_priv_called = False
_validate_privilege_level_name_called = False
# Test default_desired_privilege_level
_reset_called_flags()
sync_network_driver._acquire_appropriate_privilege_level()
assert _validate_privilege_level_name_called is False
assert _acquire_priv_called is True
# Test when privilege_level is the same as sync_network_driver._current_priv_level.name
_reset_called_flags()
sync_network_driver._acquire_appropriate_privilege_level(
sync_network_driver._current_priv_level.name
)
assert _validate_privilege_level_name_called is True
assert _acquire_priv_called is False
# Test when privilege_level is different than sync_network_driver._current_priv_level.name
_reset_called_flags()
sync_network_driver._acquire_appropriate_privilege_level("configuration")
assert _validate_privilege_level_name_called is True
assert _acquire_priv_called is True
# Test when _generic_driver_mode = True
_reset_called_flags()
sync_network_driver._generic_driver_mode = True
sync_network_driver._acquire_appropriate_privilege_level()
assert _validate_privilege_level_name_called is False
assert _acquire_priv_called is False
# Test when _generic_driver_mode = True and privilege_level is different than _current_priv_level
_reset_called_flags()
sync_network_driver._generic_driver_mode = True
sync_network_driver._acquire_appropriate_privilege_level("configuration")
assert _validate_privilege_level_name_called is True
assert _acquire_priv_called is True
# Test when _generic_driver_mode = True and privilege_level is same as _current_priv_level
_reset_called_flags()
sync_network_driver._generic_driver_mode = True
sync_network_driver._acquire_appropriate_privilege_level(
sync_network_driver._current_priv_level.name
)
assert _validate_privilege_level_name_called is True
assert _acquire_priv_called is False
def test_send_command(monkeypatch, sync_network_driver):
def _acquire_appropriate_privilege_level(cls, **kwargs):
return
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver._acquire_appropriate_privilege_level",
_acquire_appropriate_privilege_level,
)
def _send_input(cls, channel_input, **kwargs):
assert channel_input == "show version"
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
actual_response = sync_network_driver.send_command(command="show version")
assert actual_response.failed is False
assert actual_response.result == "processed"
assert actual_response.raw_result == b"raw"
def test_send_commands(monkeypatch, sync_network_driver):
def _acquire_appropriate_privilege_level(cls, **kwargs):
return
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver._acquire_appropriate_privilege_level",
_acquire_appropriate_privilege_level,
)
_command_counter = 0
def _send_input(cls, channel_input, **kwargs):
nonlocal _command_counter
if _command_counter == 0:
assert channel_input == "show version"
else:
assert channel_input == "show run"
_command_counter += 1
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
actual_response = sync_network_driver.send_commands(commands=["show version", "show run"])
assert actual_response.failed is False
assert actual_response[0].result == "processed"
assert actual_response[0].raw_result == b"raw"
def test_send_commands_from_file(fs, monkeypatch, real_ssh_commands_file_path, sync_network_driver):
fs.add_real_file(source_path=real_ssh_commands_file_path, target_path="/commands")
def _acquire_appropriate_privilege_level(cls, **kwargs):
return
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver._acquire_appropriate_privilege_level",
_acquire_appropriate_privilege_level,
)
def _send_input(cls, channel_input, **kwargs):
assert channel_input == "show version"
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
actual_response = sync_network_driver.send_commands_from_file(file="commands")
assert actual_response.failed is False
assert actual_response[0].result == "processed"
assert actual_response[0].raw_result == b"raw"
def test_send_interactive(monkeypatch, sync_network_driver):
def _acquire_appropriate_privilege_level(cls, **kwargs):
return
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver._acquire_appropriate_privilege_level",
_acquire_appropriate_privilege_level,
)
def _send_inputs_interact(cls, **kwargs):
return b"raw", b"processed"
monkeypatch.setattr(
"scrapli.channel.sync_channel.Channel.send_inputs_interact", _send_inputs_interact
)
actual_response = sync_network_driver.send_interactive(interact_events=[("nada", "scrapli>")])
assert actual_response.failed is False
assert actual_response.result == "processed"
assert actual_response.raw_result == b"raw"
def test_send_configs(monkeypatch, sync_network_driver):
def _acquire_priv(cls, **kwargs):
return
# patching acquire_priv so we know it's called but don't have to worry about it actually
# trying to happen
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver.acquire_priv", _acquire_priv
)
_command_counter = 0
def _send_input(cls, channel_input, **kwargs):
nonlocal _command_counter
if _command_counter == 0:
assert channel_input == "interface loopback123"
else:
assert channel_input == "description tests are boring"
_command_counter += 1
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
actual_response = sync_network_driver.send_configs(
configs=["interface loopback123", "description tests are boring"]
)
assert actual_response.failed is False
assert actual_response[0].result == "processed"
assert actual_response[0].raw_result == b"raw"
def test_send_config(monkeypatch, sync_network_driver):
def _acquire_priv(cls, **kwargs):
return
# patching acquire_priv so we know it's called but don't have to worry about it actually
# trying to happen
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver.acquire_priv", _acquire_priv
)
_command_counter = 0
def _send_input(cls, channel_input, **kwargs):
nonlocal _command_counter
if _command_counter == 0:
assert channel_input == "interface loopback123"
else:
assert channel_input == "description tests are boring"
_command_counter += 1
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["privilege_exec"]
actual_response = sync_network_driver.send_config(
config="interface loopback123\ndescription tests are boring"
)
assert actual_response.failed is False
assert actual_response.result == "processed\nprocessed"
assert actual_response.raw_result == b""
def test_send_configs_from_file(fs, monkeypatch, real_ssh_commands_file_path, sync_network_driver):
fs.add_real_file(source_path=real_ssh_commands_file_path, target_path="/configs")
def _acquire_priv(cls, **kwargs):
return
# patching acquire_priv so we know it's called but don't have to worry about it actually
# trying to happen
monkeypatch.setattr(
"scrapli.driver.network.sync_driver.NetworkDriver.acquire_priv", _acquire_priv
)
def _send_input(cls, channel_input, **kwargs):
assert channel_input == "show version"
return b"raw", b"processed"
monkeypatch.setattr("scrapli.channel.sync_channel.Channel.send_input", _send_input)
sync_network_driver._current_priv_level = sync_network_driver.privilege_levels["privilege_exec"]
actual_response = sync_network_driver.send_configs_from_file(file="configs")
assert actual_response.failed is False
assert actual_response.result == "show version\nprocessed"
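Several of the tests above reuse a closure-with-nonlocal counter so a patched channel method behaves differently on successive calls. A minimal standalone sketch of that pattern (make_prompt_stub is a hypothetical helper, not part of scrapli):
def make_prompt_stub(first, second):
    # Returns a stand-in for Channel.get_prompt that yields `first` on the
    # initial call and `second` afterwards, mirroring the _prompt_counter
    # closures in the tests above.
    calls = 0
    def _get_prompt(cls):
        nonlocal calls
        prompt = first if calls == 0 else second
        calls += 1
        return prompt
    return _get_prompt
# Hypothetical usage inside a test:
# monkeypatch.setattr("scrapli.channel.sync_channel.Channel.get_prompt",
#                     make_prompt_stub("scrapli#", "scrapli(config)#"))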
| 36.246753
| 102
| 0.750054
| 1,693
| 13,955
| 5.747785
| 0.076787
| 0.071216
| 0.110061
| 0.062481
| 0.87771
| 0.858288
| 0.830131
| 0.826431
| 0.824787
| 0.811838
| 0
| 0.003035
| 0.17363
| 13,955
| 384
| 103
| 36.341146
| 0.840791
| 0.067718
| 0
| 0.712121
| 0
| 0
| 0.189164
| 0.1159
| 0
| 0
| 0
| 0
| 0.17803
| 1
| 0.155303
| false
| 0
| 0.007576
| 0.034091
| 0.257576
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3dda14900f61e84897cd30907bc4d89324782dd
| 31,492
|
py
|
Python
|
tensorflow/python/kernel_tests/unicode_decode_op_test.py
|
abhaikollara/tensorflow
|
4f96df3659696990cb34d0ad07dc67843c4225a9
|
[
"Apache-2.0"
] | 56
|
2018-06-21T13:47:23.000Z
|
2020-05-13T09:31:47.000Z
|
tensorflow/python/kernel_tests/unicode_decode_op_test.py
|
abhaikollara/tensorflow
|
4f96df3659696990cb34d0ad07dc67843c4225a9
|
[
"Apache-2.0"
] | 58
|
2021-11-22T05:41:28.000Z
|
2022-01-19T01:33:40.000Z
|
tensorflow/python/kernel_tests/unicode_decode_op_test.py
|
abhaikollara/tensorflow
|
4f96df3659696990cb34d0ad07dc67843c4225a9
|
[
"Apache-2.0"
] | 15
|
2018-09-06T14:18:32.000Z
|
2020-05-14T06:35:30.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for unicode_decode and unicode_decode_with_splits."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_string_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_string_ops
from tensorflow.python.platform import test
def _nested_encode(x, encoding):
"""Encode each string in a nested list with `encoding`."""
if isinstance(x, list):
return [_nested_encode(v, encoding) for v in x]
else:
return x.encode(encoding)
def _nested_codepoints(x):
"""Replace each string in a nested list with a list of its codepoints."""
# Works for Python 2 and 3, and for both UCS2 and UCS4 builds
if isinstance(x, list):
return [_nested_codepoints(v) for v in x]
else:
b = list(x.encode("utf-32-be"))
if any(isinstance(c, str) for c in b):
b = [ord(c) for c in b]
return [(b0 << 24) + (b1 << 16) + (b2 << 8) + b3
for b0, b1, b2, b3 in zip(b[::4], b[1::4], b[2::4], b[3::4])]
def _nested_offsets(x, encoding):
"""Replace each string in a nested list with a list of start offsets."""
if isinstance(x, list):
return [_nested_offsets(v, encoding) for v in x]
else:
if not x:
return []
encoded_x = x.encode("utf-32-be")
encoded_chars = [encoded_x[i:i + 4] for i in range(0, len(encoded_x), 4)]
char_lens = [
len(c.decode("utf-32-be").encode(encoding)) for c in encoded_chars
]
return [0] + np.cumsum(char_lens).tolist()[:-1]
def _nested_splitchars(x, encoding):
"""Replace each string in a nested list with a list of char substrings."""
if isinstance(x, list):
return [_nested_splitchars(v, encoding) for v in x]
else:
b = x.encode("utf-32-be")
chars = zip(b[::4], b[1::4], b[2::4], b[3::4])
if str is bytes:
return [b"".join(c).decode("utf-32-be").encode(encoding) for c in chars]
else:
return [bytes(c).decode("utf-32-be").encode(encoding) for c in chars]
def _make_sparse_tensor(indices, values, dense_shape, dtype=np.int32):
return sparse_tensor.SparseTensorValue(
np.array(indices, np.int64), np.array(values, dtype),
np.array(dense_shape, np.int64))
@test_util.run_all_in_graph_and_eager_modes
class UnicodeDecodeTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
def testScalarDecode(self):
text = constant_op.constant(u"仅今年前".encode("utf-8"))
chars = ragged_string_ops.unicode_decode(text, "utf-8")
self.assertAllEqual(chars, [ord(c) for c in u"仅今年前"])
def testScalarDecodeWithOffset(self):
text = constant_op.constant(u"仅今年前".encode("utf-8"))
chars, starts = ragged_string_ops.unicode_decode_with_offsets(text, "utf-8")
self.assertAllEqual(chars, [ord(c) for c in u"仅今年前"])
self.assertAllEqual(starts, [0, 3, 6, 9])
def testVectorDecode(self):
text = constant_op.constant([u"仅今年前".encode("utf-8"), b"hello"])
chars = ragged_string_ops.unicode_decode(text, "utf-8")
expected_chars = [[ord(c) for c in u"仅今年前"],
[ord(c) for c in u"hello"]]
self.assertAllEqual(chars, expected_chars)
def testVectorDecodeWithOffset(self):
text = constant_op.constant([u"仅今年前".encode("utf-8"), b"hello"])
chars, starts = ragged_string_ops.unicode_decode_with_offsets(text, "utf-8")
expected_chars = [[ord(c) for c in u"仅今年前"],
[ord(c) for c in u"hello"]]
self.assertAllEqual(chars, expected_chars)
self.assertAllEqual(starts, [[0, 3, 6, 9], [0, 1, 2, 3, 4]])
@parameterized.parameters([
{"texts": u"仅今年前"},
{"texts": [u"G\xf6\xf6dnight", u"\U0001f60a"]},
{"texts": ["Hello", "world", "", u"👍"]},
{"texts": [["Hi", "there"], ["", u"\U0001f60a"]], "ragged_rank": 0},
{"texts": [["Hi", "there", ""], [u"😊"]], "ragged_rank": 1},
{"texts": [[[u"😊", u"🤠🧐"], []], [[u"🤓👻🤖"]]], "ragged_rank": 2},
{"texts": []}
]) # pyformat: disable
def testBasicDecode(self, texts, ragged_rank=None):
input_tensor = ragged_factory_ops.constant_value(
_nested_encode(texts, "UTF-8"), ragged_rank=ragged_rank, dtype=bytes)
result = ragged_string_ops.unicode_decode(input_tensor, "UTF-8")
expected = _nested_codepoints(texts)
self.assertAllEqual(expected, result)
@parameterized.parameters([
{"texts": u"仅今年前"},
{"texts": [u"G\xf6\xf6dnight", u"\U0001f60a"]},
{"texts": ["Hello", "world", "", u"👍"]},
{"texts": [["Hi", "there"], ["", u"\U0001f60a"]], "ragged_rank": 0},
{"texts": [["Hi", "there", ""], [u"😊"]], "ragged_rank": 1},
{"texts": [[[u"😊", u"🤠🧐"], []], [[u"🤓👻🤖"]]], "ragged_rank": 2},
{"texts": []}
]) # pyformat: disable
def testBasicDecodeWithOffsets(self, texts, ragged_rank=None):
input_tensor = ragged_factory_ops.constant_value(
_nested_encode(texts, "UTF-8"), ragged_rank=ragged_rank, dtype=bytes)
result = ragged_string_ops.unicode_decode_with_offsets(
input_tensor, "UTF-8")
expected_codepoints = _nested_codepoints(texts)
expected_offsets = _nested_offsets(texts, "UTF-8")
self.assertAllEqual(expected_codepoints, result[0])
self.assertAllEqual(expected_offsets, result[1])
def testDocstringExamples(self):
texts = [s.encode("utf8") for s in [u"G\xf6\xf6dnight", u"\U0001f60a"]]
codepoints1 = ragged_string_ops.unicode_decode(texts, "UTF-8")
codepoints2, offsets = ragged_string_ops.unicode_decode_with_offsets(
texts, "UTF-8")
self.assertAllEqual(
codepoints1, [[71, 246, 246, 100, 110, 105, 103, 104, 116], [128522]])
self.assertAllEqual(
codepoints2, [[71, 246, 246, 100, 110, 105, 103, 104, 116], [128522]])
self.assertAllEqual(offsets, [[0, 1, 3, 5, 6, 7, 8, 9, 10], [0]])
@parameterized.parameters([
dict(
texts=["Hello", "world", "", u"👍"],
expected=_make_sparse_tensor(
indices=[[0, 0], [0, 1], [0, 2], [0, 3], [0, 4], [1, 0], [1, 1],
[1, 2], [1, 3], [1, 4], [3, 0]],
values=[72, 101, 108, 108, 111, 119, 111, 114, 108, 100, 128077],
dense_shape=[4, 5])),
dict(
texts=[["Hi", "there"], ["", u"\U0001f60a"]],
expected=_make_sparse_tensor(
indices=[[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [0, 1, 2],
[0, 1, 3], [0, 1, 4], [1, 1, 0]],
values=[72, 105, 116, 104, 101, 114, 101, 128522],
dense_shape=[2, 2, 5])),
dict(
texts=[],
expected=_make_sparse_tensor(np.zeros([0, 2], np.int64), [], [0, 0])),
])
def testDecodeWithSparseOutput(self, texts, expected):
input_tensor = np.array(_nested_encode(texts, "UTF-8"), dtype=bytes)
result = ragged_string_ops.unicode_decode(input_tensor, "UTF-8").to_sparse()
self.assertIsInstance(result, sparse_tensor.SparseTensor)
self.assertAllEqual(expected.indices, result.indices)
self.assertAllEqual(expected.values, result.values)
self.assertAllEqual(expected.dense_shape, result.dense_shape)
@parameterized.parameters([
dict(
texts=["Hello", "world", "", u"👍"],
expected=[[72, 101, 108, 108, 111], [119, 111, 114, 108, 100],
[-1, -1, -1, -1, -1], [0x1F44D, -1, -1, -1, -1]]),
dict(
texts=[["Hi", "there"], ["", u"\U0001f60a"]],
expected=[[[72, 105, -1, -1, -1], [116, 104, 101, 114, 101]],
[[-1, -1, -1, -1, -1], [128522, -1, -1, -1, -1]]],
ragged_rank=0),
dict(
texts=[["Hi", "there", ""], [u"😊"]],
expected=[[[72, 105, -1, -1, -1],
[116, 104, 101, 114, 101],
[-1, -1, -1, -1, -1]],
[[128522, -1, -1, -1, -1],
[-1, -1, -1, -1, -1],
[-1, -1, -1, -1, -1]]]),
dict(
texts=[[[u"😊", u"🤠🧐"], []], [[u"🤓👻🤖"]]],
expected=[
[[[128522, -1, -1], [129312, 129488, -1]],
[[-1, -1, -1], [-1, -1, -1]]],
[[[129299, 128123, 129302], [-1, -1, -1]],
[[-1, -1, -1], [-1, -1, -1]]]]),
dict(texts=[], expected=np.zeros([0, 0], np.int64)),
]) # pyformat: disable
def testDecodeWithPaddedOutput(self, texts, expected, ragged_rank=None):
input_tensor = ragged_factory_ops.constant_value(
_nested_encode(texts, "UTF-8"), ragged_rank=ragged_rank, dtype=bytes)
result = ragged_string_ops.unicode_decode(
input_tensor, "UTF-8").to_tensor(default_value=-1)
self.assertAllEqual(expected, result)
@parameterized.parameters([
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="replace",
expected=[[0xFFFD],
[ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[ord('='), ord('='), 0xFFFD, ord('='), ord('=')],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]]),
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="replace",
replacement_char=0,
expected=[[0], [ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[ord('='), ord('='), 0, ord('='), ord('=')],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]]),
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="ignore",
expected=[[], [ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[ord('='), ord('='), ord('='), ord('=')],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]]),
dict(
input=[b"\x00", b"hello", b"==\x01==", b"world"],
input_encoding="UTF-8",
replace_control_characters=True,
expected=[[0xFFFD],
[ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[61, 61, 65533, 61, 61],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]]),
dict(
input=[b"\x00", b"hello", b"==\x01==", b"world"],
input_encoding="UTF-8",
replace_control_characters=True,
replacement_char=0,
expected=[[0], [ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[ord('='), ord('='), 0, ord('='), ord('=')],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]]),
]) # pyformat: disable
def testErrorModes(self, expected=None, **args):
result = ragged_string_ops.unicode_decode(**args)
self.assertAllEqual(expected, result)
@parameterized.parameters([
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="replace",
expected=[[0xFFFD],
[ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[ord('='), ord('='), 0xFFFD, ord('='), ord('=')],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]],
expected_offsets=[[0], [0, 1, 2, 3, 4],
[0, 1, 2, 3, 4], [0, 1, 2, 3, 4]]),
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="replace",
replacement_char=0,
expected=[[0], [ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[ord('='), ord('='), 0, ord('='), ord('=')],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]],
expected_offsets=[[0], [0, 1, 2, 3, 4],
[0, 1, 2, 3, 4], [0, 1, 2, 3, 4]]),
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="ignore",
expected=[[], [ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[ord('='), ord('='), ord('='), ord('=')],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]],
expected_offsets=[[], [0, 1, 2, 3, 4],
[0, 1, 3, 4], [0, 1, 2, 3, 4]]),
dict(
input=[b"\x00", b"hello", b"==\x01==", b"world"],
input_encoding="UTF-8",
replace_control_characters=True,
expected=[[0xFFFD],
[ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[ord('='), ord('='), 0xFFFD, ord('='), ord('=')],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]],
expected_offsets=[[0], [0, 1, 2, 3, 4],
[0, 1, 2, 3, 4], [0, 1, 2, 3, 4]]),
dict(
input=[b"\x00", b"hello", b"==\x01==", b"world"],
input_encoding="UTF-8",
replace_control_characters=True,
replacement_char=0,
expected=[[0], [ord('h'), ord('e'), ord('l'), ord('l'), ord('o')],
[0x3D, 0x3D, 0, 0x3D, 0x3D],
[ord('w'), ord('o'), ord('r'), ord('l'), ord('d')]],
expected_offsets=[[0], [0, 1, 2, 3, 4],
[0, 1, 2, 3, 4], [0, 1, 2, 3, 4]]),
dict(
input=[b"\xD8\x01"],
input_encoding="UTF-8",
replacement_char=0x41,
expected=[[0x41, 1]],
expected_offsets=[[0, 1]]),
]) # pyformat: disable
def testErrorModesWithOffsets(self,
expected=None,
expected_offsets=None,
**args):
result = ragged_string_ops.unicode_decode_with_offsets(**args)
self.assertAllEqual(result[0], expected)
self.assertAllEqual(result[1], expected_offsets)
@parameterized.parameters(
("UTF-8", [u"こんにちは", u"你好", u"Hello"]),
("UTF-16-BE", [u"こんにちは", u"你好", u"Hello"]),
("UTF-32-BE", [u"こんにちは", u"你好", u"Hello"]),
("US-ASCII", [u"Hello", "world"]),
("ISO-8859-1", [u"ÀÈÓ", "AEO"]),
("SHIFT-JIS", [u"Hello", u"こんにちは"]),
)
def testDecodeWithDifferentEncodings(self, encoding, texts):
expected = _nested_codepoints(texts)
input_tensor = constant_op.constant(_nested_encode(texts, encoding))
result = ragged_string_ops.unicode_decode(input_tensor, encoding)
self.assertAllEqual(expected, result)
@parameterized.parameters(
("UTF-8", [u"こんにちは", u"你好", u"Hello"]),
("UTF-16-BE", [u"こんにちは", u"你好", u"Hello"]),
("UTF-32-BE", [u"こんにちは", u"你好", u"Hello"]),
("US-ASCII", [u"Hello", "world"]),
("ISO-8859-1", [u"ÀÈÓ", "AEO"]),
("SHIFT-JIS", [u"Hello", u"こんにちは"]),
)
def testDecodeWithOffsetsWithDifferentEncodings(self, encoding, texts):
expected_codepoints = _nested_codepoints(texts)
expected_offsets = _nested_offsets(texts, encoding)
input_tensor = constant_op.constant(_nested_encode(texts, encoding))
result = ragged_string_ops.unicode_decode_with_offsets(
input_tensor, encoding)
self.assertAllEqual(expected_codepoints, result[0])
self.assertAllEqual(expected_offsets, result[1])
@parameterized.parameters([
dict(input=[b"\xFEED"],
errors="strict",
input_encoding="UTF-8",
exception=errors.InvalidArgumentError,
message="Invalid formatting on input string"),
dict(input="x",
input_encoding="UTF-8",
replacement_char=11141111,
exception=errors.InvalidArgumentError,
message="replacement_char out of unicode codepoint range"),
dict(input="x",
input_encoding="UTF-8",
errors="oranguatan",
exception=(ValueError, errors.InvalidArgumentError)),
]) # pyformat: disable
def testExceptions(self, exception=None, message=None, **args):
with self.assertRaisesRegexp(exception, message):
self.evaluate(ragged_string_ops.unicode_decode(**args))
def testUnknownRankError(self):
if context.executing_eagerly():
return
s = array_ops.placeholder(dtypes.string)
message = "Rank of `input` must be statically known."
with self.assertRaisesRegexp(ValueError, message):
self.evaluate(ragged_string_ops.unicode_decode(s, input_encoding="UTF-8"))
@parameterized.parameters([
dict(
doc="Single string",
input=_nested_encode([u"仅今年前"], "utf-8"),
input_encoding="UTF-8",
expected_char_values=_nested_codepoints(u"仅今年前"),
expected_row_splits=[0, 4],
expected_char_to_byte_starts=[0, 3, 6, 9]),
dict(
doc="Multiple strings",
input=_nested_encode([u"仅今年前", u"你好"], "utf-8"),
input_encoding="UTF-8",
expected_char_values=_nested_codepoints(u"仅今年前你好"),
expected_row_splits=[0, 4, 6],
expected_char_to_byte_starts=[0, 3, 6, 9, 0, 3]),
dict(
doc="errors=replace",
input=b"=\xFE=",
input_encoding="UTF-8",
errors="replace",
expected_char_values=[0x3D, 0xFFFD, 0x3D],
expected_row_splits=[0, 3],
expected_char_to_byte_starts=[0, 1, 2]),
dict(
doc="errors=ignore",
input=b"=\xFE=",
input_encoding="UTF-8",
errors="ignore",
expected_char_values=[61, 61],
expected_row_splits=[0, 2],
expected_char_to_byte_starts=[0, 2]),
])
def testDecodeGenOp(self,
doc,
expected_row_splits=None,
expected_char_values=None,
expected_char_to_byte_starts=None,
**args):
"""Test for the c++ interface (gen_string_ops.unicode_decode)."""
result = gen_string_ops.unicode_decode_with_offsets(**args)
self.assertAllEqual(expected_row_splits, result.row_splits)
self.assertAllEqual(expected_char_values, result.char_values)
self.assertAllEqual(expected_char_to_byte_starts,
result.char_to_byte_starts)
@test_util.run_all_in_graph_and_eager_modes
class UnicodeSplitTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
def testScalarSplit(self):
text = constant_op.constant(u"仅今年前".encode("UTF-8"))
chars = ragged_string_ops.unicode_split(text, "UTF-8")
self.assertAllEqual(chars, [c.encode("UTF-8") for c in u"仅今年前"])
def testScalarSplitWithOffset(self):
text = constant_op.constant(u"仅今年前".encode("UTF-8"))
chars, starts = ragged_string_ops.unicode_split_with_offsets(text, "UTF-8")
self.assertAllEqual(chars, [c.encode("UTF-8") for c in u"仅今年前"])
self.assertAllEqual(starts, [0, 3, 6, 9])
def testVectorSplit(self):
text = constant_op.constant([u"仅今年前".encode("UTF-8"), b"hello"])
chars = ragged_string_ops.unicode_split(text, "UTF-8")
expected_chars = [[c.encode("UTF-8") for c in u"仅今年前"],
[c.encode("UTF-8") for c in u"hello"]]
self.assertAllEqual(chars, expected_chars)
def testVectorSplitWithOffset(self):
text = constant_op.constant([u"仅今年前".encode("UTF-8"), b"hello"])
chars, starts = ragged_string_ops.unicode_split_with_offsets(text, "UTF-8")
expected_chars = [[c.encode("UTF-8") for c in u"仅今年前"],
[c.encode("UTF-8") for c in u"hello"]]
self.assertAllEqual(chars, expected_chars)
self.assertAllEqual(starts, [[0, 3, 6, 9], [0, 1, 2, 3, 4]])
@parameterized.parameters([
{"texts": u"仅今年前"},
{"texts": [u"G\xf6\xf6dnight", u"\U0001f60a"]},
{"texts": ["Hello", "world", "", u"👍"]},
{"texts": [["Hi", "there"], ["", u"\U0001f60a"]], "ragged_rank": 0},
{"texts": [["Hi", "there", ""], [u"😊"]], "ragged_rank": 1},
{"texts": [[[u"😊", u"🤠🧐"], []], [[u"🤓👻🤖"]]], "ragged_rank": 2},
{"texts": []}
]) # pyformat: disable
def testBasicSplit(self, texts, ragged_rank=None):
input_tensor = ragged_factory_ops.constant_value(
_nested_encode(texts, "UTF-8"), ragged_rank=ragged_rank, dtype=bytes)
result = ragged_string_ops.unicode_split(input_tensor, "UTF-8")
expected = _nested_splitchars(texts, "UTF-8")
self.assertAllEqual(expected, result)
@parameterized.parameters([
{"texts": u"仅今年前"},
{"texts": [u"G\xf6\xf6dnight", u"\U0001f60a"]},
{"texts": ["Hello", "world", "", u"👍"]},
{"texts": [["Hi", "there"], ["", u"\U0001f60a"]], "ragged_rank": 0},
{"texts": [["Hi", "there", ""], [u"😊"]], "ragged_rank": 1},
{"texts": [[[u"😊", u"🤠🧐"], []], [[u"🤓👻🤖"]]], "ragged_rank": 2},
{"texts": []}
]) # pyformat: disable
def testBasicSplitWithOffsets(self, texts, ragged_rank=None):
input_tensor = ragged_factory_ops.constant_value(
_nested_encode(texts, "UTF-8"), ragged_rank=ragged_rank, dtype=bytes)
result = ragged_string_ops.unicode_split_with_offsets(input_tensor, "UTF-8")
expected_codepoints = _nested_splitchars(texts, "UTF-8")
expected_offsets = _nested_offsets(texts, "UTF-8")
self.assertAllEqual(expected_codepoints, result[0])
self.assertAllEqual(expected_offsets, result[1])
def testDocstringExamples(self):
texts = [s.encode("utf8") for s in [u"G\xf6\xf6dnight", u"\U0001f60a"]]
codepoints1 = ragged_string_ops.unicode_split(texts, "UTF-8")
codepoints2, offsets = ragged_string_ops.unicode_split_with_offsets(
texts, "UTF-8")
self.assertAllEqual(
codepoints1,
[[b"G", b"\xc3\xb6", b"\xc3\xb6", b"d", b"n", b"i", b"g", b"h", b"t"],
[b"\xf0\x9f\x98\x8a"]])
self.assertAllEqual(
codepoints2,
[[b"G", b"\xc3\xb6", b"\xc3\xb6", b"d", b"n", b"i", b"g", b"h", b"t"],
[b"\xf0\x9f\x98\x8a"]])
self.assertAllEqual(offsets, [[0, 1, 3, 5, 6, 7, 8, 9, 10], [0]])
@parameterized.parameters([
dict(
texts=["Hello", "world", "", u"👍"],
expected=_make_sparse_tensor(
indices=[[0, 0], [0, 1], [0, 2], [0, 3], [0, 4], [1, 0], [1, 1],
[1, 2], [1, 3], [1, 4], [3, 0]],
values=[b"H", b"e", b"l", b"l", b"o",
b"w", b"o", b"r", b"l", b"d", b"\xf0\x9f\x91\x8d"],
dense_shape=[4, 5],
dtype=bytes)),
dict(
texts=[["Hi", "there"], ["", u"\U0001f60a"]],
expected=_make_sparse_tensor(
indices=[[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [0, 1, 2],
[0, 1, 3], [0, 1, 4], [1, 1, 0]],
values=[b"H", b"i", b"t", b"h", b"e", b"r", b"e",
b"\xf0\x9f\x98\x8a"],
dense_shape=[2, 2, 5],
dtype=bytes)),
dict(
texts=[],
expected=_make_sparse_tensor(
np.zeros([0, 2], np.int64), [], [0, 0], dtype=bytes)),
]) # pyformat: disable
def testSplitWithSparseOutput(self, texts, expected):
input_tensor = np.array(_nested_encode(texts, "UTF-8"), dtype=bytes)
result = ragged_string_ops.unicode_split(input_tensor, "UTF-8").to_sparse()
self.assertIsInstance(result, sparse_tensor.SparseTensor)
self.assertAllEqual(expected.indices, result.indices)
self.assertAllEqual(expected.values, result.values)
self.assertAllEqual(expected.dense_shape, result.dense_shape)
@parameterized.parameters([
dict(
texts=["Hello", "world", "", u"👍"],
expected=[[b"H", b"e", b"l", b"l", b"o"],
[b"w", b"o", b"r", b"l", b"d"],
["", "", "", "", ""],
[b"\xf0\x9f\x91\x8d", "", "", "", ""]]),
dict(
texts=[["Hi", "there"], ["", u"\U0001f60a"]],
expected=[[[b"H", b"i", "", "", ""],
[b"t", b"h", b"e", b"r", b"e"]],
[["", "", "", "", ""],
[b"\xf0\x9f\x98\x8a", "", "", "", ""]]],
ragged_rank=0),
dict(
texts=[["Hi", "there", ""], [u"😊"]],
expected=[[[b"H", b"i", "", "", ""],
[b"t", b"h", b"e", b"r", b"e"],
["", "", "", "", ""]],
[[b"\xf0\x9f\x98\x8a", "", "", "", ""],
["", "", "", "", ""],
["", "", "", "", ""]]]),
dict(
texts=[[[u"😊", u"🤠🧐"], []], [[u"🤓👻🤖"]]],
expected=[[[[b"\xf0\x9f\x98\x8a", "", ""],
[b"\xf0\x9f\xa4\xa0", b"\xf0\x9f\xa7\x90", ""]],
[["", "", ""],
["", "", ""]]],
[[[b"\xf0\x9f\xa4\x93", b"\xf0\x9f\x91\xbb",
b"\xf0\x9f\xa4\x96"],
["", "", ""]],
[["", "", ""],
["", "", ""]]]]),
dict(texts=[], expected=np.zeros([0, 0], np.int64)),
]) # pyformat: disable
def testSplitWithPaddedOutput(self, texts, expected, ragged_rank=None):
input_tensor = ragged_factory_ops.constant_value(
_nested_encode(texts, "UTF-8"), ragged_rank=ragged_rank, dtype=bytes)
result = ragged_string_ops.unicode_split(
input_tensor, "UTF-8").to_tensor(default_value="")
self.assertAllEqual(np.array(expected, dtype=bytes), result)
@parameterized.parameters([
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="replace",
expected=[[b"\xef\xbf\xbd"],
[b"h", b"e", b"l", b"l", b"o"],
[b"=", b"=", b"\xef\xbf\xbd", b"=", b"="],
[b"w", b"o", b"r", b"l", b"d"]]),
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="replace",
replacement_char=0,
expected=[[b"\x00"],
[b"h", b"e", b"l", b"l", b"o"],
[b"=", b"=", b"\x00", b"=", b"="],
[b"w", b"o", b"r", b"l", b"d"]]),
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="ignore",
expected=[[],
[b"h", b"e", b"l", b"l", b"o"],
[b"=", b"=", b"=", b"="],
[b"w", b"o", b"r", b"l", b"d"]]),
]) # pyformat: disable
def testErrorModes(self, expected=None, **args):
result = ragged_string_ops.unicode_split(**args)
self.assertAllEqual(expected, result)
@parameterized.parameters([
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="replace",
expected=[[b"\xef\xbf\xbd"],
[b"h", b"e", b"l", b"l", b"o"],
[b"=", b"=", b"\xef\xbf\xbd", b"=", b"="],
[b"w", b"o", b"r", b"l", b"d"]],
expected_offsets=[[0], [0, 1, 2, 3, 4],
[0, 1, 2, 3, 4], [0, 1, 2, 3, 4]]),
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="replace",
replacement_char=0,
expected=[[b"\x00"],
[b"h", b"e", b"l", b"l", b"o"],
[b"=", b"=", b"\x00", b"=", b"="],
[b"w", b"o", b"r", b"l", b"d"]],
expected_offsets=[[0], [0, 1, 2, 3, 4],
[0, 1, 2, 3, 4], [0, 1, 2, 3, 4]]),
dict(
input=[b"\xFE", b"hello", b"==\xFF==", b"world"],
input_encoding="UTF-8",
errors="ignore",
expected=[[],
[b"h", b"e", b"l", b"l", b"o"],
[b"=", b"=", b"=", b"="],
[b"w", b"o", b"r", b"l", b"d"]],
expected_offsets=[[], [0, 1, 2, 3, 4],
[0, 1, 3, 4], [0, 1, 2, 3, 4]]),
]) # pyformat: disable
def testErrorModesWithOffsets(self,
expected=None,
expected_offsets=None,
**args):
result = ragged_string_ops.unicode_split_with_offsets(**args)
self.assertAllEqual(expected, result[0])
self.assertAllEqual(expected_offsets, result[1])
@parameterized.parameters(
("UTF-8", [u"こんにちは", u"你好", u"Hello"]),
("UTF-16-BE", [u"こんにちは", u"你好", u"Hello"]),
("UTF-32-BE", [u"こんにちは", u"你好", u"Hello"]),
)
def testSplitWithDifferentEncodings(self, encoding, texts):
expected = _nested_splitchars(texts, encoding)
input_tensor = constant_op.constant(_nested_encode(texts, encoding))
result = ragged_string_ops.unicode_split(input_tensor, encoding)
self.assertAllEqual(expected, result)
@parameterized.parameters(
("UTF-8", [u"こんにちは", u"你好", u"Hello"]),
("UTF-16-BE", [u"こんにちは", u"你好", u"Hello"]),
("UTF-32-BE", [u"こんにちは", u"你好", u"Hello"]),
)
def testSplitWithOffsetsWithDifferentEncodings(self, encoding, texts):
expected_codepoints = _nested_splitchars(texts, encoding)
expected_offsets = _nested_offsets(texts, encoding)
input_tensor = constant_op.constant(_nested_encode(texts, encoding))
result = ragged_string_ops.unicode_split_with_offsets(
input_tensor, encoding)
self.assertAllEqual(expected_codepoints, result[0])
self.assertAllEqual(expected_offsets, result[1])
@parameterized.parameters([
dict(input=[b"\xFEED"],
errors="strict",
input_encoding="UTF-8",
exception=errors.InvalidArgumentError,
message="Invalid formatting on input string"),
dict(input="x",
input_encoding="UTF-8",
replacement_char=11141111,
exception=errors.InvalidArgumentError,
message="replacement_char out of unicode codepoint range"),
dict(input="x",
input_encoding="UTF-8",
errors="oranguatan",
exception=(ValueError, errors.InvalidArgumentError)),
]) # pyformat: disable
def testExceptions(self, exception=None, message=None, **args):
with self.assertRaisesRegexp(exception, message):
self.evaluate(ragged_string_ops.unicode_split(**args))
def testUnknownRankError(self):
if context.executing_eagerly():
return
s = array_ops.placeholder(dtypes.string)
message = "Rank of `input` must be statically known."
with self.assertRaisesRegexp(ValueError, message):
self.evaluate(ragged_string_ops.unicode_decode(s, input_encoding="UTF-8"))
if __name__ == "__main__":
test.main()
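As a usage note, the decode path exercised above through ragged_string_ops is also reachable via TensorFlow's public API; a small sketch, assuming a TF 2.x install (the values mirror the docstring example tested above):
import tensorflow as tf
texts = [u"G\xf6\xf6dnight".encode("utf-8"), u"\U0001f60a".encode("utf-8")]
# Returns a tf.RaggedTensor of Unicode codepoints, one row per input string:
# [[71, 246, 246, 100, 110, 105, 103, 104, 116], [128522]]
codepoints = tf.strings.unicode_decode(texts, input_encoding="UTF-8")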
| 43.437241
| 80
| 0.537787
| 3,980
| 31,492
| 4.135427
| 0.086935
| 0.019928
| 0.007473
| 0.042773
| 0.832311
| 0.791117
| 0.763169
| 0.751868
| 0.732487
| 0.711526
| 0
| 0.047154
| 0.257907
| 31,492
| 724
| 81
| 43.497238
| 0.654985
| 0.0429
| 0
| 0.712098
| 0
| 0
| 0.101556
| 0
| 0
| 0
| 0.002892
| 0
| 0.081164
| 1
| 0.05513
| false
| 0
| 0.024502
| 0.001531
| 0.102603
| 0.001531
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ac56aa8f8ef6731c93b78aab8c8512dfbb13b59
| 116
|
py
|
Python
|
src/simulator/learning/offline.py
|
takeitallsource/pac-simulator
|
2c00d878047ec4a0247167e8a7de5aec8b474086
|
[
"MIT"
] | 1
|
2018-07-14T07:09:23.000Z
|
2018-07-14T07:09:23.000Z
|
src/simulator/learning/offline.py
|
takeitallsource/pac-simulator
|
2c00d878047ec4a0247167e8a7de5aec8b474086
|
[
"MIT"
] | null | null | null |
src/simulator/learning/offline.py
|
takeitallsource/pac-simulator
|
2c00d878047ec4a0247167e8a7de5aec8b474086
|
[
"MIT"
] | null | null | null |
from .regime import ImitationLearningRegime
class OfflineImitationLearningRegime(ImitationLearningRegime):
pass
| 23.2
| 61
| 0.862069
| 8
| 116
| 12.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 116
| 5
| 62
| 23.2
| 0.961538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
43b36908c8f3d08f5c51661a5210e526a3a7e84b
| 144
|
py
|
Python
|
stock_analysis/__init__.py
|
Chuangye-Wang/StocksAnalysis
|
b3fe68173dd657d521d85ddde6d49537cbdba5ad
|
[
"MIT"
] | null | null | null |
stock_analysis/__init__.py
|
Chuangye-Wang/StocksAnalysis
|
b3fe68173dd657d521d85ddde6d49537cbdba5ad
|
[
"MIT"
] | null | null | null |
stock_analysis/__init__.py
|
Chuangye-Wang/StocksAnalysis
|
b3fe68173dd657d521d85ddde6d49537cbdba5ad
|
[
"MIT"
] | null | null | null |
## __init__.py file
from stock_analysis.core import *
from stock_analysis.price_plot import *
from stock_analysis.technique import *
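# Note: each star import above re-exports a submodule's public names, or exactly
# the names listed in that submodule's `__all__` when it defines one. A sketch of
# how a submodule would narrow what `import *` exposes (contents hypothetical):
#
#     # stock_analysis/core.py (illustrative only)
#     __all__ = ["load_prices"]       # star-import exposes only this name
#
#     def load_prices(ticker):
#         return []                   # stub body for illustration
#
#     def _parse_row(row):            # leading underscore: never star-exported
#         return row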
| 18
| 40
| 0.756944
| 19
| 144
| 5.315789
| 0.578947
| 0.267327
| 0.504951
| 0.455446
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180556
| 144
| 7
| 41
| 20.571429
| 0.855932
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
78d9ebd7a06d2df13959790d4ca8e37779c26e84
| 44,379
|
py
|
Python
|
flipper_thrift/python/feature_flag_store/FeatureFlagStore.py
|
rusintez/flipper-client
|
cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0
|
[
"Apache-2.0"
] | 82
|
2019-04-03T16:09:04.000Z
|
2022-03-29T23:48:31.000Z
|
flipper_thrift/python/feature_flag_store/FeatureFlagStore.py
|
rusintez/flipper-client
|
cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0
|
[
"Apache-2.0"
] | 17
|
2019-04-16T17:17:36.000Z
|
2021-02-25T22:06:01.000Z
|
flipper_thrift/python/feature_flag_store/FeatureFlagStore.py
|
rusintez/flipper-client
|
cd00ae1a3582c5cb7e661c5aa9b8a7b65b35a9e0
|
[
"Apache-2.0"
] | 12
|
2019-07-29T20:07:28.000Z
|
2022-03-29T21:10:15.000Z
|
# Copyright 2018 eShares, Inc. dba Carta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
# Autogenerated by Thrift Compiler (0.11.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
def Create(self, feature_name, is_enabled, client_data):
"""
Parameters:
- feature_name
- is_enabled
- client_data
"""
pass
def Get(self, feature_name):
"""
Parameters:
- feature_name
"""
pass
def Set(self, feature_name, is_enabled):
"""
Parameters:
- feature_name
- is_enabled
"""
pass
def Delete(self, feature_name):
"""
Parameters:
- feature_name
"""
pass
def List(self, limit, offset):
"""
Parameters:
- limit
- offset
"""
pass
def SetMeta(self, feature_name, meta):
"""
Parameters:
- feature_name
- meta
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def Create(self, feature_name, is_enabled, client_data):
"""
Parameters:
- feature_name
- is_enabled
- client_data
"""
self.send_Create(feature_name, is_enabled, client_data)
return self.recv_Create()
def send_Create(self, feature_name, is_enabled, client_data):
self._oprot.writeMessageBegin('Create', TMessageType.CALL, self._seqid)
args = Create_args()
args.feature_name = feature_name
args.is_enabled = is_enabled
args.client_data = client_data
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_Create(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = Create_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.error is not None:
raise result.error
raise TApplicationException(TApplicationException.MISSING_RESULT, "Create failed: unknown result")
def Get(self, feature_name):
"""
Parameters:
- feature_name
"""
self.send_Get(feature_name)
return self.recv_Get()
def send_Get(self, feature_name):
self._oprot.writeMessageBegin('Get', TMessageType.CALL, self._seqid)
args = Get_args()
args.feature_name = feature_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_Get(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = Get_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.error is not None:
raise result.error
raise TApplicationException(TApplicationException.MISSING_RESULT, "Get failed: unknown result")
def Set(self, feature_name, is_enabled):
"""
Parameters:
- feature_name
- is_enabled
"""
self.send_Set(feature_name, is_enabled)
self.recv_Set()
def send_Set(self, feature_name, is_enabled):
self._oprot.writeMessageBegin('Set', TMessageType.CALL, self._seqid)
args = Set_args()
args.feature_name = feature_name
args.is_enabled = is_enabled
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_Set(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = Set_result()
result.read(iprot)
iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def Delete(self, feature_name):
"""
Parameters:
- feature_name
"""
self.send_Delete(feature_name)
self.recv_Delete()
def send_Delete(self, feature_name):
self._oprot.writeMessageBegin('Delete', TMessageType.CALL, self._seqid)
args = Delete_args()
args.feature_name = feature_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_Delete(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = Delete_result()
result.read(iprot)
iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
def List(self, limit, offset):
"""
Parameters:
- limit
- offset
"""
self.send_List(limit, offset)
return self.recv_List()
def send_List(self, limit, offset):
self._oprot.writeMessageBegin('List', TMessageType.CALL, self._seqid)
args = List_args()
args.limit = limit
args.offset = offset
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_List(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = List_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.error is not None:
raise result.error
raise TApplicationException(TApplicationException.MISSING_RESULT, "List failed: unknown result")
def SetMeta(self, feature_name, meta):
"""
Parameters:
- feature_name
- meta
"""
self.send_SetMeta(feature_name, meta)
self.recv_SetMeta()
def send_SetMeta(self, feature_name, meta):
self._oprot.writeMessageBegin('SetMeta', TMessageType.CALL, self._seqid)
args = SetMeta_args()
args.feature_name = feature_name
args.meta = meta
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_SetMeta(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = SetMeta_result()
result.read(iprot)
iprot.readMessageEnd()
if result.error is not None:
raise result.error
return
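# --- Example (not generated): wiring the Client to a server over a socket. ---
# A minimal sketch; the host, port, and feature-flag values are assumptions,
# and `_example_client_usage` is a hypothetical helper, not part of the IDL.
def _example_client_usage():
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    # Buffered socket transport plus binary protocol: the usual Thrift pairing.
    transport = TTransport.TBufferedTransport(TSocket.TSocket("localhost", 9090))
    client = Client(TBinaryProtocol.TBinaryProtocol(transport))
    transport.open()
    try:
        client.Create("new_homepage", True, "{}")  # returns a FeatureFlagStoreItem
        return client.Get("new_homepage")
    finally:
        transport.close()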
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["Create"] = Processor.process_Create
self._processMap["Get"] = Processor.process_Get
self._processMap["Set"] = Processor.process_Set
self._processMap["Delete"] = Processor.process_Delete
self._processMap["List"] = Processor.process_List
self._processMap["SetMeta"] = Processor.process_SetMeta
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_Create(self, seqid, iprot, oprot):
args = Create_args()
args.read(iprot)
iprot.readMessageEnd()
result = Create_result()
try:
result.success = self._handler.Create(args.feature_name, args.is_enabled, args.client_data)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except FlipperException as error:
msg_type = TMessageType.REPLY
result.error = error
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("Create", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_Get(self, seqid, iprot, oprot):
args = Get_args()
args.read(iprot)
iprot.readMessageEnd()
result = Get_result()
try:
result.success = self._handler.Get(args.feature_name)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except FlipperException as error:
msg_type = TMessageType.REPLY
result.error = error
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("Get", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_Set(self, seqid, iprot, oprot):
args = Set_args()
args.read(iprot)
iprot.readMessageEnd()
result = Set_result()
try:
self._handler.Set(args.feature_name, args.is_enabled)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except FlipperException as error:
msg_type = TMessageType.REPLY
result.error = error
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("Set", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_Delete(self, seqid, iprot, oprot):
args = Delete_args()
args.read(iprot)
iprot.readMessageEnd()
result = Delete_result()
try:
self._handler.Delete(args.feature_name)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except FlipperException as error:
msg_type = TMessageType.REPLY
result.error = error
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("Delete", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_List(self, seqid, iprot, oprot):
args = List_args()
args.read(iprot)
iprot.readMessageEnd()
result = List_result()
try:
result.success = self._handler.List(args.limit, args.offset)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except FlipperException as error:
msg_type = TMessageType.REPLY
result.error = error
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("List", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_SetMeta(self, seqid, iprot, oprot):
args = SetMeta_args()
args.read(iprot)
iprot.readMessageEnd()
result = SetMeta_result()
try:
self._handler.SetMeta(args.feature_name, args.meta)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except FlipperException as error:
msg_type = TMessageType.REPLY
result.error = error
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("SetMeta", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
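# --- Example (not generated): serving the Processor. ---
# A minimal sketch; `handler` is any object implementing Iface, the port is an
# assumption, and `_example_serve` is a hypothetical helper.
def _example_serve(handler, port=9090):
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    from thrift.server import TServer
    server = TServer.TSimpleServer(
        Processor(handler),
        TSocket.TServerSocket(port=port),
        TTransport.TBufferedTransportFactory(),
        TBinaryProtocol.TBinaryProtocolFactory())
    server.serve()  # blocks, handling one connection at a time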
# HELPER FUNCTIONS AND STRUCTURES
class Create_args(object):
"""
Attributes:
- feature_name
- is_enabled
- client_data
"""
def __init__(self, feature_name=None, is_enabled=None, client_data=None,):
self.feature_name = feature_name
self.is_enabled = is_enabled
self.client_data = client_data
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.feature_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BOOL:
self.is_enabled = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.client_data = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Create_args')
if self.feature_name is not None:
oprot.writeFieldBegin('feature_name', TType.STRING, 1)
oprot.writeString(self.feature_name.encode('utf-8') if sys.version_info[0] == 2 else self.feature_name)
oprot.writeFieldEnd()
if self.is_enabled is not None:
oprot.writeFieldBegin('is_enabled', TType.BOOL, 2)
oprot.writeBool(self.is_enabled)
oprot.writeFieldEnd()
if self.client_data is not None:
oprot.writeFieldBegin('client_data', TType.STRING, 3)
oprot.writeString(self.client_data.encode('utf-8') if sys.version_info[0] == 2 else self.client_data)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Create_args)
Create_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'feature_name', 'UTF8', None, ), # 1
(2, TType.BOOL, 'is_enabled', None, None, ), # 2
(3, TType.STRING, 'client_data', 'UTF8', None, ), # 3
)
class Create_result(object):
"""
Attributes:
- success
- error
"""
def __init__(self, success=None, error=None,):
self.success = success
self.error = error
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = FeatureFlagStoreItem()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.error = FlipperException()
self.error.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Create_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.error is not None:
oprot.writeFieldBegin('error', TType.STRUCT, 1)
self.error.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Create_result)
Create_result.thrift_spec = (
(0, TType.STRUCT, 'success', [FeatureFlagStoreItem, None], None, ), # 0
(1, TType.STRUCT, 'error', [FlipperException, None], None, ), # 1
)
class Get_args(object):
"""
Attributes:
- feature_name
"""
def __init__(self, feature_name=None,):
self.feature_name = feature_name
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.feature_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Get_args')
if self.feature_name is not None:
oprot.writeFieldBegin('feature_name', TType.STRING, 1)
oprot.writeString(self.feature_name.encode('utf-8') if sys.version_info[0] == 2 else self.feature_name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Get_args)
Get_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'feature_name', 'UTF8', None, ), # 1
)
class Get_result(object):
"""
Attributes:
- success
- error
"""
def __init__(self, success=None, error=None,):
self.success = success
self.error = error
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = FeatureFlagStoreItem()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.error = FlipperException()
self.error.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Get_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.error is not None:
oprot.writeFieldBegin('error', TType.STRUCT, 1)
self.error.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Get_result)
Get_result.thrift_spec = (
(0, TType.STRUCT, 'success', [FeatureFlagStoreItem, None], None, ), # 0
(1, TType.STRUCT, 'error', [FlipperException, None], None, ), # 1
)
class Set_args(object):
"""
Attributes:
- feature_name
- is_enabled
"""
def __init__(self, feature_name=None, is_enabled=None,):
self.feature_name = feature_name
self.is_enabled = is_enabled
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.feature_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.BOOL:
self.is_enabled = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Set_args')
if self.feature_name is not None:
oprot.writeFieldBegin('feature_name', TType.STRING, 1)
oprot.writeString(self.feature_name.encode('utf-8') if sys.version_info[0] == 2 else self.feature_name)
oprot.writeFieldEnd()
if self.is_enabled is not None:
oprot.writeFieldBegin('is_enabled', TType.BOOL, 2)
oprot.writeBool(self.is_enabled)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Set_args)
Set_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'feature_name', 'UTF8', None, ), # 1
(2, TType.BOOL, 'is_enabled', None, None, ), # 2
)
class Set_result(object):
"""
Attributes:
- error
"""
def __init__(self, error=None,):
self.error = error
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.error = FlipperException()
self.error.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Set_result')
if self.error is not None:
oprot.writeFieldBegin('error', TType.STRUCT, 1)
self.error.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Set_result)
Set_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'error', [FlipperException, None], None, ), # 1
)
class Delete_args(object):
"""
Attributes:
- feature_name
"""
def __init__(self, feature_name=None,):
self.feature_name = feature_name
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.feature_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Delete_args')
if self.feature_name is not None:
oprot.writeFieldBegin('feature_name', TType.STRING, 1)
oprot.writeString(self.feature_name.encode('utf-8') if sys.version_info[0] == 2 else self.feature_name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Delete_args)
Delete_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'feature_name', 'UTF8', None, ), # 1
)
class Delete_result(object):
"""
Attributes:
- error
"""
def __init__(self, error=None,):
self.error = error
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.error = FlipperException()
self.error.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('Delete_result')
if self.error is not None:
oprot.writeFieldBegin('error', TType.STRUCT, 1)
self.error.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(Delete_result)
Delete_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'error', [FlipperException, None], None, ), # 1
)
class List_args(object):
"""
Attributes:
- limit
- offset
"""
def __init__(self, limit=None, offset=None,):
self.limit = limit
self.offset = offset
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.limit = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.offset = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('List_args')
if self.limit is not None:
oprot.writeFieldBegin('limit', TType.I64, 1)
oprot.writeI64(self.limit)
oprot.writeFieldEnd()
if self.offset is not None:
oprot.writeFieldBegin('offset', TType.I64, 2)
oprot.writeI64(self.offset)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(List_args)
List_args.thrift_spec = (
None, # 0
(1, TType.I64, 'limit', None, None, ), # 1
(2, TType.I64, 'offset', None, None, ), # 2
)
class List_result(object):
"""
Attributes:
- success
- error
"""
def __init__(self, success=None, error=None,):
self.success = success
self.error = error
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype26, _size23) = iprot.readListBegin()
for _i27 in range(_size23):
_elem28 = FeatureFlagStoreItem()
_elem28.read(iprot)
self.success.append(_elem28)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.error = FlipperException()
self.error.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('List_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter29 in self.success:
iter29.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.error is not None:
oprot.writeFieldBegin('error', TType.STRUCT, 1)
self.error.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(List_result)
List_result.thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT, [FeatureFlagStoreItem, None], False), None, ), # 0
(1, TType.STRUCT, 'error', [FlipperException, None], None, ), # 1
)
class SetMeta_args(object):
"""
Attributes:
- feature_name
- meta
"""
def __init__(self, feature_name=None, meta=None,):
self.feature_name = feature_name
self.meta = meta
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.feature_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.meta = FeatureFlagStoreMeta()
self.meta.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('SetMeta_args')
if self.feature_name is not None:
oprot.writeFieldBegin('feature_name', TType.STRING, 1)
oprot.writeString(self.feature_name.encode('utf-8') if sys.version_info[0] == 2 else self.feature_name)
oprot.writeFieldEnd()
if self.meta is not None:
oprot.writeFieldBegin('meta', TType.STRUCT, 2)
self.meta.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(SetMeta_args)
SetMeta_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'feature_name', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'meta', [FeatureFlagStoreMeta, None], None, ), # 2
)
class SetMeta_result(object):
"""
Attributes:
- error
"""
def __init__(self, error=None,):
self.error = error
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.error = FlipperException()
self.error.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('SetMeta_result')
if self.error is not None:
oprot.writeFieldBegin('error', TType.STRUCT, 1)
self.error.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(SetMeta_result)
SetMeta_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'error', [FlipperException, None], None, ), # 1
)
fix_spec(all_structs)
del all_structs
| 33.569592
| 134
| 0.586133
| 4,773
| 44,379
| 5.213074
| 0.050283
| 0.041998
| 0.028213
| 0.023149
| 0.85797
| 0.833494
| 0.818986
| 0.799775
| 0.786995
| 0.784021
| 0
| 0.005955
| 0.311363
| 44,379
| 1,321
| 135
| 33.595004
| 0.80822
| 0.036278
| 0
| 0.808967
| 0
| 0
| 0.031899
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.114035
| false
| 0.005848
| 0.007797
| 0.035088
| 0.217349
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60dd861a7e47d41be7741b109478331002aa1412
| 104,851
|
py
|
Python
|
test/azure/low-level/Expected/AcceptanceTests/LroLowLevel/lrolowlevel/rest/lros/_request_builders_py3.py
|
Azure/autorest.python
|
c36f5c1a2d614a1eeba6fec6a2c02517f2d1cce7
|
[
"MIT"
] | 35
|
2018-04-03T12:15:53.000Z
|
2022-03-11T14:03:34.000Z
|
test/azure/low-level/Expected/AcceptanceTests/LroLowLevel/lrolowlevel/rest/lros/_request_builders_py3.py
|
Azure/autorest.python
|
c36f5c1a2d614a1eeba6fec6a2c02517f2d1cce7
|
[
"MIT"
] | 652
|
2017-08-28T22:44:41.000Z
|
2022-03-31T21:20:31.000Z
|
test/azure/low-level/Expected/AcceptanceTests/LroLowLevel/lrolowlevel/rest/lros/_request_builders_py3.py
|
Azure/autorest.python
|
c36f5c1a2d614a1eeba6fec6a2c02517f2d1cce7
|
[
"MIT"
] | 29
|
2017-08-28T20:57:01.000Z
|
2022-03-11T14:03:38.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Dict, Optional
from azure.core.rest import HttpRequest
from msrest import Serializer
_SERIALIZER = Serializer()
def build_put200_succeeded_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Succeeded’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/200/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_patch200_succeeded_ignore_headers_request(
*, json: Any = None, content: Any = None, **kwargs: Any
) -> HttpRequest:
"""Long running put request, service returns a 200 to the initial request with location header. We
should not have any subsequent calls after receiving this first response.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to patch.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to patch.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/patch/200/succeeded/ignoreheaders")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put201_succeeded_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Succeeded’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/201/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_post202_list_request(**kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 202 with empty body to first request, returns a 200
with body [{ 'id': '100', 'name': 'foo' }].
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 200
response.json() == [
{
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
]
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/list")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, **kwargs)
def build_put200_succeeded_no_state_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 200 to the initial request, with an entity that
does not contain ProvisioningState=’Succeeded’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/200/succeeded/nostate")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put202_retry200_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 202 to the initial request, with a location header
    that points to a polling URL that returns a 200 and an entity that doesn't contain
ProvisioningState.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/202/retry/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put201_creating_succeeded200_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Succeeded’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200, 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/201/creating/succeeded/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put200_updating_succeeded204_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Updating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Succeeded’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/200/updating/succeeded/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put201_creating_failed200_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Created’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Failed’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200, 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/201/created/failed/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put200_acceptedcanceled200_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 201 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Canceled’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/200/accepted/canceled/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_no_header_in_retry_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 202 to the initial request with location header.
Subsequent calls to operation status do not contain location header.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/put/noheader/202/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_async_retry_succeeded_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putasync/retry/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_async_no_retry_succeeded_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putasync/noretry/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_async_retry_failed_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putasync/retry/failed")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_async_no_retrycanceled_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 200 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putasync/noretry/canceled")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_async_no_header_in_retry_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request, service returns a 202 to the initial request with
Azure-AsyncOperation header. Subsequent calls to operation status do not contain
Azure-AsyncOperation header.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 201
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putasync/noheader/201/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_non_resource_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request with non resource.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
    our example to find the input shape. Sku to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
    a byte iterator, or stream input). Sku to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional.
"name": "str" # Optional.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional.
"name": "str" # Optional.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putnonresource/202/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_async_non_resource_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request with non resource.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Sku to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Sku to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional.
"name": "str" # Optional.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional.
"name": "str" # Optional.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putnonresourceasync/202/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_sub_resource_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request with sub resource.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Sub Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Sub Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Sub Resource Id.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
}
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Sub Resource Id.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
}
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putsubresource/202/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_put_async_sub_resource_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running put request with sub resource.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Sub Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Sub Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Sub Resource Id.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
}
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Sub Resource Id.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
}
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/putsubresourceasync/202/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_delete_provisioning202_accepted200_succeeded_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request, with an entity that
contains ProvisioningState=’Accepted’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Succeeded’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/delete/provisioning/202/accepted/200/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete_provisioning202_deleting_failed200_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Failed’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/delete/provisioning/202/deleting/200/failed")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete_provisioning202_deletingcanceled200_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request, with an entity that
contains ProvisioningState=’Creating’. Polls return this value until the last poll returns a
‘200’ with ProvisioningState=’Canceled’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/delete/provisioning/202/deleting/200/canceled")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete204_succeeded_request(**kwargs: Any) -> HttpRequest:
"""Long running delete succeeds and returns right away.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/delete/204/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete202_retry200_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request. Polls return this
value until the last poll returns a ‘200’ with ProvisioningState=’Succeeded’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/delete/202/retry/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete202_no_retry204_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request. Polls return this
value until the last poll returns a ‘200’ with ProvisioningState=’Succeeded’.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/delete/202/noretry/204")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete_no_header_in_retry_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a location header in the initial request.
Subsequent calls to operation status do not contain location header.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/delete/noheader")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete_async_no_header_in_retry_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns an Azure-AsyncOperation header in the initial
request. Subsequent calls to operation status do not contain Azure-AsyncOperation header.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/deleteasync/noheader/202/204")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete_async_retry_succeeded_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/deleteasync/retry/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete_async_no_retry_succeeded_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/deleteasync/noretry/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete_async_retry_failed_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/deleteasync/retry/failed")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_delete_async_retrycanceled_request(**kwargs: Any) -> HttpRequest:
"""Long running delete request, service returns a 202 to the initial request. Poll the endpoint
indicated in the Azure-AsyncOperation header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/deleteasync/retry/canceled")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=url, headers=header_parameters, **kwargs)
def build_post200_with_payload_request(**kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request, with 'Location'
    header. Polls return a 200 with a response body after success.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 200, 202
response.json() == {
"id": "str", # Optional.
"name": "str" # Optional.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/post/payload/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, **kwargs)
def build_post202_retry200_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request, with 'Location' and
'Retry-After' headers, Polls return a 200 with a response body after success.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/post/202/retry/200")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_post202_no_retry204_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request, with 'Location'
header, 204 with noresponse body after success.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/post/202/noretry/204")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_post_double_headers_final_location_get_request(**kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request with both Location and
Azure-Async header. Poll Azure-Async and it's success. Should poll Location to get the final
object.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/LROPostDoubleHeadersFinalLocationGet")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, **kwargs)
def build_post_double_headers_final_azure_header_get_request(**kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request with both Location and
Azure-Async header. Poll Azure-Async and it's success. Should NOT poll Location to get the
final object.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/LROPostDoubleHeadersFinalAzureHeaderGet")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, **kwargs)
def build_post_double_headers_final_azure_header_get_default_request(**kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request with both Location and
    Azure-Async header. Poll Azure-Async and its success. Should NOT poll Location to get the
final object if you support initial Autorest behavior.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# response body for status code(s): 202
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/LROPostDoubleHeadersFinalAzureHeaderGetDefault")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, **kwargs)
def build_post_async_retry_succeeded_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request, with an entity that
    contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/postasync/retry/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, json=json, content=content, **kwargs)
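# Hedged usage sketch (not part of the generated builders): each docstring
# above says the builder returns an ``HttpRequest`` that you pass to a
# generated client's ``send_request`` method; ``client`` below stands in for
# any such client object.
def _example_post_async_retry_succeeded(client):
    # Fill out the JSON input template from the docstring (values made up).
    request = build_post_async_retry_succeeded_request(
        json={"location": "West US", "tags": {"tag1": "value1"}},
        content_type="application/json",
    )
    response = client.send_request(request)
    response.raise_for_status()
    return response.json()  # shaped like the documented 200 response body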
def build_post_async_no_retry_succeeded_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request, with an entity that
    contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
# response body for status code(s): 200
response.json() == {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/postasync/noretry/succeeded")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_post_async_retry_failed_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request, with an entity that
    contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/postasync/retry/failed")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, json=json, content=content, **kwargs)
def build_post_async_retrycanceled_request(*, json: Any = None, content: Any = None, **kwargs: Any) -> HttpRequest:
"""Long running post request, service returns a 202 to the initial request, with an entity that
    contains ProvisioningState='Creating'. Poll the endpoint indicated in the Azure-AsyncOperation
header for operation status.
See https://aka.ms/azsdk/python/protocol/quickstart for how to incorporate this request builder
into your code flow.
:keyword json: Pass in a JSON-serializable object (usually a dictionary). See the template in
our example to find the input shape. Product to put.
:paramtype json: any
:keyword content: Pass in binary content you want in the body of the request (typically bytes,
a byte iterator, or stream input). Product to put.
:paramtype content: any
:return: Returns an :class:`~azure.core.rest.HttpRequest` that you will pass to the client's
`send_request` method. See https://aka.ms/azsdk/python/protocol/quickstart for how to
incorporate this response into your code flow.
:rtype: ~azure.core.rest.HttpRequest
Example:
.. code-block:: python
# JSON input template you can fill out and use as your body input.
json = {
"id": "str", # Optional. Resource Id.
"location": "str", # Optional. Resource Location.
"name": "str", # Optional. Resource Name.
"properties": {
"provisioningState": "str", # Optional.
"provisioningStateValues": "str" # Optional. Possible values include: "Succeeded", "Failed", "canceled", "Accepted", "Creating", "Created", "Updating", "Updated", "Deleting", "Deleted", "OK".
},
"tags": {
"str": "str" # Optional. A set of tags. Dictionary of :code:`<string>`.
},
"type": "str" # Optional. Resource Type.
}
"""
content_type = kwargs.pop("content_type", None) # type: Optional[str]
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", "/lro/postasync/retry/canceled")
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
header_parameters["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=url, headers=header_parameters, json=json, content=content, **kwargs)
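# Hedged sketch (not part of the generated builders): the ``content`` keyword
# accepts an already-serialized body (bytes, a byte iterator, or a stream)
# instead of the JSON-serializable object taken by ``json``.
def _example_post_async_retrycanceled_raw(client):
    raw_body = b'{"location": "West US"}'  # pre-encoded request body
    request = build_post_async_retrycanceled_request(
        content=raw_body,
        content_type="application/json",
    )
    return client.send_request(request)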
| 48.052704
| 216
| 0.615225
| 11,746
| 104,851
| 5.438277
| 0.021369
| 0.061649
| 0.057109
| 0.017095
| 0.978208
| 0.977708
| 0.977598
| 0.977097
| 0.976267
| 0.975939
| 0
| 0.006325
| 0.261123
| 104,851
| 2,181
| 217
| 48.074736
| 0.818205
| 0.717714
| 0
| 0.726727
| 0
| 0
| 0.189533
| 0.052794
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126126
| false
| 0
| 0.009009
| 0
| 0.261261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
716c979af7ba905f65923a00a4d487cb076db373
| 2,343
|
py
|
Python
|
searchBar/booking/models.py
|
rajvijen/Industry-Visit-Planning-and-Booking
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | null | null | null |
searchBar/booking/models.py
|
rajvijen/Industry-Visit-Planning-and-Booking
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | 8
|
2019-09-05T04:58:20.000Z
|
2022-01-13T00:58:01.000Z
|
searchBar/booking/models.py
|
rajvijen/ASE-101
|
fe54f9f354eb88cd78b1bc18a61908fe7de03577
|
[
"MIT"
] | 5
|
2018-11-15T19:04:29.000Z
|
2018-11-17T06:10:38.000Z
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
timings = (('1', '9:30'), ('2', '11:30'), ('3', '14:30'))
class BookingListIndi(models.Model):
    user1 = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    name_person = models.CharField(max_length=200)
    industry_name = models.CharField(max_length=100)
    industry_branch = models.CharField(max_length=100, null=True)
    email = models.EmailField(null=True)
    date_visit = models.DateField(default=timezone.now)
    # The stored value must be a choice key ('1', '2' or '3'), not a label.
    slot_time = models.CharField(max_length=10, choices=timings, default='1')
    visiting_members = models.IntegerField(default=0)
    total_available = models.IntegerField(default=20)
    total_taken = models.IntegerField(default=0)
    street_name = models.CharField(max_length=150)
    city_name = models.CharField(max_length=100)
    pin_code = models.CharField(max_length=10)
    code = models.CharField(max_length=20)
    visited = models.BooleanField(default=False)
    left_days_bool = models.BooleanField(default=True)
    def __str__(self):
        return self.name_person
class BookingListOrga(models.Model):
    user1 = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    name_person = models.CharField(max_length=200)
    organisation_name = models.CharField(max_length=200)
    industry_name = models.CharField(max_length=100)
    industry_branch = models.CharField(max_length=100, null=True)
    email = models.EmailField(null=True)
    date_visit = models.DateField(default=timezone.now)
    # As above, the default must be a choice key, not the '9:30' label.
    slot_time = models.CharField(max_length=10, choices=timings, default='1')
    visiting_members = models.IntegerField(default=0)
    total_available = models.IntegerField(default=20)
    total_taken = models.IntegerField(default=0)
    street_name = models.CharField(max_length=150)
    city_name = models.CharField(max_length=100)
    pin_code = models.CharField(max_length=10)
    code = models.CharField(max_length=20)
    visited = models.BooleanField(default=False)
    def __str__(self):
        return self.organisation_name
class Tickets(models.Model):
    day = models.DateField()
    slot = models.CharField(max_length=10)
    ticks = models.IntegerField(default=20)
    def __str__(self):
        # __str__ must return a string; self.day is a datetime.date.
        return str(self.day)
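# Hedged usage sketch (not part of the original module): creating an
# individual booking, e.g. from a view or a shell session. All field values
# below are illustrative.
def _example_create_booking(user):
    return BookingListIndi.objects.create(
        user1=user,
        name_person='Jane Doe',
        industry_name='Acme Industries',
        industry_branch='Pune',
        email='jane@example.com',
        slot_time='1',  # choice key for the 9:30 slot
        visiting_members=5,
        street_name='1 MG Road',
        city_name='Pune',
        pin_code='411001',
        code='IV-0001',
    )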
| 42.6
| 81
| 0.727273
| 301
| 2,343
| 5.465116
| 0.252492
| 0.164134
| 0.19696
| 0.262614
| 0.81155
| 0.756839
| 0.756839
| 0.756839
| 0.756839
| 0.756839
| 0
| 0.040224
| 0.161758
| 2,343
| 54
| 82
| 43.388889
| 0.797352
| 0
| 0
| 0.6875
| 0
| 0
| 0.010922
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.0625
| 0.0625
| 0.979167
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
718183a4812cec5ffce8a9a68bef1c208aedd38c
| 11,489
|
py
|
Python
|
instagram_private_api/constants.py
|
zenmaldives/instagrampvtapi
|
526f244bae18962b5777e1f2f870699c0e4a8d2d
|
[
"MIT"
] | 19
|
2018-10-09T05:12:04.000Z
|
2022-02-28T04:08:25.000Z
|
instagram_private_api/constants.py
|
zenmaldives/instagrampvtapi
|
526f244bae18962b5777e1f2f870699c0e4a8d2d
|
[
"MIT"
] | null | null | null |
instagram_private_api/constants.py
|
zenmaldives/instagrampvtapi
|
526f244bae18962b5777e1f2f870699c0e4a8d2d
|
[
"MIT"
] | 5
|
2019-02-28T02:31:08.000Z
|
2020-10-21T11:11:35.000Z
|
class Constants(object):
"""Constants holder class that stores the bulk of the fixed strings used in the library."""
IG_SIG_KEY = '99e16edcca71d7c1f3fd74d447f6281bd5253a623000a55ed0b60014467a53b1'
IG_CAPABILITIES = '3brTBw==' # = base64.b64encode(struct.pack('<i', 131316445)).decode('ascii')
SIG_KEY_VERSION = '4'
APP_VERSION = '26.0.0.10.86'
APPLICATION_ID = '567067343352427'
FB_HTTP_ENGINE = 'Liger'
ANDROID_VERSION = 24
ANDROID_RELEASE = '7.0'
PHONE_MANUFACTURER = 'samsung'
PHONE_DEVICE = 'SM-G930F'
PHONE_MODEL = 'herolte'
PHONE_DPI = '640dpi'
PHONE_RESOLUTION = '1440x2560'
PHONE_CHIPSET = 'samsungexynos8890'
USER_AGENT_FORMAT = \
'Instagram %(app_version)s Android (%(android_version)d/%(android_release)s; ' \
'%(dpi)s; %(resolution)s; %(brand)s; %(device)s; %(model)s; %(chipset)s; en_US)'
USER_AGENT_EXPRESSION = \
r'Instagram\s(?P<app_version>[^\s]+)\sAndroid\s\((?P<android_version>[0-9]+)/(?P<android_release>[0-9\.]+);\s' \
r'(?P<dpi>\d+dpi);\s(?P<resolution>\d+x\d+);\s(?P<manufacturer>[^;]+);\s(?P<device>[^;]+);\s' \
r'(?P<model>[^;]+);\s(?P<chipset>[^;]+);'
USER_AGENT = USER_AGENT_FORMAT % {
'app_version': APP_VERSION,
'android_version': ANDROID_VERSION,
'android_release': ANDROID_RELEASE,
'brand': PHONE_MANUFACTURER,
'device': PHONE_DEVICE,
'model': PHONE_MODEL,
'dpi': PHONE_DPI,
'resolution': PHONE_RESOLUTION,
'chipset': PHONE_CHIPSET}
LOGIN_EXPERIMENTS = 'ig_android_sms_consent_in_reg,ig_android_flexible_sampling_universe,ig_android_background_conf_resend_fix,ig_restore_focus_on_reg_textbox_universe,ig_android_analytics_data_loss,ig_android_gmail_oauth_in_reg,ig_android_phoneid_sync_interval,ig_android_stay_at_one_tap_on_error,ig_android_link_to_access_if_email_taken_in_reg,ig_android_non_fb_sso,ig_android_family_apps_user_values_provider_universe,ig_android_reg_inline_errors,ig_android_run_fb_reauth_on_background,ig_fbns_push,ig_android_reg_omnibox,ig_android_show_password_in_reg_universe,ig_android_background_phone_confirmation_v2,ig_fbns_blocked,ig_android_access_redesign,ig_android_please_create_username_universe,ig_android_gmail_oauth_in_access,ig_android_reg_whiteout_redesign_v3' # noqa
    EXPERIMENTS = ('ig_android_disk_cache_match_journal_size_to_cache_max_count,ig_android_ad_move_carousel_indicator_to_ufi_universe,ig_android_universe_video_production,ig_android_live_follow_from_comments_universe,ig_android_ad_watchandinstall_universe,ig_android_live_analytics,ig_android_video_captions_universe,ig_android_offline_location_feed,ig_android_ontact_invite_universe,ig_android_insta_video_reconnect_viewers,ig_android_live_broadcast_blacklist,ig_android_checkbox_instead_of_button_as_follow_affordance_universe,ig_android_ufi_redesign_video_social_context,ig_android_stories_surface_universe,ig_android_verified_comments_universe,ig_android_preload_media_ahead_in_current_reel,android_instagram_prefetch_suggestions_universe,ig_android_direct_inbox_tray_suggested_user_universe,ig_android_direct_blue_tab,ig_android_light_status_bar_universe,ig_android_asset_button_new_content_animation,ig_android_async_network_tweak_universe,ig_android_react_native_lazy_modules_killswitch,ig_android_instavideo_remove_nux_comments,ig_video_copyright_whitelist,ig_android_ad_sponsor_label_story_top_design_universe,ig_android_business_action,ig_android_direct_link_style,ig_android_live_heart_enhancements_universe,ig_android_preload_item_count_in_reel_viewer_buffer,ig_android_auto_retry_post_mode,ig_android_fix_render_thread_crash,ig_android_shopping,ig_fbns_preload_default,ig_android_gesture_dismiss_reel_viewer,ig_android_ad_logger_funnel_logging_universe,ig_android_direct_links,ig_android_links_receivers,ig_android_ad_impression_backtest,ig_android_offline_freshness_toast_10_12,ig_android_invites_without_token_universe,ig_android_immersive_viewer,ig_android_mqtt_skywalker,ig_fbns_push,ig_android_react_native_universe,ig_android_special_brush,ig_android_live_consumption_abr,ig_android_story_viewer_social_context,ig_android_explore_verified_badges_stories_universe,ig_android_video_loopcount_int,ig_android_enable_main_feed_reel_tray_preloading,ig_android_ad_watchbrowse_universe,ig_android_react_native_ota,ig_android_discover_people_icon_in_others_profile,ig_android_log_mediacodec_info,ig_android_enable_back_navigation_nux_universe,ig_android_cold_start_feed_request,ig_video_use_sve_universe,ig_android_offline_explore_10_14,ig_android_stories_teach_gallery_location,ig_android_http_stack_experiment_2017,ig_android_stories_device_tilt,ig_android_pending_request_search_bar,ig_android_fb_topsearch_sgp_fork_request,ig_android_animation_perf_reporter_timeout,ig_android_new_block_flow,ig_android_direct_address_links,ig_android_share_profile_photo_to_feed_universe,ig_android_stories_private_likes,ig_android_text_background,ig_android_stories_video_prefetch_kb,ig_android_su_activity_feed,ig_android_live_stop_broadcast_on_404,ig_android_render_iframe_interval,ig_android_boomerang_entry,ig_android_camera_shortcut_universe,ig_android_fetch_fresh_viewer_list,ig_android_ad_media_url_logging_universe,ig_android_phone_confirm_rate_limit_language_universe,ig_android_keep_http_cache_on_user_switch,ig_android_facebook_twitter_profile_photos,ig_android_full_user_detail_endpoint,ig_android_direct_sqlite_universe,ig_android_family_bridge_share,ig_android_search,ig_android_insta_video_consumption_titles,ig_android_live_notification_control,ig_android_camera_universe,ig_android_instavideo_audio_only_mode,ig_android_live_video_reactions_consumption_universe,ig_android_swipe_fragment_container,ig_creation_growth_holdout,ig_android_live_save_to_camera_roll_universe,ig_android_ad_cta_redesign_universe,ig_android_sticker_region_tracking,ig_'
                   'android_unified_inbox,ig_android_offline_main_feed_10_11,ig_android_chaining_teaser_animation,ig_android_business_conversion_value_prop_v2,ig_android_redirect_to_low_latency_universe,ig_android_feed_header_profile_ring_universe,ig_family_bridges_holdout_universe,ig_android_following_follower_social_context,ig_android_video_keep_screen_on,ig_android_profile_photo_as_media,ig_android_insta_video_consumption_infra,ig_android_sms_consent_in_edit_profile,ig_android_infinite_scrolling_launch,ig_in_feed_commenting,ig_android_live_broadcast_enable_year_class_2011,ig_android_direct_phone_number_links,ig_android_direct_share_sheet_ring,ig_android_stories_weblink_creation,ig_android_histogram_reporter,ig_android_network_cancellation,ig_android_react_native_insights,ig_android_insta_video_audio_encoder,ig_android_family_bridge_bookmarks,ig_android_dash_for_vod_universe,ig_android_direct_mutually_exclusive_experiment_universe,ig_android_stories_selfie_sticker,ig_android_ad_add_per_event_counter_to_logging_event,ig_android_rtl,ig_android_direct_send_auto_retry,ig_android_direct_video_autoplay_scroll,ig_android_promote_from_profile_button,ig_android_share_spinner,ig_android_profile_share_username,ig_android_sidecar_edit_screen_universe,ig_promotions_unit_in_insights_landing_page,ig_android_save_longpress_tooltip,ig_android_constrain_image_size_universe,ig_android_business_new_graphql_endpoint_universe,ig_ranking_following,ig_android_universe_reel_video_production,ig_android_sfplt,ig_android_offline_hashtag_feed,ig_android_live_skin_smooth,ig_android_stories_posting_offline_ui,ig_android_direct_add_local_thread_in_inbox,ig_android_swipe_navigation_x_angle_universe,ig_android_offline_mode_holdout,ig_android_non_square_first,ig_android_insta_video_drawing,ig_android_react_native_usertag,ig_android_swipeablefilters_universe,ig_android_analytics_logger_running_background_universe,ig_android_save_all,ig_android_reel_viewer_data_buffer_size,ig_android_disk_cache_has_sanity_check,ig_direct_quality_holdout_universe,ig_android_family_bridge_discover,ig_android_react_native_restart_after_error_universe,ig_story_tray_peek_content_universe,ig_android_profile,ig_android_high_res_upload_2,ig_android_http_service_same_thread,ig_android_remove_followers_universe,ig_android_skip_video_render,ig_android_live_viewer_comment_prompt_universe,ig_android_search_client_matching,ig_explore_netego,ig_android_boomerang_feed_attribution,ig_android_explore_story_sfslt_universe,ig_android_rendering_controls,ig_android_os_version_blocking,ig_android_encoder_width_safe_multiple_16,ig_android_direct_video_autoplay,ig_android_snippets_profile_nux,ig_android_e2e_optimization_universe,ig_android_disk_usage,ig_android_save_collections,ig_android_live_see_fewer_videos_like_this_universe,ig_android_live_view_profile_from_comments_universe,ig_formats_and_feedbacks_holdout_universe,ig_fbns_blocked,ig_android_instavideo_periodic_notif,ig_android_empty_feed_redesign,ig_android_marauder_update_frequency,ig_android_suggest_password_reset_on_oneclick_login,ig_android_live_special_codec_size_list,ig_android_enable_share_to_messenger,ig_android_live_video_reactions_creation_universe,ig_android_live_hide_viewer_nux,ig_android_channels_home,ig_android_sidecar_gallery_universe,ig_android_live_using_webrtc,ig_android_insta_video_broadcaster_infra_perf,ig_android_business_conversion_social_context,android_ig_fbns_kill_switch,ig_android_retry_story_seen_state,ig_android_react_native_universe_kill_switch,ig_android_stories_book_universe,ig_android_all_videopla'
                   'yback_persisting_sound,ig_android_cache_layer_bytes_threshold,ig_android_comment_deep_linking_v1,ig_android_business_promotion,ig_android_anrwatchdog,ig_android_qp_kill_switch,ig_android_ad_always_send_ad_attribution_id_universe,ig_android_2fac,ig_direct_bypass_group_size_limit_universe,ig_android_promote_simplified_flow,ig_android_share_to_whatsapp,ig_fbns_dump_ids,ig_android_ad_show_mai_cta_loading_state_universe,ig_android_skywalker_live_event_start_end,ig_android_toplive_verified_badges_universe,ig_android_live_join_comment_ui_change,ig_android_draw_button_new_tool_animation,ig_video_max_duration_qe_preuniverse,ig_android_http_stack_kz_debug,ig_request_cache_layer,ig_android_carousel_feed_indicators_universe,ig_android_new_optic,ig_android_mark_reel_seen_on_Swipe_forward,ig_fbns_shared,ig_android_capture_slowmo_mode,ig_android_save_multi_select,ig_android_mead,ig_android_video_single_surface,ig_android_offline_reel_feed,ig_android_video_download_logging,ig_android_last_edits,ig_android_exoplayer_4142,ig_android_snippets_haptic_feedback,ig_android_gl_drawing_marks_after_undo_backing,ig_android_mark_seen_state_on_viewed_impression,ig_android_live_backgrounded_reminder_universe,ig_android_disable_comment_public_test,ig_android_user_detail_endpoint,ig_android_comment_tweaks_universe,ig_android_add_to_last_post,ig_save_insights,ig_android_live_enhanced_end_screen_universe,ig_android_ad_add_counter_to_logging_event,ig_android_sidecar,ig_android_direct_split_new_message_button,ig_android_grid_video_icon,ig_android_ad_watchandlead_universe,ig_android_progressive_jpeg,ig_android_offline_story_stickers,ig_android_direct_inbox_unseen_hint,ig_android_top_live_titles_universe,ig_android_video_prefetch_for_connectivity_type,ig_android_ad_holdout_16m5_universe,ig_android_sync_on_background_enhanced,ig_android_upload_reliability_use_fbupload_lib,ig_android_samsung_app_badging,ig_android_offline_commenting,ig_android_insta_video_abr_resize,ig_android_insta_video_sound_always_on,ig_android_disable_comment')  # noqa
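# Hedged self-check (not part of the original module): USER_AGENT_EXPRESSION
# is the parsing counterpart of USER_AGENT_FORMAT, so it should round-trip
# the USER_AGENT assembled above.
if __name__ == '__main__':
    import re
    match = re.search(Constants.USER_AGENT_EXPRESSION, Constants.USER_AGENT)
    assert match is not None
    print(match.group('app_version'), match.group('device'), match.group('resolution'))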
| 261.113636
| 9,149
| 0.923405
| 1,753
| 11,489
| 5.293212
| 0.321734
| 0.242483
| 0.117254
| 0.016381
| 0.065848
| 0.006466
| 0
| 0
| 0
| 0
| 0
| 0.012989
| 0.028375
| 11,489
| 43
| 9,150
| 267.186047
| 0.818239
| 0.014013
| 0
| 0
| 0
| 0.147059
| 0.926937
| 0.901493
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.058824
| 0
| 0
| 0.588235
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
e0b27733a36919c137a52b9cfd8e675e7b35f5f5
| 19
|
py
|
Python
|
test/run/t36.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t36.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t36.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
print(min(3, 8, 2, 6))
| 9.5
| 18
| 0.631579
| 6
| 19
| 2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 0.105263
| 19
| 1
| 19
| 19
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
461a924bb2a1f09a9e89fb472a737a372a5fd7dc
| 27,726
|
py
|
Python
|
networks/dinknet.py
|
afperezm/DeepGlobe-Road-Extraction-Challenge
|
d3e0a8123d64baa3975663ece053edbc4bbdc4e6
|
[
"MIT"
] | null | null | null |
networks/dinknet.py
|
afperezm/DeepGlobe-Road-Extraction-Challenge
|
d3e0a8123d64baa3975663ece053edbc4bbdc4e6
|
[
"MIT"
] | null | null | null |
networks/dinknet.py
|
afperezm/DeepGlobe-Road-Extraction-Challenge
|
d3e0a8123d64baa3975663ece053edbc4bbdc4e6
|
[
"MIT"
] | null | null | null |
"""
Code for LinkNet variants, based on https://github.com/snakers4/spacenet-three
"""
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
from copy import deepcopy
from functools import partial
from model.attention.CBAM import CBAMBlock
from models.moco2_module import MocoV2
from networks.attention import CrossAttention
from networks import moco
from torchvision.models.resnet import BasicBlock
from torchvision import models
non_linearity = partial(F.relu, inplace=True)
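# The "D-block" at the network center stacks 3x3 convolutions with
# exponentially growing dilation rates and sums every intermediate
# activation, widening the receptive field without further downsampling.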
class DBlockMoreDilate(nn.Module):
def __init__(self, channel):
super(DBlockMoreDilate, self).__init__()
self.dilate1 = nn.Conv2d(channel, channel, kernel_size=3, dilation=1, padding=1)
self.dilate2 = nn.Conv2d(channel, channel, kernel_size=3, dilation=2, padding=2)
self.dilate3 = nn.Conv2d(channel, channel, kernel_size=3, dilation=4, padding=4)
self.dilate4 = nn.Conv2d(channel, channel, kernel_size=3, dilation=8, padding=8)
self.dilate5 = nn.Conv2d(channel, channel, kernel_size=3, dilation=16, padding=16)
for m in self.modules():
if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
if m.bias is not None:
m.bias.data.zero_()
def forward(self, x):
dilate1_out = non_linearity(self.dilate1(x))
dilate2_out = non_linearity(self.dilate2(dilate1_out))
dilate3_out = non_linearity(self.dilate3(dilate2_out))
dilate4_out = non_linearity(self.dilate4(dilate3_out))
dilate5_out = non_linearity(self.dilate5(dilate4_out))
out = x + dilate1_out + dilate2_out + dilate3_out + dilate4_out + dilate5_out
return out
class DBlock(nn.Module):
def __init__(self, channel):
super(DBlock, self).__init__()
self.dilate1 = nn.Conv2d(channel, channel, kernel_size=3, dilation=1, padding=1)
self.dilate2 = nn.Conv2d(channel, channel, kernel_size=3, dilation=2, padding=2)
self.dilate3 = nn.Conv2d(channel, channel, kernel_size=3, dilation=4, padding=4)
self.dilate4 = nn.Conv2d(channel, channel, kernel_size=3, dilation=8, padding=8)
# self.dilate5 = nn.Conv2d(channel, channel, kernel_size=3, dilation=16, padding=16)
for m in self.modules():
if isinstance(m, nn.Conv2d) or isinstance(m, nn.ConvTranspose2d):
if m.bias is not None:
m.bias.data.zero_()
def forward(self, x):
dilate1_out = non_linearity(self.dilate1(x))
dilate2_out = non_linearity(self.dilate2(dilate1_out))
dilate3_out = non_linearity(self.dilate3(dilate2_out))
dilate4_out = non_linearity(self.dilate4(dilate3_out))
# dilate5_out = non_linearity(self.dilate5(dilate4_out))
out = x + dilate1_out + dilate2_out + dilate3_out + dilate4_out # + dilate5_out
return out
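# LinkNet-style decoder block: a 1x1 conv shrinks the channel count to a
# quarter, a stride-2 transposed conv doubles the spatial resolution, and a
# final 1x1 conv projects to the requested number of filters.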
class DecoderBlock(nn.Module):
def __init__(self, in_channels, n_filters):
super(DecoderBlock, self).__init__()
self.conv1 = nn.Conv2d(in_channels, in_channels // 4, 1)
self.norm1 = nn.BatchNorm2d(in_channels // 4)
self.relu1 = non_linearity
self.deconv2 = nn.ConvTranspose2d(in_channels // 4, in_channels // 4, 3, stride=2, padding=1, output_padding=1)
self.norm2 = nn.BatchNorm2d(in_channels // 4)
self.relu2 = non_linearity
self.conv3 = nn.Conv2d(in_channels // 4, n_filters, 1)
self.norm3 = nn.BatchNorm2d(n_filters)
self.relu3 = non_linearity
def forward(self, x):
x = self.conv1(x)
x = self.norm1(x)
x = self.relu1(x)
x = self.deconv2(x)
x = self.norm2(x)
x = self.relu2(x)
x = self.conv3(x)
x = self.norm3(x)
x = self.relu3(x)
return x
class DLinkNet18(nn.Module):
def __init__(self, backbone='seco-1m', num_classes=1):
super(DLinkNet18, self).__init__()
filters = [64, 128, 256, 512]
if backbone == 'random':
resnet = models.resnet18(pretrained=False)
elif backbone == 'imagenet':
resnet = models.resnet18(pretrained=True)
elif backbone == 'seco-100k':
resnet = moco.resnet18(large=False)
elif backbone == 'seco-1m':
resnet = moco.resnet18(large=True)
else:
raise ValueError()
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
self.d_block = DBlock(512)
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
# Center
e4 = self.d_block(e4)
# Decoder
d4 = self.decoder4(e4) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class DLinkNet18HeadsV1(nn.Module):
def __init__(self, backbone='seco-1m', num_classes=1):
super(DLinkNet18HeadsV1, self).__init__()
filters = [64, 128, 256, 512]
if backbone == 'random':
resnet = models.resnet18(pretrained=False)
elif backbone == 'imagenet':
resnet = models.resnet18(pretrained=True)
elif backbone == 'seco-100k':
resnet = moco.resnet18(large=False)
elif backbone == 'seco-1m':
resnet = moco.resnet18(large=True)
else:
raise ValueError()
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
self.encoder5 = nn.Sequential(
BasicBlock(512, 512, stride=2, downsample=nn.Sequential(
nn.Conv2d(512, 512, kernel_size=1, stride=2, bias=False),
nn.BatchNorm2d(512)
),
groups=1,
base_width=64, dilation=1,
norm_layer=nn.BatchNorm2d),
BasicBlock(512, 512, stride=1, downsample=None,
groups=1,
base_width=64, dilation=1,
norm_layer=nn.BatchNorm2d)
)
self.head1 = moco.resnet18_heads(large=True, index=0)
self.head2 = moco.resnet18_heads(large=True, index=1)
self.head3 = moco.resnet18_heads(large=True, index=2)
self.feat_encoder = nn.Sequential(
nn.Conv2d(filters[3], filters[3], kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(filters[3]),
nn.ReLU(inplace=True)
)
self.gate_encoder = nn.Sequential(
nn.Conv2d(6, filters[3], kernel_size=1, stride=1),
nn.BatchNorm2d(filters[3]),
nn.ReLU(inplace=True)
)
self.join_encoder = nn.Sequential(
nn.Conv2d(2 * filters[3], filters[3], kernel_size=1, stride=1),
nn.BatchNorm2d(filters[3]),
nn.ReLU(inplace=True)
)
self.d_block = DBlock(512)
self.decoder6 = DecoderBlock(filters[3], filters[3])
self.decoder5 = DecoderBlock(filters[3], filters[3])
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
e5 = self.encoder5(e4)
h0 = self.head1(e5)
h1 = self.head2(e5)
h2 = self.head3(e5)
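        # Concatenate the three projection-head outputs, reshape them into a
        # 6-channel 8x8 map, and fuse that map with the encoder features via
        # the gate/join encoders below.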
h = torch.cat([h0, h1, h2], dim=1)
h = h.view(-1, 6, 8, 8)
f = self.feat_encoder(e5)
h = self.gate_encoder(h)
g = torch.cat((f, h), 1)
g = self.join_encoder(g)
# Center
e6 = self.d_block(g)
# Decoder
d6 = self.decoder6(e6) + e5
d5 = self.decoder5(d6) + e4
d4 = self.decoder4(d5) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class DLinkNet18HeadsV2(nn.Module):
def __init__(self, backbone='seco-1m', num_classes=1):
super(DLinkNet18HeadsV2, self).__init__()
filters = [64, 128, 256, 512]
if backbone == 'random':
resnet = models.resnet18(pretrained=False)
elif backbone == 'imagenet':
resnet = models.resnet18(pretrained=True)
elif backbone == 'seco-100k':
resnet = moco.resnet18(large=False)
elif backbone == 'seco-1m':
resnet = moco.resnet18(large=True)
else:
raise ValueError()
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
self.head1 = moco.resnet18_heads(large=True, index=0)
self.head2 = moco.resnet18_heads(large=True, index=1)
self.head3 = moco.resnet18_heads(large=True, index=2)
self.merge_encoder = nn.Sequential(
            nn.Conv2d(3 * filters[1] + filters[3], filters[3], kernel_size=3, stride=1, padding=1),  # padding=1 keeps the spatial size so the decoder skip-additions line up
nn.BatchNorm2d(filters[3]),
nn.ReLU(inplace=True)
)
self.d_block = DBlock(512)
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
h0 = self.head1(e4)
h1 = self.head2(e4)
h2 = self.head3(e4)
eb, ec, eh, ew = e4.size()
h = torch.cat([h0, h1, h2], dim=1)
h = torch.unsqueeze(torch.unsqueeze(h, dim=2), dim=2)
h = h.repeat(1, 1, eh, ew)
e4 = torch.cat((h, e4), dim=1)
e4 = self.merge_encoder(e4)
# Center
e4 = self.d_block(e4)
# Decoder
d4 = self.decoder4(e4) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class DLinkNet18HeadsV3(nn.Module):
def __init__(self, backbone='seco-1m', num_classes=1):
super(DLinkNet18HeadsV3, self).__init__()
filters = [64, 128, 256, 512]
if backbone == 'random':
resnet = models.resnet18(pretrained=False)
elif backbone == 'imagenet':
resnet = models.resnet18(pretrained=True)
elif backbone == 'seco-100k':
resnet = moco.resnet18(large=False)
elif backbone == 'seco-1m':
resnet = moco.resnet18(large=True)
else:
raise ValueError()
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
# self.head1 = moco.resnet18_heads(large=True, index=0, decode=True)
self.head2 = moco.resnet18_heads(large=True, index=1, decode=True)
# self.head3 = moco.resnet18_heads(large=True, index=2, decode=True)
self.a_block = CBAMBlock(channel=filters[3], reduction=filters[3] // filters[0], kernel_size=3)
self.d_block = DBlock(512)
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
# h0 = self.head1(e4)
h1 = self.head2(e4)
# h2 = self.head3(e4)
# h0 = torch.unsqueeze(torch.unsqueeze(h0, 2), 3)
h1 = torch.unsqueeze(torch.unsqueeze(h1, 2), 3)
# h2 = torch.unsqueeze(torch.unsqueeze(h2, 2), 3)
# Center
e4 = e4 + e4 * h1
e4 = self.d_block(e4)
# Decoder
d4 = self.decoder4(e4) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class CBAMDLinkNet18(nn.Module):
def __init__(self, backbone='seco-1m', num_classes=1):
super(CBAMDLinkNet18, self).__init__()
filters = [64, 128, 256, 512]
if backbone == 'random':
resnet = models.resnet18(pretrained=False)
elif backbone == 'imagenet':
resnet = models.resnet18(pretrained=True)
elif backbone == 'seco-100k':
resnet = moco.resnet18(large=False)
elif backbone == 'seco-1m':
resnet = moco.resnet18(large=True)
else:
raise ValueError()
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
self.a_block = CBAMBlock(channel=filters[3], reduction=filters[3] // filters[0], kernel_size=3)
self.d_block = DBlock(512)
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
# Center
e4 = self.a_block(e4)
e4 = self.d_block(e4)
# Decoder
d4 = self.decoder4(e4) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class DLinkNet34LessPool(nn.Module):
def __init__(self, num_classes=1):
super(DLinkNet34LessPool, self).__init__()
filters = [64, 128, 256, 512]
resnet = models.resnet34(pretrained=True)
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.d_block = DBlockMoreDilate(256)
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
# Center
e3 = self.d_block(e3)
# Decoder
d3 = self.decoder3(e3) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
# Final Classification
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class DLinkNet34(nn.Module):
def __init__(self, backbone='imagenet', num_classes=1, num_channels=3):
super(DLinkNet34, self).__init__()
filters = [64, 128, 256, 512]
if backbone == 'random':
resnet = models.resnet34(pretrained=False)
elif backbone == 'imagenet':
resnet = models.resnet34(pretrained=True)
elif backbone == 'seco-1m':
resnet = moco.resnet34()
else:
raise ValueError()
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
self.d_block = DBlock(512)
# self.d_block4 = DBlock(512)
# self.d_block3 = DBlock(256)
# self.d_block2 = DBlock(128)
# self.d_block1 = DBlock(64)
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
# Center
e4 = self.d_block(e4)
# e4 = self.d_block4(e4)
# e3 = self.d_block3(e3)
# e2 = self.d_block2(e2)
# e1 = self.d_block1(e1)
# Decoder
d4 = self.decoder4(e4) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class DLinkNet50(nn.Module):
def __init__(self, backbone='imagenet', num_classes=1):
super(DLinkNet50, self).__init__()
filters = [256, 512, 1024, 2048]
if backbone == 'random':
resnet = models.resnet50(pretrained=False)
elif backbone == 'imagenet':
resnet = models.resnet50(pretrained=True)
elif backbone == 'seco-100k':
resnet = moco.resnet50(large=False)
elif backbone == 'seco-1m':
resnet = moco.resnet50(large=True)
else:
raise ValueError()
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
self.d_block = DBlockMoreDilate(2048)
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
# Center
e4 = self.d_block(e4)
# Decoder
d4 = self.decoder4(e4) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class DLinkNet101(nn.Module):
def __init__(self, num_classes=1):
super(DLinkNet101, self).__init__()
filters = [256, 512, 1024, 2048]
resnet = models.resnet101(pretrained=True)
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
self.d_block = DBlockMoreDilate(2048)
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 4, 2, 1)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3, padding=1)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 3, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
# Center
e4 = self.d_block(e4)
# Decoder
d4 = self.decoder4(e4) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
class LinkNet34(nn.Module):
def __init__(self, num_classes=1):
super(LinkNet34, self).__init__()
filters = [64, 128, 256, 512]
resnet = models.resnet34(pretrained=True)
self.first_conv = resnet.conv1
self.first_bn = resnet.bn1
self.first_relu = resnet.relu
self.first_max_pool = resnet.maxpool
self.encoder1 = resnet.layer1
self.encoder2 = resnet.layer2
self.encoder3 = resnet.layer3
self.encoder4 = resnet.layer4
self.decoder4 = DecoderBlock(filters[3], filters[2])
self.decoder3 = DecoderBlock(filters[2], filters[1])
self.decoder2 = DecoderBlock(filters[1], filters[0])
self.decoder1 = DecoderBlock(filters[0], filters[0])
self.final_deconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2)
self.final_relu1 = non_linearity
self.final_conv2 = nn.Conv2d(32, 32, 3)
self.final_relu2 = non_linearity
self.final_conv3 = nn.Conv2d(32, num_classes, 2, padding=1)
def forward(self, x):
# Encoder
x = self.first_conv(x)
x = self.first_bn(x)
x = self.first_relu(x)
x = self.first_max_pool(x)
e1 = self.encoder1(x)
e2 = self.encoder2(e1)
e3 = self.encoder3(e2)
e4 = self.encoder4(e3)
# Decoder
d4 = self.decoder4(e4) + e3
d3 = self.decoder3(d4) + e2
d2 = self.decoder2(d3) + e1
d1 = self.decoder1(d2)
out = self.final_deconv1(d1)
out = self.final_relu1(out)
out = self.final_conv2(out)
out = self.final_relu2(out)
out = self.final_conv3(out)
return torch.sigmoid(out)
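# Hedged smoke test (not part of the original module; assumes the project's
# local imports above resolve). The 'imagenet' backbone avoids the
# seco/moco checkpoints but will download torchvision weights.
if __name__ == '__main__':
    net = DLinkNet34(backbone='imagenet')
    dummy = torch.randn(1, 3, 256, 256)
    out = net(dummy)
    print(out.shape)  # expected: torch.Size([1, 1, 256, 256])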
| 33.244604
| 119
| 0.595326
| 3,540
| 27,726
| 4.530226
| 0.061864
| 0.05612
| 0.037413
| 0.020577
| 0.86793
| 0.854524
| 0.845046
| 0.831702
| 0.822847
| 0.808692
| 0
| 0.072764
| 0.28623
| 27,726
| 833
| 120
| 33.284514
| 0.737595
| 0.033831
| 0
| 0.800955
| 0
| 0
| 0.009426
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041401
| false
| 0
| 0.019108
| 0
| 0.101911
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1ca336e12279ea5c12ac6b554f44e9109464e62f
| 116
|
py
|
Python
|
app/models/__init__.py
|
Ethan-Ceng/Flask-CMS
|
2fe2664e9ae60affe277e9c3b50c18a2e32e422b
|
[
"MIT"
] | null | null | null |
app/models/__init__.py
|
Ethan-Ceng/Flask-CMS
|
2fe2664e9ae60affe277e9c3b50c18a2e32e422b
|
[
"MIT"
] | null | null | null |
app/models/__init__.py
|
Ethan-Ceng/Flask-CMS
|
2fe2664e9ae60affe277e9c3b50c18a2e32e422b
|
[
"MIT"
] | null | null | null |
from app.models.admin_user import AdminUser
from app.models.user import User
from app.models.project import Project
| 29
| 43
| 0.844828
| 19
| 116
| 5.105263
| 0.421053
| 0.216495
| 0.402062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 116
| 3
| 44
| 38.666667
| 0.932692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1cbd069d7def231d3e30f60b3facaf8276b0a999
| 6,178
|
py
|
Python
|
VL-T5/src/refcoco_model.py
|
ylsung/VL_adapter
|
287409f383f89a11764fc45806864693a4d3e498
|
[
"MIT"
] | 41
|
2021-12-14T02:50:16.000Z
|
2022-03-30T07:41:19.000Z
|
VL-T5/src/refcoco_model.py
|
rebedy/VL-T5
|
1799110fe55ad3badc031fe2a3718c1ba61b4fc5
|
[
"MIT"
] | 1
|
2022-01-07T03:31:47.000Z
|
2022-03-25T00:31:53.000Z
|
VL-T5/src/refcoco_model.py
|
rebedy/VL-T5
|
1799110fe55ad3badc031fe2a3718c1ba61b4fc5
|
[
"MIT"
] | 2
|
2021-12-14T03:10:18.000Z
|
2022-03-29T04:59:23.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from modeling_t5 import VLT5
class VLT5RefCOCO(VLT5):
def __init__(self, config):
super().__init__(config)
def train_step(self, batch):
device = next(self.parameters()).device
vis_feats = batch['vis_feats'].to(device)
input_ids = batch['input_ids'].to(device)
vis_pos = batch['boxes'].to(device)
vis_attention_mask = batch['vis_attention_mask'].to(device)
B, V_L = vis_feats.size()[:2]
lm_labels = batch["target_ids"].to(device)
output = self(
input_ids=input_ids,
vis_inputs=(vis_feats, vis_pos),
vis_attention_mask=vis_attention_mask,
labels=lm_labels,
return_dict=True
)
assert 'loss' in output
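        # Average the per-token loss over non-padding target tokens for each
        # example, then average over the examples that actually have a target
        # (batch['exists_target'] masks out the rest).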
lm_mask = (lm_labels != -100).float()
B, L = lm_labels.size()
loss = output['loss']
loss = loss.view(B, L) * lm_mask
loss = loss.sum(dim=1) / lm_mask.sum(dim=1).clamp(min=1) # B
loss_mask = batch['exists_target'].to(device=device)
loss = (loss * loss_mask).sum() / loss_mask.sum().clamp(min=1)
result = {
'loss': loss
}
with torch.no_grad():
logits = output['logits'].detach()
# logits = logits.view(B, 2, self.config.vocab_size)
logits = logits.view(B, L, self.config.vocab_size)
# target = lm_labels[:, 0].view(B)
pred = logits[:, 0].argmax(dim=1).view(B)
# correct = pred == target
pred = pred.cpu().numpy()
correct = np.zeros([B])
for i in range(B):
correct[i] = pred[i] in batch['all_target_ids'][i]
result['pred'] = pred
result['correct'] = correct
return result
@torch.no_grad()
def test_step(self, batch):
self.eval()
device = next(self.parameters()).device
vis_feats = batch['vis_feats'].to(device)
input_ids = batch['input_ids'].to(device)
vis_pos = batch['boxes'].to(device)
vis_attention_mask = batch['vis_attention_mask'].to(device)
B, V_L = vis_feats.size()[:2]
decoder_input_ids = torch.ones(B, 1, dtype=torch.long, device=device) * self.config.decoder_start_token_id
output = self(
input_ids=input_ids,
vis_inputs=(vis_feats, vis_pos),
vis_attention_mask=vis_attention_mask,
decoder_input_ids=decoder_input_ids,
return_dict=True
)
logits = output['logits'].detach()
logits = logits.view(B, self.config.vocab_size)
pred = logits.argmax(dim=1).view(B)
pred = pred.cpu().numpy()
correct = np.zeros([B])
for i in range(B):
correct[i] = pred[i] in batch['all_target_ids'][i]
result = {}
result['pred'] = pred
result['correct'] = correct
return result
from modeling_bart import VLBart
from collections import defaultdict
class VLBartRefCOCO(VLBart):
def __init__(self, config):
super().__init__(config)
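        # Map visual sentinel tokens '<vis_extra_id_{i}>' to region indices;
        # any other token falls back to -1.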
out_map = defaultdict(lambda: -1)
for i in range(100):
out_map[f'<vis_extra_id_{i}>'] = i
self.out_map = out_map
def train_step(self, batch):
self.train()
device = next(self.parameters()).device
vis_feats = batch['vis_feats'].to(device)
input_ids = batch['input_ids'].to(device)
vis_pos = batch['boxes'].to(device)
vis_attention_mask = batch['vis_attention_mask'].to(device)
B, V_L = vis_feats.size()[:2]
lm_labels = batch["target_ids"].to(device)
output = self(
input_ids=input_ids,
attention_mask=input_ids.ne(self.config.pad_token_id),
vis_inputs=(vis_feats, vis_pos),
vis_attention_mask=vis_attention_mask,
labels=lm_labels,
reduce_loss=True,
return_dict=True
)
assert 'loss' in output
# lm_mask = (lm_labels != -100).float()
B, L = lm_labels.size()
loss = output['loss']
result = {
'loss': loss
}
with torch.no_grad():
logits = output['logits'].detach().view(B, L, self.lm_head.out_features)[:, 1]
logits = logits.view(B, self.lm_head.out_features)
pred = logits.argmax(dim=1).view(B)
pred = pred.cpu().numpy()
pred = self.lm_head.out_features - pred - 1
correct = np.zeros([B])
for i in range(B):
correct[i] = pred[i] in batch['all_targets'][i]
result['pred'] = pred
result['correct'] = correct
return result
@torch.no_grad()
def test_step(self, batch):
self.eval()
device = next(self.parameters()).device
vis_feats = batch['vis_feats'].to(device)
input_ids = batch['input_ids'].to(device)
vis_pos = batch['boxes'].to(device)
vis_attention_mask = batch['vis_attention_mask'].to(device)
B, V_L = vis_feats.size()[:2]
decoder_input_ids = torch.tensor(
[self.config.decoder_start_token_id, self.config.bos_token_id],
dtype=torch.long, device=device).unsqueeze(0).expand(B, 2)
output = self(
input_ids=input_ids,
attention_mask=input_ids.ne(self.config.pad_token_id),
vis_inputs=(vis_feats, vis_pos),
vis_attention_mask=vis_attention_mask,
decoder_input_ids=decoder_input_ids,
return_dict=True
)
logits = output['logits'].detach().view(B, 2, self.lm_head.out_features)[:, 1]
logits = logits.view(B, self.lm_head.out_features)
pred = logits.argmax(dim=1).view(B)
pred = pred.cpu().numpy()
pred = self.lm_head.out_features - pred - 1
correct = np.zeros([B])
for i in range(B):
correct[i] = pred[i] in batch['all_targets'][i]
result = {}
result['pred'] = pred
result['correct'] = correct
return result
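A hedged aside on VLBartRefCOCO.test_step above: the decoder is fed a two-token prefix (decoder start plus BOS), the prediction is read from the logits at position 1, and the argmax token id is folded back into a region index from the end of the vocabulary. A self-contained sketch of that index arithmetic (the vocabulary size below is an assumption):

import torch

vocab_size = 50365                        # assumed: base vocab + 100 <vis_extra_id_*> tokens at the end
logits = torch.randn(2, vocab_size)       # dummy logits at the decoder's second position
token_id = logits.argmax(dim=1)           # predicted token id per example
region_index = vocab_size - token_id - 1  # highest token id maps to index 0, as in the code above
print(region_index)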
| 29.419048
| 114
| 0.571544
| 799
| 6,178
| 4.186483
| 0.130163
| 0.057399
| 0.076532
| 0.023318
| 0.820329
| 0.786547
| 0.766218
| 0.747085
| 0.736921
| 0.736921
| 0
| 0.008527
| 0.297669
| 6,178
| 209
| 115
| 29.559809
| 0.762388
| 0.023956
| 0
| 0.766667
| 0
| 0
| 0.059263
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 1
| 0.04
| false
| 0
| 0.046667
| 0
| 0.126667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e82cc916210584731597be12b5ba927145ccba85
| 4,424
|
py
|
Python
|
backend/app/utils/parsers/applicant.py
|
LiXuanqi/NuaaOversea-flask-react
|
4c61fb95a4a2f40c1ae3329acc7febd1c78324de
|
[
"MIT"
] | null | null | null |
backend/app/utils/parsers/applicant.py
|
LiXuanqi/NuaaOversea-flask-react
|
4c61fb95a4a2f40c1ae3329acc7febd1c78324de
|
[
"MIT"
] | null | null | null |
backend/app/utils/parsers/applicant.py
|
LiXuanqi/NuaaOversea-flask-react
|
4c61fb95a4a2f40c1ae3329acc7febd1c78324de
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8

"""
File name: applicant.py
Function Des: ...
~~~~~~~~~~
author: 1_x7 <lixuanqi1995@gmail.com> <http://lixuanqi.github.io>
"""
from flask_restful import reqparse

# -------- applicant add parser --------
applicant_post_parser = reqparse.RequestParser()
applicant_post_parser.add_argument(
    'user_id',
    dest='user_id',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'name',
    dest='name',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'student_id',
    dest='student_id',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'college',
    dest='college',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'major',
    dest='major',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'gpa',
    dest='gpa',
    type=float,
    required=True,
)
applicant_post_parser.add_argument(
    'language_type',
    dest='language_type',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'language_reading',
    dest='language_reading',
    type=int,
    required=True,
)
applicant_post_parser.add_argument(
    'language_listening',
    dest='language_listening',
    type=int,
    required=True,
)
applicant_post_parser.add_argument(
    'language_speaking',
    dest='language_speaking',
    type=int,
    required=True,
)
applicant_post_parser.add_argument(
    'language_writing',
    dest='language_writing',
    type=int,
    required=True,
)
applicant_post_parser.add_argument(
    'gre_verbal',
    dest='gre_verbal',
    type=int,
    required=True,
)
applicant_post_parser.add_argument(
    'gre_quantitative',
    dest='gre_quantitative',
    type=int,
    required=True,
)
applicant_post_parser.add_argument(
    'gre_writing',
    dest='gre_writing',
    type=float,
    required=True,
)
applicant_post_parser.add_argument(
    'research_id',
    dest='research_id',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'project_id',
    dest='project_id',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'recommendation_id',
    dest='recommendation_id',
    type=str,
    required=True,
)
applicant_post_parser.add_argument(
    'email',
    dest='email',
    type=str,
    required=False,
)

# -------- applicant update parser --------
applicant_put_parser = reqparse.RequestParser()
applicant_put_parser.add_argument(
    'name',
    dest='name',
    type=str,
    required=True,
)
applicant_put_parser.add_argument(
    'student_id',
    dest='student_id',
    type=str,
    required=True,
)
applicant_put_parser.add_argument(
    'college',
    dest='college',
    type=str,
    required=True,
)
applicant_put_parser.add_argument(
    'major',
    dest='major',
    type=str,
    required=True,
)
applicant_put_parser.add_argument(
    'gpa',
    dest='gpa',
    type=float,
    required=True,
)
applicant_put_parser.add_argument(
    'language_type',
    dest='language_type',
    type=str,
    required=True,
)
applicant_put_parser.add_argument(
    'language_reading',
    dest='language_reading',
    type=int,
    required=True,
)
applicant_put_parser.add_argument(
    'language_listening',
    dest='language_listening',
    type=int,
    required=True,
)
applicant_put_parser.add_argument(
    'language_speaking',
    dest='language_speaking',
    type=int,
    required=True,
)
applicant_put_parser.add_argument(
    'language_writing',
    dest='language_writing',
    type=int,
    required=True,
)
applicant_put_parser.add_argument(
    'gre_verbal',
    dest='gre_verbal',
    type=int,
    required=True,
)
applicant_put_parser.add_argument(
    'gre_quantitative',
    dest='gre_quantitative',
    type=int,
    required=True,
)
applicant_put_parser.add_argument(
    'gre_writing',
    dest='gre_writing',
    type=float,
    required=True,
)
applicant_put_parser.add_argument(
    'research_id',
    dest='research_id',
    type=str,
    required=True,
)
applicant_put_parser.add_argument(
    'project_id',
    dest='project_id',
    type=str,
    required=True,
)
applicant_put_parser.add_argument(
    'recommendation_id',
    dest='recommendation_id',
    type=str,
    required=True,
)
applicant_put_parser.add_argument(
    'email',
    dest='email',
    type=str,
    required=False,
)
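A hedged usage sketch for the parsers above (the ApplicantResource class and route are hypothetical, not part of this file): in flask_restful a parser is consumed inside a Resource handler via parse_args(), which rejects requests missing required fields with a 400 response.

from flask import Flask
from flask_restful import Api, Resource

app = Flask(__name__)
api = Api(app)

class ApplicantResource(Resource):
    def post(self):
        # Returns a Namespace; aborts with 400 + per-field errors on bad input.
        args = applicant_post_parser.parse_args()
        return {'user_id': args.user_id, 'gpa': args.gpa}, 201

api.add_resource(ApplicantResource, '/applicants')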
| 16.69434
| 69
| 0.674277
| 514
| 4,424
| 5.490272
| 0.118677
| 0.111623
| 0.210843
| 0.140326
| 0.905032
| 0.890149
| 0.890149
| 0.890149
| 0.890149
| 0.875266
| 0
| 0.001968
| 0.195976
| 4,424
| 265
| 70
| 16.69434
| 0.791397
| 0.053571
| 0
| 0.812207
| 0
| 0
| 0.185096
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004695
| 0
| 0.004695
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c0fb238c018d0f926bd37f2fba51736f58f5dd9
| 10,366
|
py
|
Python
|
py_img_seg_eval/unit_tests.py
|
PloxKevin/Weak-Supervision
|
79bd8690309ec161c5e0d0a5715dfa61f7baf786
|
[
"MIT"
] | 10
|
2019-07-24T12:23:05.000Z
|
2022-01-06T04:08:54.000Z
|
py_img_seg_eval/unit_tests.py
|
PloxKevin/Weak-Supervision
|
79bd8690309ec161c5e0d0a5715dfa61f7baf786
|
[
"MIT"
] | null | null | null |
py_img_seg_eval/unit_tests.py
|
PloxKevin/Weak-Supervision
|
79bd8690309ec161c5e0d0a5715dfa61f7baf786
|
[
"MIT"
] | 7
|
2019-04-08T17:46:08.000Z
|
2022-02-25T11:06:59.000Z
|
#!/usr/bin/python

'''
Martin Kersner, m.kersner@gmail.com
2015/11/30

Unit tests for eval_segm.py.
'''

import numpy as np
import eval_segm as es
import unittest


class pixel_accuracy_UnitTests(unittest.TestCase):
    '''
    Wrong inputs
    '''
    def test1dInput(self):
        mat = np.array([0])
        self.assertRaises(IndexError, es.pixel_accuracy, mat, mat)

    def testDiffDim(self):
        mat0 = np.array([[0,0], [0,0]])
        mat1 = np.array([[0,0,0], [0,0,0]])
        self.assertRaisesRegexp(es.EvalSegErr, "DiffDim", es.pixel_accuracy, mat0, mat1)

    '''
    Correct inputs
    '''
    def testOneClass(self):
        segm = np.array([[0,0], [0,0]])
        gt = np.array([[0,0], [0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, 1.0)

    def testTwoClasses0(self):
        segm = np.array([[1,1,1,1,1], [1,1,1,1,1]])
        gt = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, 0)

    def testTwoClasses1(self):
        segm = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, (9.0)/(10.0))

    def testTwoClasses2(self):
        segm = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, (9.0+0.0)/(9.0+1.0))

    def testThreeClasses0(self):
        segm = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,2,0,0,0], [0,0,0,0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, (8.0+0.0+0.0)/(8.0+1.0+1.0))

    def testThreeClasses1(self):
        segm = np.array([[0,2,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, (8.0+0.0)/(9.0+1.0))

    def testFourClasses0(self):
        segm = np.array([[0,2,3,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, (7.0+0.0)/(9.0+1.0))

    def testFourClasses1(self):
        segm = np.array([[1,2,3,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, (7.0+1.0)/(9.0+1.0))

    def testFiveClasses0(self):
        segm = np.array([[1,2,3,4,3], [0,0,0,0,0]])
        gt = np.array([[1,0,3,0,0], [0,0,0,0,0]])
        res = es.pixel_accuracy(segm, gt)
        self.assertEqual(res, (5.0+1.0+1.0)/(8.0+1.0+1.0))


class mean_accuracy_UnitTests(unittest.TestCase):
    '''
    Wrong inputs
    '''
    def test1dInput(self):
        mat = np.array([0])
        self.assertRaises(IndexError, es.mean_accuracy, mat, mat)

    def testDiffDim(self):
        mat0 = np.array([[0,0], [0,0]])
        mat1 = np.array([[0,0,0], [0,0,0]])
        self.assertRaisesRegexp(es.EvalSegErr, "DiffDim", es.mean_accuracy, mat0, mat1)

    '''
    Correct inputs
    '''
    def testOneClass(self):
        segm = np.array([[0,0], [0,0]])
        gt = np.array([[0,0], [0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, 1.0)

    def testTwoClasses0(self):
        segm = np.array([[1,1,1,1,1], [1,1,1,1,1]])
        gt = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, 0)

    def testTwoClasses1(self):
        segm = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, 9.0/10.0)

    def testTwoClasses2(self):
        segm = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, np.mean([9.0/9.0, 0.0/1.0]))

    def testThreeClasses0(self):
        segm = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,2,0,0,0], [0,0,0,0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, np.mean([8.0/8.0, 0.0/1.0, 0.0/1.0]))

    def testThreeClasses1(self):
        segm = np.array([[0,2,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, np.mean([8.0/9.0, 0.0/1.0]))

    def testFourClasses0(self):
        segm = np.array([[0,2,3,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, np.mean([7.0/9.0, 0.0/1.0]))

    def testFourClasses1(self):
        segm = np.array([[1,2,3,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, np.mean([7.0/9.0, 1.0/1.0]))

    def testFiveClasses0(self):
        segm = np.array([[1,2,3,4,3], [0,0,0,0,0]])
        gt = np.array([[1,0,3,0,0], [0,0,0,0,0]])
        res = es.mean_accuracy(segm, gt)
        self.assertEqual(res, np.mean([5.0/8.0, 1.0, 1.0]))


class mean_IU_UnitTests(unittest.TestCase):
    '''
    Wrong inputs
    '''
    def test1dInput(self):
        mat = np.array([0])
        self.assertRaises(IndexError, es.mean_IU, mat, mat)

    def testDiffDim(self):
        mat0 = np.array([[0,0], [0,0]])
        mat1 = np.array([[0,0,0], [0,0,0]])
        self.assertRaisesRegexp(es.EvalSegErr, "DiffDim", es.mean_IU, mat0, mat1)

    '''
    Correct inputs
    '''
    def testOneClass(self):
        segm = np.array([[0,0], [0,0]])
        gt = np.array([[0,0], [0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, 1.0)

    def testTwoClasses0(self):
        segm = np.array([[1,1,1,1,1], [1,1,1,1,1]])
        gt = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, 0)

    def testTwoClasses1(self):
        segm = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, np.mean([0.9]))

    def testTwoClasses2(self):
        segm = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, np.mean([0.9, 0]))

    def testThreeClasses0(self):
        segm = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,2,0,0,0], [0,0,0,0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, np.mean([8.0/10.0, 0, 0]))

    def testThreeClasses1(self):
        segm = np.array([[0,2,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, np.mean([8.0/10.0, 0]))

    def testFourClasses0(self):
        segm = np.array([[0,2,3,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, np.mean([7.0/10.0, 0]))

    def testFourClasses1(self):
        segm = np.array([[1,2,3,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, np.mean([7.0/9.0, 1]))

    def testFiveClasses0(self):
        segm = np.array([[1,2,3,4,3], [0,0,0,0,0]])
        gt = np.array([[1,0,3,0,0], [0,0,0,0,0]])
        res = es.mean_IU(segm, gt)
        self.assertEqual(res, np.mean([5.0/8.0, 1, 1.0/2.0]))


class frequency_weighted_IU_UnitTests(unittest.TestCase):
    '''
    Wrong inputs
    '''
    def test1dInput(self):
        mat = np.array([0])
        self.assertRaises(IndexError, es.frequency_weighted_IU, mat, mat)

    def testDiffDim(self):
        mat0 = np.array([[0,0], [0,0]])
        mat1 = np.array([[0,0,0], [0,0,0]])
        self.assertRaisesRegexp(es.EvalSegErr, "DiffDim", es.frequency_weighted_IU, mat0, mat1)

    '''
    Correct inputs
    '''
    def testOneClass(self):
        segm = np.array([[0,0], [0,0]])
        gt = np.array([[0,0], [0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        self.assertEqual(res, 1.0)

    def testTwoClasses0(self):
        segm = np.array([[1,1,1,1,1], [1,1,1,1,1]])
        gt = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        self.assertEqual(res, 0)

    def testTwoClasses1(self):
        segm = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        self.assertEqual(res, (1.0/10.0)*(10.0*9.0/10.0))

    def testTwoClasses2(self):
        segm = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        # Almost equal!
        self.assertAlmostEqual(res, (1.0/10.0)*((9.0*9.0/10.0)+(1.0*0.0/1.0)))

    def testThreeClasses0(self):
        segm = np.array([[0,0,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,2,0,0,0], [0,0,0,0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        # Almost equal!
        self.assertAlmostEqual(res, (1.0/10.0)*((8.0*8.0/10.0)+(1.0*0.0/1.0)+(1.0*0.0/1.0)))

    def testThreeClasses1(self):
        segm = np.array([[0,2,0,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        # Almost equal!
        self.assertAlmostEqual(res, (1.0/10.0)*((9.0*8.0/10.0)+(1.0*0.0/1.0)))

    def testFourClasses0(self):
        segm = np.array([[0,2,3,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        self.assertEqual(res, (1.0/10.0)*((9.0*7.0/10.0)+(1.0*0.0/1.0)))

    def testFourClasses1(self):
        segm = np.array([[1,2,3,0,0], [0,0,0,0,0]])
        gt = np.array([[1,0,0,0,0], [0,0,0,0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        self.assertEqual(res, (1.0/10.0)*((9.0*7.0/9.0)+(1.0*1.0/1.0)))

    def testFiveClasses0(self):
        segm = np.array([[1,2,3,4,3], [0,0,0,0,0]])
        gt = np.array([[1,0,3,0,0], [0,0,0,0,0]])
        res = es.frequency_weighted_IU(segm, gt)
        self.assertEqual(res, (1.0/10.0)*((8.0*5.0/8.0)+(1.0*1.0/1.0)+(1.0*1.0/2.0)))


if __name__ == "__main__":
    unittest.main()
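A hedged reference implementation consistent with the pixel_accuracy expectations above (eval_segm's real code may differ): correctly labelled pixels summed over the ground-truth classes, divided by the total pixel count of those classes.

import numpy as np

def pixel_accuracy_sketch(segm, gt):
    # sum_i n_ii / sum_i t_i over the classes present in the ground truth
    classes = np.unique(gt)
    correct = sum(np.sum((segm == c) & (gt == c)) for c in classes)
    total = sum(np.sum(gt == c) for c in classes)
    return correct / total

segm = np.array([[1, 0, 0, 0, 0], [0, 0, 0, 0, 0]])
gt = np.array([[0, 0, 0, 0, 0], [0, 0, 0, 0, 0]])
print(pixel_accuracy_sketch(segm, gt))   # 0.9, matching testTwoClasses1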
| 30.85119
| 95
| 0.516882
| 1,895
| 10,366
| 2.788391
| 0.040106
| 0.203634
| 0.25265
| 0.267979
| 0.957229
| 0.95458
| 0.954201
| 0.952877
| 0.949849
| 0.942468
| 0
| 0.13724
| 0.244357
| 10,366
| 335
| 96
| 30.943284
| 0.537342
| 0.01804
| 0
| 0.792627
| 0
| 0
| 0.003604
| 0
| 0
| 0
| 0
| 0
| 0.202765
| 1
| 0.202765
| false
| 0
| 0.013825
| 0
| 0.235023
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
1c1e9f2aa49dc31eb17cdd0f1c5cac78cb9440a2
| 175
|
py
|
Python
|
06-modules/modules_01/utility.py
|
ralexrivero/python_fundation
|
34a855db7380d3d91db6a8f02d97f287d038ef5f
|
[
"Apache-2.0"
] | 1
|
2021-09-19T04:09:48.000Z
|
2021-09-19T04:09:48.000Z
|
06-modules/modules_01/utility.py
|
ralexrivero/python_fundation
|
34a855db7380d3d91db6a8f02d97f287d038ef5f
|
[
"Apache-2.0"
] | null | null | null |
06-modules/modules_01/utility.py
|
ralexrivero/python_fundation
|
34a855db7380d3d91db6a8f02d97f287d038ef5f
|
[
"Apache-2.0"
] | null | null | null |
print("In utility: ")
def mul(num1, num2):
return num1 * num2
def divide(num1, num2):
return num1 / num2
class Student():
pass
st1 = Student()
print(type(st1))
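A hedged companion snippet (assuming utility.py sits on the import path): importing the module runs its top-level statements once, so the two print calls above fire at import time.

import utility               # prints "In utility: " and <class 'utility.Student'> on first import

print(utility.mul(3, 4))     # 12
print(utility.divide(8, 2))  # 4.0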
| 13.461538
| 23
| 0.634286
| 25
| 175
| 4.44
| 0.56
| 0.288288
| 0.252252
| 0.324324
| 0.396396
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073529
| 0.222857
| 175
| 13
| 24
| 13.461538
| 0.742647
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0
| 0.222222
| 0.555556
| 0.222222
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 8
|
1c618aa9bc5b4a33aacbca3568cd33535315068f
| 4,921
|
py
|
Python
|
pyart/io/tests/test_nexrad_level3.py
|
josephhardinee/pyart
|
909cd4a36bb4cae34349294d2013bc7ad71d0969
|
[
"OLDAP-2.6",
"Python-2.0"
] | null | null | null |
pyart/io/tests/test_nexrad_level3.py
|
josephhardinee/pyart
|
909cd4a36bb4cae34349294d2013bc7ad71d0969
|
[
"OLDAP-2.6",
"Python-2.0"
] | null | null | null |
pyart/io/tests/test_nexrad_level3.py
|
josephhardinee/pyart
|
909cd4a36bb4cae34349294d2013bc7ad71d0969
|
[
"OLDAP-2.6",
"Python-2.0"
] | null | null | null |
""" Unit Tests for Py-ART's io/nexrad_level3.py module. """
import numpy as np
from numpy.ma.core import MaskedArray
import pyart
def test_nexrad_level3_msg19():
radar = pyart.io.read_nexrad_level3(pyart.testing.NEXRAD_LEVEL3_MSG19)
assert radar.time['units'] == 'seconds since 2015-01-02T02:05:28Z'
assert radar.time['data'].shape == (360, )
assert round(radar.time['data'][0]) == 0.
assert radar.range['data'].shape == (230, )
assert round(radar.range['data'][100]) == 99900
assert radar.scan_type == 'ppi'
assert radar.latitude['data'].shape == (1, )
assert round(radar.latitude['data'][0]) == 33.0
assert radar.longitude['data'].shape == (1, )
assert round(radar.longitude['data'][0]) == -87.0
assert radar.altitude['data'].shape == (1, )
assert round(radar.altitude['data'][0]) == 759.
assert radar.altitude_agl is None
assert radar.sweep_number['data'].shape == (1, )
assert radar.sweep_number['data'][0] == 0
assert radar.sweep_mode['data'].shape == (1, )
assert radar.sweep_mode['data'].dtype.char == 'S'
assert np.all(radar.sweep_mode['data'] == [b'azimuth_surveillance'])
assert radar.fixed_angle['data'].shape == (1, )
assert round(radar.fixed_angle['data'][0], 2) == 0.50
assert radar.sweep_start_ray_index['data'].shape == (1, )
assert round(radar.sweep_start_ray_index['data'][0]) == 0.0
assert radar.sweep_end_ray_index['data'].shape == (1, )
assert round(radar.sweep_end_ray_index['data'][0]) == 359.0
assert radar.target_scan_rate is None
assert round(radar.azimuth['data'][0]) == 320.0
assert round(radar.azimuth['data'][10]) == 330.0
assert radar.elevation['data'].shape == (360, )
assert round(radar.elevation['data'][0], 2) == 0.50
assert radar.scan_rate is None
assert radar.antenna_transition is None
assert radar.instrument_parameters is None
assert radar.radar_calibration is None
assert radar.ngates == 230
assert radar.nrays == 360
assert radar.nsweeps == 1
assert 'reflectivity' in radar.fields.keys()
assert radar.fields['reflectivity']['data'].shape == (360, 230)
assert type(radar.fields['reflectivity']['data']) is MaskedArray
assert round(radar.fields['reflectivity']['data'][10, 10]) == 25.
def test_nexrad_level3_msg161():
radar = pyart.io.read_nexrad_level3(pyart.testing.NEXRAD_LEVEL3_MSG163)
assert radar.time['units'] == 'seconds since 2015-01-02T02:05:28Z'
assert radar.time['data'].shape == (360, )
assert round(radar.time['data'][0]) == 0.
assert radar.range['data'].shape == (1200, )
assert round(radar.range['data'][100]) == 24975.
assert radar.scan_type == 'ppi'
assert radar.latitude['data'].shape == (1, )
assert round(radar.latitude['data'][0]) == 33.0
assert radar.longitude['data'].shape == (1, )
assert round(radar.longitude['data'][0]) == -87.0
assert radar.altitude['data'].shape == (1, )
assert round(radar.altitude['data'][0]) == 759.
assert radar.altitude_agl is None
assert radar.sweep_number['data'].shape == (1, )
assert radar.sweep_number['data'][0] == 0
assert radar.sweep_mode['data'].shape == (1, )
assert np.all(radar.sweep_mode['data'] == [b'azimuth_surveillance'])
assert radar.fixed_angle['data'].shape == (1, )
assert round(radar.fixed_angle['data'][0], 2) == 0.50
assert radar.sweep_start_ray_index['data'].shape == (1, )
assert round(radar.sweep_start_ray_index['data'][0]) == 0.0
assert radar.sweep_end_ray_index['data'].shape == (1, )
assert round(radar.sweep_end_ray_index['data'][0]) == 359.0
assert radar.target_scan_rate is None
assert round(radar.azimuth['data'][0]) == 329.0
assert round(radar.azimuth['data'][10]) == 339.0
assert radar.elevation['data'].shape == (360, )
assert round(radar.elevation['data'][0], 2) == 0.50
assert radar.scan_rate is None
assert radar.antenna_transition is None
assert radar.instrument_parameters is None
assert radar.radar_calibration is None
assert radar.ngates == 1200
assert radar.nrays == 360
assert radar.nsweeps == 1
field_name = 'specific_differential_phase'
assert field_name in radar.fields.keys()
assert radar.fields[field_name]['data'].shape == (360, 1200)
assert type(radar.fields[field_name]['data']) is MaskedArray
assert round(radar.fields[field_name]['data'][103, 170]) == 2.
def test_nexrad_level3_msg161_fileobj():
fh = open(pyart.testing.NEXRAD_LEVEL3_MSG163, 'rb')
radar = pyart.io.read_nexrad_level3(fh)
fh.close()
field_name = 'specific_differential_phase'
assert field_name in radar.fields.keys()
assert radar.fields[field_name]['data'].shape == (360, 1200)
assert type(radar.fields[field_name]['data']) is MaskedArray
assert round(radar.fields[field_name]['data'][103, 170]) == 2.
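A hedged usage note on the reader these tests exercise: read_nexrad_level3 accepts either a filename or an open binary file object, as the two msg161 cases above show, and returns a Radar object with the attributes asserted throughout. The path below is a placeholder.

import pyart

radar = pyart.io.read_nexrad_level3('path/to/nexrad_level3_product')  # placeholder path
print(radar.scan_type, radar.ngates, radar.nrays)
print(sorted(radar.fields.keys()))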
| 34.65493
| 75
| 0.666938
| 699
| 4,921
| 4.563662
| 0.160229
| 0.172414
| 0.125392
| 0.080251
| 0.906897
| 0.877429
| 0.847962
| 0.80627
| 0.782445
| 0.782445
| 0
| 0.060901
| 0.16582
| 4,921
| 141
| 76
| 34.900709
| 0.7162
| 0.010364
| 0
| 0.729167
| 0
| 0
| 0.094817
| 0.011107
| 0
| 0
| 0
| 0
| 0.864583
| 1
| 0.03125
| false
| 0
| 0.03125
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c7690a09caa19db7f0a6ae5b39c1c6a3a3ddce2
| 10,880
|
py
|
Python
|
ingenico/connect/sdk/merchant/services/services_client.py
|
festicket/connect-sdk-python3
|
c399c6443789dd978f319c89e1ebd387c812a77b
|
[
"MIT"
] | 12
|
2016-09-26T21:46:31.000Z
|
2020-12-23T18:44:54.000Z
|
ingenico/connect/sdk/merchant/services/services_client.py
|
festicket/connect-sdk-python3
|
c399c6443789dd978f319c89e1ebd387c812a77b
|
[
"MIT"
] | 3
|
2020-05-02T16:53:02.000Z
|
2020-06-02T12:49:51.000Z
|
ingenico/connect/sdk/merchant/services/services_client.py
|
festicket/connect-sdk-python3
|
c399c6443789dd978f319c89e1ebd387c812a77b
|
[
"MIT"
] | 11
|
2017-07-16T00:55:28.000Z
|
2021-09-24T17:00:49.000Z
|
#
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
from ingenico.connect.sdk.api_resource import ApiResource
from ingenico.connect.sdk.response_exception import ResponseException
from ingenico.connect.sdk.domain.errors.error_response import ErrorResponse
from ingenico.connect.sdk.domain.services.bank_details_response import BankDetailsResponse
from ingenico.connect.sdk.domain.services.convert_amount import ConvertAmount
from ingenico.connect.sdk.domain.services.get_iin_details_response import GetIINDetailsResponse
from ingenico.connect.sdk.domain.services.get_privacy_policy_response import GetPrivacyPolicyResponse
from ingenico.connect.sdk.domain.services.test_connection import TestConnection


class ServicesClient(ApiResource):
    """
    Services client. Thread-safe.
    """

    def __init__(self, parent, path_context):
        """
        :param parent: :class:`ingenico.connect.sdk.api_resource.ApiResource`
        :param path_context: dict[str, str]
        """
        super(ServicesClient, self).__init__(parent, path_context)

    def convert_amount(self, query, context=None):
        """
        Resource /{merchantId}/services/convert/amount - Convert amount

        See also https://epayments-api.developer-ingenico.com/s2sapi/v1/en_US/python/services/convertAmount.html

        :param query: :class:`ingenico.connect.sdk.merchant.services.convert_amount_params.ConvertAmountParams`
        :param context: :class:`ingenico.connect.sdk.call_context.CallContext`
        :return: :class:`ingenico.connect.sdk.domain.services.convert_amount.ConvertAmount`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
            or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: GlobalCollectException if something went wrong at the Ingenico ePayments platform,
            the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
            or the service that you're trying to reach is temporarily unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        uri = self._instantiate_uri("/v1/{merchantId}/services/convert/amount", None)
        try:
            return self._communicator.get(
                uri,
                self._client_headers,
                query,
                ConvertAmount,
                context)
        except ResponseException as e:
            error_type = ErrorResponse
            error_object = self._communicator.marshaller.unmarshal(e.body, error_type)
            raise self._create_exception(e.status_code, e.body, error_object, context)

    def bankaccount(self, body, context=None):
        """
        Resource /{merchantId}/services/convert/bankaccount - Convert bankaccount

        See also https://epayments-api.developer-ingenico.com/s2sapi/v1/en_US/python/services/bankaccount.html

        :param body: :class:`ingenico.connect.sdk.domain.services.bank_details_request.BankDetailsRequest`
        :param context: :class:`ingenico.connect.sdk.call_context.CallContext`
        :return: :class:`ingenico.connect.sdk.domain.services.bank_details_response.BankDetailsResponse`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
            or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: GlobalCollectException if something went wrong at the Ingenico ePayments platform,
            the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
            or the service that you're trying to reach is temporarily unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        uri = self._instantiate_uri("/v1/{merchantId}/services/convert/bankaccount", None)
        try:
            return self._communicator.post(
                uri,
                self._client_headers,
                None,
                body,
                BankDetailsResponse,
                context)
        except ResponseException as e:
            error_type = ErrorResponse
            error_object = self._communicator.marshaller.unmarshal(e.body, error_type)
            raise self._create_exception(e.status_code, e.body, error_object, context)

    def get_iin_details(self, body, context=None):
        """
        Resource /{merchantId}/services/getIINdetails - Get IIN details

        See also https://epayments-api.developer-ingenico.com/s2sapi/v1/en_US/python/services/getIINdetails.html

        :param body: :class:`ingenico.connect.sdk.domain.services.get_iin_details_request.GetIINDetailsRequest`
        :param context: :class:`ingenico.connect.sdk.call_context.CallContext`
        :return: :class:`ingenico.connect.sdk.domain.services.get_iin_details_response.GetIINDetailsResponse`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
            or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: GlobalCollectException if something went wrong at the Ingenico ePayments platform,
            the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
            or the service that you're trying to reach is temporarily unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        uri = self._instantiate_uri("/v1/{merchantId}/services/getIINdetails", None)
        try:
            return self._communicator.post(
                uri,
                self._client_headers,
                None,
                body,
                GetIINDetailsResponse,
                context)
        except ResponseException as e:
            error_type = ErrorResponse
            error_object = self._communicator.marshaller.unmarshal(e.body, error_type)
            raise self._create_exception(e.status_code, e.body, error_object, context)

    def privacypolicy(self, query, context=None):
        """
        Resource /{merchantId}/services/privacypolicy - Get privacy policy

        See also https://epayments-api.developer-ingenico.com/s2sapi/v1/en_US/python/services/privacypolicy.html

        :param query: :class:`ingenico.connect.sdk.merchant.services.privacypolicy_params.PrivacypolicyParams`
        :param context: :class:`ingenico.connect.sdk.call_context.CallContext`
        :return: :class:`ingenico.connect.sdk.domain.services.get_privacy_policy_response.GetPrivacyPolicyResponse`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
            or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: GlobalCollectException if something went wrong at the Ingenico ePayments platform,
            the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
            or the service that you're trying to reach is temporarily unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        uri = self._instantiate_uri("/v1/{merchantId}/services/privacypolicy", None)
        try:
            return self._communicator.get(
                uri,
                self._client_headers,
                query,
                GetPrivacyPolicyResponse,
                context)
        except ResponseException as e:
            error_type = ErrorResponse
            error_object = self._communicator.marshaller.unmarshal(e.body, error_type)
            raise self._create_exception(e.status_code, e.body, error_object, context)

    def testconnection(self, context=None):
        """
        Resource /{merchantId}/services/testconnection - Test connection

        See also https://epayments-api.developer-ingenico.com/s2sapi/v1/en_US/python/services/testconnection.html

        :param context: :class:`ingenico.connect.sdk.call_context.CallContext`
        :return: :class:`ingenico.connect.sdk.domain.services.test_connection.TestConnection`
        :raise: ValidationException if the request was not correct and couldn't be processed (HTTP status code 400)
        :raise: AuthorizationException if the request was not allowed (HTTP status code 403)
        :raise: ReferenceException if an object was attempted to be referenced that doesn't exist or has been removed,
            or there was a conflict (HTTP status code 404, 409 or 410)
        :raise: GlobalCollectException if something went wrong at the Ingenico ePayments platform,
            the Ingenico ePayments platform was unable to process a message from a downstream partner/acquirer,
            or the service that you're trying to reach is temporarily unavailable (HTTP status code 500, 502 or 503)
        :raise: ApiException if the Ingenico ePayments platform returned any other error
        """
        uri = self._instantiate_uri("/v1/{merchantId}/services/testconnection", None)
        try:
            return self._communicator.get(
                uri,
                self._client_headers,
                None,
                TestConnection,
                context)
        except ResponseException as e:
            error_type = ErrorResponse
            error_object = self._communicator.marshaller.unmarshal(e.body, error_type)
            raise self._create_exception(e.status_code, e.body, error_object, context)
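A hedged sketch of reaching this client from application code; the configuration file, credentials and merchant id below are placeholders, and the Factory/merchant call chain is assumed from the SDK's usual structure rather than shown in this file.

from ingenico.connect.sdk.factory import Factory

client = Factory.create_client_from_file('configuration.ini', 'apiKeyId', 'secretApiKey')
try:
    test_connection = client.merchant('merchantId').services().testconnection()
    print(test_connection.result)  # assumed field, 'OK' on success
finally:
    client.close()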
| 58.181818
| 121
| 0.682445
| 1,258
| 10,880
| 5.806041
| 0.125596
| 0.034228
| 0.056681
| 0.047234
| 0.848028
| 0.830641
| 0.827081
| 0.784912
| 0.767662
| 0.717415
| 0
| 0.01674
| 0.247794
| 10,880
| 186
| 122
| 58.494624
| 0.875733
| 0.573989
| 0
| 0.666667
| 1
| 0
| 0.051289
| 0.051289
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.102564
| 0
| 0.25641
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1c7cafe7882a334b33a11f0ed2ef25a7492198cc
| 6,536
|
py
|
Python
|
startup/users/30-user-Fakhraai.py
|
NSLS-II-SMI/profile_collection
|
c1e2236a7520f605ac85e7591f05682add06357c
|
[
"BSD-3-Clause"
] | null | null | null |
startup/users/30-user-Fakhraai.py
|
NSLS-II-SMI/profile_collection
|
c1e2236a7520f605ac85e7591f05682add06357c
|
[
"BSD-3-Clause"
] | 13
|
2018-09-25T19:35:08.000Z
|
2021-01-15T20:42:26.000Z
|
startup/users/30-user-Fakhraai.py
|
NSLS-II-SMI/profile_collection
|
c1e2236a7520f605ac85e7591f05682add06357c
|
[
"BSD-3-Clause"
] | 3
|
2019-09-06T01:40:59.000Z
|
2020-07-01T20:27:39.000Z
|
def run_giwaxs_Fak(t=1):
    dets = [pil300KW, pil1M]

    xlocs1 = [-22000, 3000, 21500]
    names1 = ['TPD_52nm', 'TPD_42nm', 'TPD_32nm']

    # what we run now
    curr_tray = xlocs1
    curr_names = names1

    assert len(curr_tray) == len(curr_names), \
        f'Number of X coordinates ({len(curr_tray)}) is different from number of samples ({len(curr_names)})'

    waxs_range = [0, 13, 3]

    for x, name in zip(curr_tray, curr_names):
        yield from bps.mv(piezo.x, x)
        yield from bps.mv(piezo.th, 0.1)
        yield from alignement_gisaxs(0.1)
        plt.close('all')

        angle_offset = [0.1]
        a_off = piezo.th.position
        det_exposure_time(t, t)

        name_fmt = '{sample}_{angle}deg'
        for j, ang in enumerate(a_off + np.array(angle_offset)):
            yield from bps.mv(piezo.x, (x + j * 500))
            real_ang = angle_offset[j]
            yield from bps.mv(piezo.th, ang)

            sample_name = name_fmt.format(sample=name, angle=float('%.3f' % real_ang))
            sample_id(user_name='YJ', sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.scan(dets, waxs, *waxs_range)

    sample_id(user_name='test', sample_name='test')
    det_exposure_time(0.5, 0.5)


def gFak1(meas_t=1):
    username = 'AZ'
    names1 = 'aaA_20190926_OG_hot'
    dets = [pil1M, pil300KW, rayonix]
    angle_offset = [0.1]

    length = 17000
    x_edge = 31000  # make sure to define the edge as a top border of the sample on the camera using SmarAct X
    x_step = 4000
    nb_pt = (length - 500) // x_step
    xlocs1 = []
    for step in range(0, nb_pt + 1, 1):
        xlocs1 += [np.round(x_edge - step * x_step)]

    # what we run now
    curr_tray = xlocs1
    waxs_range = [0, 13, 3]

    yield from bps.mv(piezo.x, x_edge)
    yield from bps.mv(piezo.th, 0.1)
    yield from alignement_gisaxs(0.1)
    a_off = piezo.th.position
    plt.close('all')

    for i, x in enumerate(curr_tray):
        yield from bps.mv(piezo.x, x)
        if i != 0:
            yield from bps.mv(piezo.th, a_off + angle_offset[0])
            yield from quickalign_gisaxs(0.1)
            plt.close('all')
            a_off = piezo.th.position

        for ii, an in enumerate(angle_offset):
            yield from bps.mv(piezo.th, a_off + an)
            det_exposure_time(meas_t, meas_t)
            # temper = ls.ch1_read.value
            name_fmt = '{sample}_x{xlocation}_{angl}deg'
            sample_name = name_fmt.format(sample=names1, xlocation=x, angl=an)
            sample_id(user_name=username, sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.scan(dets, waxs, *waxs_range)

    sample_id(user_name='test', sample_name='test')
    det_exposure_time(0.3, 0.3)


def gFak2(meas_t=1):
    username = 'YJ'
    names1 = 'aaA_20190926_OG_middle'
    dets = [pil1M, pil300KW, rayonix]
    angle_offset = [0.1]

    length = 21000
    x_edge = 11000  # make sure to define the edge as a top border of the sample on the camera using SmarAct X
    x_step = 4000
    nb_pt = (length - 500) // x_step
    xlocs1 = []
    for step in range(0, nb_pt + 1, 1):
        xlocs1 += [np.round(x_edge - step * x_step)]

    # what we run now
    curr_tray = xlocs1
    waxs_range = [0, 13, 3]

    yield from bps.mv(piezo.x, x_edge)
    yield from bps.mv(piezo.th, 0.1)
    yield from alignement_gisaxs(0.1)
    a_off = piezo.th.position
    plt.close('all')

    for i, x in enumerate(curr_tray):
        yield from bps.mv(piezo.x, x)
        if i != 0:
            yield from bps.mv(piezo.th, a_off + angle_offset[0])
            yield from quickalign_gisaxs(0.1)
            plt.close('all')
            a_off = piezo.th.position

        for ii, an in enumerate(angle_offset):
            yield from bps.mv(piezo.th, a_off + an)
            det_exposure_time(meas_t, meas_t)
            # temper = ls.ch1_read.value
            name_fmt = '{sample}_x{xlocation}_{angl}deg'
            sample_name = name_fmt.format(sample=names1, xlocation=x, angl=an)
            sample_id(user_name=username, sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.scan(dets, waxs, *waxs_range)

    sample_id(user_name='test', sample_name='test')
    det_exposure_time(0.3, 0.3)


def gFak3(meas_t=1):
    username = 'AZ'
    names1 = 'aaA_20190926_OG_cold'
    dets = [pil1M, pil300KW, rayonix]
    angle_offset = [0.1]

    length = 17000
    x_edge = -14000  # make sure to define the edge as a top border of the sample on the camera using SmarAct X
    x_step = 4000
    nb_pt = (length - 500) // x_step
    xlocs1 = []
    for step in range(0, nb_pt + 1, 1):
        xlocs1 += [np.round(x_edge - step * x_step)]

    # what we run now
    curr_tray = xlocs1
    waxs_range = [0, 13, 3]

    yield from bps.mv(piezo.x, x_edge)
    yield from bps.mv(piezo.th, 0.1)
    yield from alignement_gisaxs(0.1)
    a_off = piezo.th.position
    plt.close('all')

    for i, x in enumerate(curr_tray):
        yield from bps.mv(piezo.x, x)
        if i != 0:
            yield from bps.mv(piezo.th, a_off + angle_offset[0])
            yield from quickalign_gisaxs(0.1)
            plt.close('all')
            a_off = piezo.th.position

        for ii, an in enumerate(angle_offset):
            yield from bps.mv(piezo.th, a_off + an)
            det_exposure_time(meas_t, meas_t)
            # temper = ls.ch1_read.value
            name_fmt = '{sample}_x{xlocation}_{angl}deg'
            sample_name = name_fmt.format(sample=names1, xlocation=x, angl=an)
            sample_id(user_name=username, sample_name=sample_name)
            print(f'\n\t=== Sample: {sample_name} ===\n')
            yield from bp.scan(dets, waxs, *waxs_range)

    sample_id(user_name='test', sample_name='test')
    det_exposure_time(0.3, 0.3)


def run_all_gFak():
    yield from gFak1()
    yield from gFak2()
    yield from gFak3()
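A hedged, device-free sketch of the composition pattern run_all_gFak relies on: each plan is a generator, and yield from splices sub-plans into one flat stream for a RunEngine-style consumer.

def plan_a():
    yield 'mv piezo.x'
    yield 'scan waxs'

def plan_b():
    yield 'mv piezo.th'

def run_all():
    # mirrors run_all_gFak: delegate to sub-plans in order
    yield from plan_a()
    yield from plan_b()

print(list(run_all()))  # ['mv piezo.x', 'scan waxs', 'mv piezo.th']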
| 38.674556
| 141
| 0.549419
| 930
| 6,536
| 3.673118
| 0.145161
| 0.086944
| 0.066745
| 0.077869
| 0.849239
| 0.845433
| 0.827576
| 0.798302
| 0.798302
| 0.765222
| 0
| 0.051768
| 0.337974
| 6,536
| 168
| 142
| 38.904762
| 0.737694
| 0.061506
| 0
| 0.776978
| 0
| 0
| 0.080666
| 0.018779
| 0
| 0
| 0
| 0
| 0.007194
| 1
| 0.035971
| false
| 0
| 0
| 0
| 0.035971
| 0.028777
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98d18bd4630170c7554f1b605c3648adb9efae85
| 93,629
|
py
|
Python
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_ip_access_list.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_ip_access_list.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_ip_access_list.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET


class brocade_ip_access_list(object):
    """Auto generated class.
    """
    def __init__(self, **kwargs):
        self._callback = kwargs.pop('callback')

    def ip_acl_ip_access_list_standard_name(self, **kwargs):
        """Auto Generated Code
        """
        config = ET.Element("config")
        ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
        ip = ET.SubElement(ip_acl, "ip")
        access_list = ET.SubElement(ip, "access-list")
        standard = ET.SubElement(access_list, "standard")
        name = ET.SubElement(standard, "name")
        name.text = kwargs.pop('name')
        callback = kwargs.pop('callback', self._callback)
        return callback(config)
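    # --- Hedged aside (not part of the generated module): constructed with
    # --- brocade_ip_access_list(callback=lambda c: c), the method above returns the
    # --- Element it builds, which ET.tostring renders roughly as
    # ---   <config>
    # ---     <ip-acl xmlns="urn:brocade.com:mgmt:brocade-ip-access-list">
    # ---       <ip><access-list><standard><name>ACL_NAME</name></standard></access-list></ip>
    # ---     </ip-acl>
    # ---   </config>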
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name = ET.SubElement(extended, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_protocol_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
protocol_type = ET.SubElement(seq, "protocol-type")
protocol_type.text = kwargs.pop('protocol_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport = ET.SubElement(seq, "sport")
sport.text = kwargs.pop('sport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_tcp = ET.SubElement(seq, "sport-number-eq-neq-tcp")
sport_number_eq_neq_tcp.text = kwargs.pop('sport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_tcp = ET.SubElement(seq, "sport-number-lt-tcp")
sport_number_lt_tcp.text = kwargs.pop('sport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_tcp = ET.SubElement(seq, "sport-number-gt-tcp")
sport_number_gt_tcp.text = kwargs.pop('sport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_eq_neq_udp = ET.SubElement(seq, "sport-number-eq-neq-udp")
sport_number_eq_neq_udp.text = kwargs.pop('sport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_lt_udp = ET.SubElement(seq, "sport-number-lt-udp")
sport_number_lt_udp.text = kwargs.pop('sport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_gt_udp = ET.SubElement(seq, "sport-number-gt-udp")
sport_number_gt_udp.text = kwargs.pop('sport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_tcp = ET.SubElement(seq, "sport-number-range-lower-tcp")
sport_number_range_lower_tcp.text = kwargs.pop('sport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_lower_udp = ET.SubElement(seq, "sport-number-range-lower-udp")
sport_number_range_lower_udp.text = kwargs.pop('sport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_tcp = ET.SubElement(seq, "sport-number-range-higher-tcp")
sport_number_range_higher_tcp.text = kwargs.pop('sport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport_number_range_higher_udp = ET.SubElement(seq, "sport-number-range-higher-udp")
sport_number_range_higher_udp.text = kwargs.pop('sport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
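# Hedged usage sketch: the *-range-lower-* and *-range-higher-* setters above
# each emit only one leaf, so matching a full source-port range plausibly
# takes two calls against the same ACL name and seq-id. `dev` (an object
# exposing these methods and a _callback) is an assumption, not a documented
# API; the port values are illustrative:
#
#   dev.ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_lower_tcp(
#       name='acl_demo', seq_id='10', sport_number_range_lower_tcp='1024')
#   dev.ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport_number_range_higher_tcp(
#       name='acl_demo', seq_id='10', sport_number_range_higher_tcp='2048')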
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_any_dip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_any_dip = ET.SubElement(seq, "dst-host-any-dip")
dst_host_any_dip.text = kwargs.pop('dst_host_any_dip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_host_ip = ET.SubElement(seq, "dst-host-ip")
dst_host_ip.text = kwargs.pop('dst_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dst_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dst_mask = ET.SubElement(seq, "dst-mask")
dst_mask.text = kwargs.pop('dst_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport = ET.SubElement(seq, "dport")
dport.text = kwargs.pop('dport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_tcp = ET.SubElement(seq, "dport-number-eq-neq-tcp")
dport_number_eq_neq_tcp.text = kwargs.pop('dport_number_eq_neq_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_tcp = ET.SubElement(seq, "dport-number-lt-tcp")
dport_number_lt_tcp.text = kwargs.pop('dport_number_lt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_tcp = ET.SubElement(seq, "dport-number-gt-tcp")
dport_number_gt_tcp.text = kwargs.pop('dport_number_gt_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_eq_neq_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_eq_neq_udp = ET.SubElement(seq, "dport-number-eq-neq-udp")
dport_number_eq_neq_udp.text = kwargs.pop('dport_number_eq_neq_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_lt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_lt_udp = ET.SubElement(seq, "dport-number-lt-udp")
dport_number_lt_udp.text = kwargs.pop('dport_number_lt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_gt_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_gt_udp = ET.SubElement(seq, "dport-number-gt-udp")
dport_number_gt_udp.text = kwargs.pop('dport_number_gt_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_tcp = ET.SubElement(seq, "dport-number-range-lower-tcp")
dport_number_range_lower_tcp.text = kwargs.pop('dport_number_range_lower_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_lower_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_lower_udp = ET.SubElement(seq, "dport-number-range-lower-udp")
dport_number_range_lower_udp.text = kwargs.pop('dport_number_range_lower_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_tcp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_tcp = ET.SubElement(seq, "dport-number-range-higher-tcp")
dport_number_range_higher_tcp.text = kwargs.pop('dport_number_range_higher_tcp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dport_number_range_higher_udp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dport_number_range_higher_udp = ET.SubElement(seq, "dport-number-range-higher-udp")
dport_number_range_higher_udp.text = kwargs.pop('dport_number_range_higher_udp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_dscp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
dscp = ET.SubElement(seq, "dscp")
dscp.text = kwargs.pop('dscp')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_urg(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
urg = ET.SubElement(seq, "urg")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_ack(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
ack = ET.SubElement(seq, "ack")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_push(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
push = ET.SubElement(seq, "push")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_fin(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
fin = ET.SubElement(seq, "fin")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_rst(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
rst = ET.SubElement(seq, "rst")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sync(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sync = ET.SubElement(seq, "sync")
callback = kwargs.pop('callback', self._callback)
return callback(config)
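# Note on the flag setters above: urg, ack, push, fin, rst and sync are
# presence leaves -- the element is created with no text, so simply calling
# the method asserts the corresponding TCP-flag match. For example,
# ..._seq_ack(name='acl_demo', seq_id='10') puts an empty <ack/> inside the
# matching <seq> entry (names and values here are illustrative).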
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
vlan = ET.SubElement(seq, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
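# count and log are presence leaves as well: each call adds an empty <count/>
# or <log/> under the selected <seq> entry to enable hit counting or logging
# for that rule.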
def ip_acl_ip_access_list_standard_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name = ET.SubElement(standard, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
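# Hedged sketch: creating a standard ACL and its first sequence entry. The
# receiver object `dev` and the call order are assumptions based on this
# file's structure, not a documented workflow:
#
#   dev.ip_acl_ip_access_list_standard_name(name='std_demo')
#   dev.ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(
#       name='std_demo', seq_id='10')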
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
standard = ET.SubElement(access_list, "standard")
name_key = ET.SubElement(standard, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_std = ET.SubElement(standard, "hide-ip-acl-std")
seq = ET.SubElement(hide_ip_acl_std, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
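# Hedged end-to-end sketch for one standard-ACL rule, roughly
# "seq 10 permit host 192.0.2.1 count": each setter sends its own <config>
# tree, so the rule is assembled leaf by leaf. All names and values are
# illustrative, not a documented workflow:
#
#   dev.ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_action(
#       name='std_demo', seq_id='10', action='permit')
#   dev.ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_any_sip(
#       name='std_demo', seq_id='10', src_host_any_sip='host')
#   dev.ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_src_host_ip(
#       name='std_demo', seq_id='10', src_host_ip='192.0.2.1')
#   dev.ip_acl_ip_access_list_standard_hide_ip_acl_std_seq_count(
#       name='std_demo', seq_id='10')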
def ip_acl_ip_access_list_extended_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name = ET.SubElement(extended, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_seq_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id = ET.SubElement(seq, "seq-id")
seq_id.text = kwargs.pop('seq_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
action = ET.SubElement(seq, "action")
action.text = kwargs.pop('action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_protocol_type(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
protocol_type = ET.SubElement(seq, "protocol-type")
protocol_type.text = kwargs.pop('protocol_type')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_any_sip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_any_sip = ET.SubElement(seq, "src-host-any-sip")
src_host_any_sip.text = kwargs.pop('src_host_any_sip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_host_ip(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_host_ip = ET.SubElement(seq, "src-host-ip")
src_host_ip.text = kwargs.pop('src_host_ip')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_src_mask(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
src_mask = ET.SubElement(seq, "src-mask")
src_mask.text = kwargs.pop('src_mask')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sport = ET.SubElement(seq, "sport")
sport.text = kwargs.pop('sport')
callback = kwargs.pop('callback', self._callback)
return callback(config)
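# Hedged sketch: every setter accepts callback=..., so the generated XML can
# be inspected without any device by passing a capture function (ET is
# xml.etree.ElementTree, as used throughout this module):
#
#   captured = []
#   dev.ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sport(
#       name='acl_demo', seq_id='10', sport='true',
#       callback=lambda cfg: captured.append(ET.tostring(cfg)))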
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_sync(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
sync = ET.SubElement(seq, "sync")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_vlan(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
vlan = ET.SubElement(seq, "vlan")
vlan.text = kwargs.pop('vlan')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_count(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
count = ET.SubElement(seq, "count")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
ip = ET.SubElement(ip_acl, "ip")
access_list = ET.SubElement(ip, "access-list")
extended = ET.SubElement(access_list, "extended")
name_key = ET.SubElement(extended, "name")
name_key.text = kwargs.pop('name')
hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
seq = ET.SubElement(hide_ip_acl_ext, "seq")
seq_id_key = ET.SubElement(seq, "seq-id")
seq_id_key.text = kwargs.pop('seq_id')
log = ET.SubElement(seq, "log")
callback = kwargs.pop('callback', self._callback)
return callback(config)
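Each of the generated builders above follows the same shape: grow a `<config>` ElementTree, fill in the keyed path (`name`, `seq-id`) from `kwargs`, and hand the finished tree to a callback. A minimal usage sketch, assuming `ET` is `xml.etree.ElementTree` as in these helpers; the `_AclStub` class and its serializing default callback are hypothetical stand-ins for the real device-session object:

import xml.etree.ElementTree as ET

class _AclStub:
    # hypothetical host class; the real one supplies a NETCONF-sending callback
    def __init__(self):
        self._callback = lambda cfg: ET.tostring(cfg)

    # one generated method reproduced verbatim for demonstration
    def ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(self, **kwargs):
        config = ET.Element("config")
        ip_acl = ET.SubElement(config, "ip-acl", xmlns="urn:brocade.com:mgmt:brocade-ip-access-list")
        ip = ET.SubElement(ip_acl, "ip")
        access_list = ET.SubElement(ip, "access-list")
        extended = ET.SubElement(access_list, "extended")
        name_key = ET.SubElement(extended, "name")
        name_key.text = kwargs.pop('name')
        hide_ip_acl_ext = ET.SubElement(extended, "hide-ip-acl-ext")
        seq = ET.SubElement(hide_ip_acl_ext, "seq")
        seq_id_key = ET.SubElement(seq, "seq-id")
        seq_id_key.text = kwargs.pop('seq_id')
        ET.SubElement(seq, "log")
        callback = kwargs.pop('callback', self._callback)
        return callback(config)

xml_bytes = _AclStub().ip_acl_ip_access_list_extended_hide_ip_acl_ext_seq_log(
    name="acl-1", seq_id="10")
print(xml_bytes.decode())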
| 47.794283
| 105
| 0.643348
| 12,886
| 93,629
| 4.398262
| 0.005898
| 0.186322
| 0.084904
| 0.069447
| 0.997565
| 0.997565
| 0.997565
| 0.997565
| 0.997565
| 0.997565
| 0
| 0
| 0.220669
| 93,629
| 1,959
| 106
| 47.794283
| 0.776724
| 0.031475
| 0
| 0.997423
| 1
| 0
| 0.162347
| 0.061878
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065077
| false
| 0
| 0.000644
| 0
| 0.130799
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
98f7524b46a792a4749c45afc034066765f6b131
| 8,830
|
py
|
Python
|
tests/test_target_plot_interact_binary.py
|
antwhite/PDPbox
|
b022a0aabcc6dbe2440244bf48d08fbb6ecdaf2d
|
[
"MIT"
] | 675
|
2017-08-08T03:37:46.000Z
|
2022-03-31T20:14:02.000Z
|
tests/test_target_plot_interact_binary.py
|
antwhite/PDPbox
|
b022a0aabcc6dbe2440244bf48d08fbb6ecdaf2d
|
[
"MIT"
] | 60
|
2017-08-02T15:59:02.000Z
|
2022-03-29T03:57:22.000Z
|
tests/test_target_plot_interact_binary.py
|
antwhite/PDPbox
|
b022a0aabcc6dbe2440244bf48d08fbb6ecdaf2d
|
[
"MIT"
] | 121
|
2017-08-08T03:37:50.000Z
|
2022-03-29T10:06:11.000Z
|
from pdpbox.info_plots import target_plot_interact
import pandas as pd
import numpy as np
from numpy import nan
from pandas.testing import assert_frame_equal
def test_binary_onehot(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Sex", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Sex", "Embarked"],
target=titanic_target,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_onehot_numeric(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_onehot_numeric_endpoint(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
endpoint=False,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_onehot_numeric_num_grid_points(titanic_data, titanic_target):
fare_grid_points = 15
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
num_grid_points=[fare_grid_points, None],
)
assert summary_df["count"].sum() == len(titanic_data)
assert len(summary_df) == (fare_grid_points - 1) * 3
def test_onehot_numeric_gridtype_equal(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
grid_types=["equal", "equal"],
)
assert summary_df["count"].sum() == len(titanic_data)
# all bins should have the same width (equal grid type)
    assert (summary_df.value_upper_1 - summary_df.value_lower_1).diff().abs().sum() < 1e-9
def test_onehot_numeric_percentile(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
percentile_ranges=[(5, 95), None],
)
fare = titanic_data["Fare"]
inside_percentile = (fare >= fare.quantile(0.05)) & (fare <= fare.quantile(0.95))
assert summary_df["count"].sum() == inside_percentile.sum()
def test_onehot_numeric_gridranges(titanic_data, titanic_target):
"""
Grid type must be 'equal' for grid ranges to work
    TODO: maybe this should be automatic, or at least warn the user when grid types are not 'equal'
"""
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
grid_types=["equal", "equal"],
grid_ranges=[(0, 100), None],
)
# counts must total to the total observations inside the defined range
inside_range = (titanic_data["Fare"] >= 0) & (titanic_data["Fare"] <= 100)
assert summary_df["count"].sum() == inside_range.sum()
# first and last values must be equal to the defined ranges
assert summary_df.groupby("x2").first().value_lower_1.unique()[0] == 0.0
assert summary_df.groupby("x2").last().value_upper_1.unique()[0] == 100.0
def test_onehot_numeric_gridranges_outliers(titanic_data, titanic_target):
"""
    'show_outliers' requires 'grid_ranges' or another custom grid definition
    TODO: again, this requirement should be made explicit to the user
"""
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
grid_types=["equal", "equal"],
grid_ranges=[(0, 100), None],
show_outliers=True,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_onehot_numeric_gridranges_outliers_endpoint(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
grid_types=["equal", "equal"],
grid_ranges=[(0, 100), None],
show_outliers=True,
endpoint=False,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_onehot_numeric_cust_grid_points(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
cust_grid_points=[range(0, 100, 10), None],
)
# counts must total to the total observations inside the defined range
inside_range = (titanic_data["Fare"] >= 0) & (titanic_data["Fare"] <= 90)
assert summary_df["count"].sum() == inside_range.sum()
# lower and upper values must follow the prescribed grid
assert (
summary_df.groupby("x1").value_lower_1.mean().values == np.arange(0.0, 90, 10)
).all()
assert (
summary_df.groupby("x1").value_upper_1.mean().values == np.arange(10.0, 100, 10)
).all()
def test_onehot_numeric_gridpoints_outliers(titanic_data, titanic_target):
"""
    'show_outliers' requires 'cust_grid_points' or another custom grid definition
    TODO: again, this requirement should be made explicit to the user
"""
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
cust_grid_points=[range(0, 100, 10), None],
show_outliers=True,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_onehot_numeric_gridpoints_outliers_endpoint(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
cust_grid_points=[range(0, 100, 10), None],
show_outliers=True,
endpoint=False,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_onehot_numeric_show_percentiles(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
percentile_ranges=[(5, 95), None],
show_percentile=True,
)
fare = titanic_data["Fare"]
inside_percentile = (fare >= fare.quantile(0.05)) & (fare <= fare.quantile(0.95))
assert summary_df["count"].sum() == inside_percentile.sum()
def test_onehot_numeric_show_outliers(titanic_data, titanic_target):
"""
    'show_outliers' requires 'percentile_ranges' or another custom grid definition
    TODO: again, this requirement should be made explicit to the user
"""
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
percentile_ranges=[(5, 95), None],
show_percentile=True,
show_outliers=True,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_onehot_numeric_show_outliers_endpoint(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", ["Embarked_C", "Embarked_Q", "Embarked_S"]],
feature_names=["Fare", "Embarked"],
target=titanic_target,
percentile_ranges=[(5, 95), None],
show_percentile=True,
show_outliers=True,
endpoint=False,
)
assert summary_df["count"].sum() == len(titanic_data)
def test_binary_numeric(titanic_data, titanic_target):
fig, axes, summary_df = target_plot_interact(
df=titanic_data,
features=["Fare", "Sex"],
feature_names=["Fare", "Sex"],
target=titanic_target,
show_percentile=True,
percentile_ranges=[(5, 95), None],
show_outliers=True,
)
assert summary_df["count"].sum() == len(titanic_data)
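Every test above leans on the `titanic_data` and `titanic_target` pytest fixtures, which live outside this file (presumably in a conftest). A hedged sketch of what they might look like; the CSV path and the `Survived` column name are assumptions, not PDPbox's actual fixtures:

import pandas as pd
import pytest

@pytest.fixture(scope="session")
def titanic_data():
    # one-hot encoded Titanic frame with columns such as Sex, Fare, Embarked_C/Q/S
    return pd.read_csv("tests/datasets/titanic.csv")  # hypothetical path

@pytest.fixture(scope="session")
def titanic_target():
    # name of the binary target column inside titanic_data
    return "Survived"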
| 36.487603
| 94
| 0.661382
| 1,111
| 8,830
| 4.947795
| 0.110711
| 0.100055
| 0.057304
| 0.069856
| 0.850282
| 0.832818
| 0.795525
| 0.795525
| 0.733673
| 0.733673
| 0
| 0.014493
| 0.202945
| 8,830
| 241
| 95
| 36.639004
| 0.766553
| 0.093092
| 0
| 0.718232
| 0
| 0
| 0.107733
| 0
| 0
| 0
| 0
| 0.016598
| 0.127072
| 1
| 0.088398
| false
| 0
| 0.027624
| 0
| 0.116022
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c701452e36b0618fcee2d8134c0456a02b55ec6d
| 11,460
|
py
|
Python
|
gators/encoders/tests/test_muticlass_encoder.py
|
Aditya-Kapadiya/gators
|
d7c9967e3a8e304a601b6a92ad834d03d3e36338
|
[
"Apache-2.0"
] | 4
|
2021-10-29T18:20:52.000Z
|
2022-03-31T22:53:03.000Z
|
gators/encoders/tests/test_muticlass_encoder.py
|
Aditya-Kapadiya/gators
|
d7c9967e3a8e304a601b6a92ad834d03d3e36338
|
[
"Apache-2.0"
] | 1
|
2022-01-19T12:16:19.000Z
|
2022-01-19T12:16:19.000Z
|
gators/encoders/tests/test_muticlass_encoder.py
|
Aditya-Kapadiya/gators
|
d7c9967e3a8e304a601b6a92ad834d03d3e36338
|
[
"Apache-2.0"
] | 5
|
2021-11-17T20:16:54.000Z
|
2022-02-21T18:21:02.000Z
|
# License: Apache-2.0
import databricks.koalas as ks
import numpy as np
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal
from gators.encoders import MultiClassEncoder, WOEEncoder
ks.set_option("compute.default_index_type", "distributed-sequence")
@pytest.fixture
def data():
X = pd.DataFrame(
{
"A": ["Q", "Q", "Q", "W", "W", "W"],
"B": ["Q", "Q", "W", "W", "W", "W"],
"C": ["Q", "Q", "Q", "Q", "W", "W"],
"D": [1, 2, 3, 4, 5, 6],
}
)
y = pd.Series([0, 0, 1, 2, 1, 2], name="TARGET")
obj = MultiClassEncoder(WOEEncoder()).fit(X, y)
X_expected = pd.DataFrame(
{
"D": {0: 1.0, 1: 2.0, 2: 3.0, 3: 4.0, 4: 5.0, 5: 6.0},
"A__TARGET_1_WOEEncoder": {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0},
"B__TARGET_1_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.6931471805599453,
3: 0.6931471805599453,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"C__TARGET_1_WOEEncoder": {
0: -0.40546510810816444,
1: -0.40546510810816444,
2: -0.40546510810816444,
3: -0.40546510810816444,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"A__TARGET_2_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.0,
3: 1.3862943611198906,
4: 1.3862943611198906,
5: 1.3862943611198906,
},
"B__TARGET_2_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.6931471805599453,
3: 0.6931471805599453,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"C__TARGET_2_WOEEncoder": {
0: -0.40546510810816444,
1: -0.40546510810816444,
2: -0.40546510810816444,
3: -0.40546510810816444,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
}
)
return obj, X, X_expected
@pytest.fixture
def data_float32():
X = pd.DataFrame(
{
"A": ["Q", "Q", "Q", "W", "W", "W"],
"B": ["Q", "Q", "W", "W", "W", "W"],
"C": ["Q", "Q", "Q", "Q", "W", "W"],
"D": [1, 2, 3, 4, 5, 6],
}
)
y = pd.Series([0, 0, 1, 2, 1, 2], name="TARGET")
obj = MultiClassEncoder(WOEEncoder(), dtype=np.float32).fit(X, y)
X_expected = pd.DataFrame(
{
"D": {0: 1.0, 1: 2.0, 2: 3.0, 3: 4.0, 4: 5.0, 5: 6.0},
"A__TARGET_1_WOEEncoder": {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0},
"B__TARGET_1_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.6931471805599453,
3: 0.6931471805599453,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"C__TARGET_1_WOEEncoder": {
0: -0.40546510810816444,
1: -0.40546510810816444,
2: -0.40546510810816444,
3: -0.40546510810816444,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"A__TARGET_2_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.0,
3: 1.3862943611198906,
4: 1.3862943611198906,
5: 1.3862943611198906,
},
"B__TARGET_2_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.6931471805599453,
3: 0.6931471805599453,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"C__TARGET_2_WOEEncoder": {
0: -0.40546510810816444,
1: -0.40546510810816444,
2: -0.40546510810816444,
3: -0.40546510810816444,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
}
).astype(np.float32)
return obj, X, X_expected
@pytest.fixture
def data_no_cat():
X = pd.DataFrame(
np.zeros((3, 6)),
columns=list("qweasd"),
)
y = pd.Series([1, 2, 0], name="TARGET")
obj = MultiClassEncoder(WOEEncoder()).fit(X, y)
return obj, X, X.copy()
@pytest.fixture
def data_ks():
X = ks.DataFrame(
{
"A": ["Q", "Q", "Q", "W", "W", "W"],
"B": ["Q", "Q", "W", "W", "W", "W"],
"C": ["Q", "Q", "Q", "Q", "W", "W"],
"D": [1, 2, 3, 4, 5, 6],
}
)
y = ks.Series([0, 0, 1, 2, 1, 2], name="TARGET")
obj = MultiClassEncoder(WOEEncoder()).fit(X, y)
X_expected = pd.DataFrame(
{
"D": {0: 1.0, 1: 2.0, 2: 3.0, 3: 4.0, 4: 5.0, 5: 6.0},
"A__TARGET_1_WOEEncoder": {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0},
"B__TARGET_1_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.6931471805599453,
3: 0.6931471805599453,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"C__TARGET_1_WOEEncoder": {
0: -0.40546510810816444,
1: -0.40546510810816444,
2: -0.40546510810816444,
3: -0.40546510810816444,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"A__TARGET_2_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.0,
3: 1.3862943611198906,
4: 1.3862943611198906,
5: 1.3862943611198906,
},
"B__TARGET_2_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.6931471805599453,
3: 0.6931471805599453,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"C__TARGET_2_WOEEncoder": {
0: -0.40546510810816444,
1: -0.40546510810816444,
2: -0.40546510810816444,
3: -0.40546510810816444,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
}
)
return obj, X, X_expected
@pytest.fixture
def data_float32_ks():
X = ks.DataFrame(
{
"A": ["Q", "Q", "Q", "W", "W", "W"],
"B": ["Q", "Q", "W", "W", "W", "W"],
"C": ["Q", "Q", "Q", "Q", "W", "W"],
"D": [1, 2, 3, 4, 5, 6],
}
)
y = ks.Series([0, 0, 1, 2, 1, 2], name="TARGET")
obj = MultiClassEncoder(WOEEncoder(), dtype=np.float32).fit(X, y)
X_expected = pd.DataFrame(
{
"D": {0: 1.0, 1: 2.0, 2: 3.0, 3: 4.0, 4: 5.0, 5: 6.0},
"A__TARGET_1_WOEEncoder": {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0},
"B__TARGET_1_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.6931471805599453,
3: 0.6931471805599453,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"C__TARGET_1_WOEEncoder": {
0: -0.40546510810816444,
1: -0.40546510810816444,
2: -0.40546510810816444,
3: -0.40546510810816444,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"A__TARGET_2_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.0,
3: 1.3862943611198906,
4: 1.3862943611198906,
5: 1.3862943611198906,
},
"B__TARGET_2_WOEEncoder": {
0: 0.0,
1: 0.0,
2: 0.6931471805599453,
3: 0.6931471805599453,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
"C__TARGET_2_WOEEncoder": {
0: -0.40546510810816444,
1: -0.40546510810816444,
2: -0.40546510810816444,
3: -0.40546510810816444,
4: 0.6931471805599453,
5: 0.6931471805599453,
},
}
).astype(np.float32)
return obj, X, X_expected
@pytest.fixture
def data_no_cat_ks():
X = ks.DataFrame(
np.zeros((3, 6)),
columns=list("qweasd"),
)
y = ks.Series([1, 2, 0], name="TARGET")
obj = MultiClassEncoder(WOEEncoder()).fit(X, y)
return obj, X, X.to_pandas().copy()
def test_pd(data):
obj, X, X_expected = data
X_new = obj.transform(X)
assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_ks(data_ks):
obj, X, X_expected = data_ks
X_new = obj.transform(X)
assert_frame_equal(X_new.to_pandas(), X_expected)
def test_pd_np(data):
obj, X, X_expected = data
X_numpy = X.to_numpy()
X_numpy_new = obj.transform_numpy(X_numpy)
X_new = pd.DataFrame(X_numpy_new, columns=X_expected.columns)
assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_ks_np(data_ks):
obj, X, X_expected = data_ks
X_numpy = X.to_numpy()
X_numpy_new = obj.transform_numpy(X_numpy)
X_new = pd.DataFrame(X_numpy_new, columns=X_expected.columns)
assert_frame_equal(X_new, X_expected)
def test_float32_pd(data_float32):
obj, X, X_expected = data_float32
X_new = obj.transform(X)
assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_float32_ks(data_float32_ks):
obj, X, X_expected = data_float32_ks
X_new = obj.transform(X)
assert_frame_equal(X_new.to_pandas(), X_expected)
def test_float32_pd_np(data_float32):
obj, X, X_expected = data_float32
X_numpy = X.to_numpy()
X_numpy_new = obj.transform_numpy(X_numpy)
X_new = pd.DataFrame(X_numpy_new, columns=X_expected.columns)
assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_float32_ks_np(data_float32_ks):
obj, X, X_expected = data_float32_ks
X_numpy = X.to_numpy()
X_numpy_new = obj.transform_numpy(X_numpy)
X_new = pd.DataFrame(X_numpy_new, columns=X_expected.columns)
assert_frame_equal(X_new, X_expected)
def test_without_cat_pd(data_no_cat):
obj, X, X_expected = data_no_cat
X_new = obj.transform(X)
assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_without_cat_ks(data_no_cat_ks):
obj, X, X_expected = data_no_cat_ks
X_new = obj.transform(X)
assert_frame_equal(X_new.to_pandas(), X_expected)
def test_without_cat_pd_np(data_no_cat):
obj, X, X_expected = data_no_cat
X_numpy = X.to_numpy()
X_numpy_new = obj.transform_numpy(X_numpy)
X_new = pd.DataFrame(X_numpy_new, columns=X_expected.columns)
assert_frame_equal(X_new, X_expected)
@pytest.mark.koalas
def test_without_cat_ks_np(data_no_cat_ks):
obj, X, X_expected = data_no_cat_ks
X_numpy = X.to_numpy()
X_numpy_new = obj.transform_numpy(X_numpy)
X_new = pd.DataFrame(X_numpy_new, columns=X_expected.columns)
assert_frame_equal(X_new, X_expected)
def test_init():
    with pytest.raises(TypeError):
        _ = MultiClassEncoder(encoder="q")
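The repeated constants in the expected frames are just log-odds ratios: 0.6931471805599453 is ln(2) and -0.40546510810816444 is ln(2/3). A quick check of the latter for column C against TARGET == 1, using the usual weight-of-evidence formula (a sketch; gators' exact conventions, e.g. mapping zero-event categories to 0.0, may differ):

import numpy as np

y = np.array([0, 0, 1, 2, 1, 2])
event = (y == 1)
in_Q = np.array([True, True, True, True, False, False])  # rows where C == "Q"

dist_events = event[in_Q].sum() / event.sum()            # 1/2
dist_non_events = (~event)[in_Q].sum() / (~event).sum()  # 3/4
print(np.log(dist_events / dist_non_events))             # -0.40546510810816444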
| 30.641711
| 87
| 0.496248
| 1,439
| 11,460
| 3.731758
| 0.054899
| 0.029795
| 0.053631
| 0.038734
| 0.940596
| 0.940037
| 0.936685
| 0.905773
| 0.905773
| 0.870764
| 0
| 0.279747
| 0.364921
| 11,460
| 373
| 88
| 30.723861
| 0.458093
| 0.001658
| 0
| 0.777108
| 0
| 0
| 0.062593
| 0.048431
| 0
| 0
| 0
| 0
| 0.039157
| 1
| 0.057229
| false
| 0
| 0.018072
| 0
| 0.093373
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c723276db3b920a06738d51cac498255ec048fce
| 1,200
|
py
|
Python
|
tests/transform/test_resource.py
|
hbruch/frictionless-py
|
0f97d33c8fea7ef60cf8458b72eb0f54f4649798
|
[
"MIT"
] | null | null | null |
tests/transform/test_resource.py
|
hbruch/frictionless-py
|
0f97d33c8fea7ef60cf8458b72eb0f54f4649798
|
[
"MIT"
] | null | null | null |
tests/transform/test_resource.py
|
hbruch/frictionless-py
|
0f97d33c8fea7ef60cf8458b72eb0f54f4649798
|
[
"MIT"
] | null | null | null |
from frictionless import Resource, transform, steps
# General
def test_transform_resource():
source = Resource(path="data/transform.csv")
source.infer()
target = transform(
source,
steps=[
steps.table_normalize(),
steps.table_melt(field_name="id"),
steps.table_recast(field_name="id"),
],
)
assert target.schema == source.schema
assert target.read_rows() == [
{"id": 1, "name": "germany", "population": 83},
{"id": 2, "name": "france", "population": 66},
{"id": 3, "name": "spain", "population": 47},
]
def test_transform_resource_from_descriptor():
source = Resource(path="data/transform.csv")
source.infer()
target = transform(
source.to_dict(),
steps=[
steps.table_normalize(),
steps.table_melt(field_name="id"),
steps.table_recast(field_name="id"),
],
)
assert target.schema == source.schema
assert target.read_rows() == [
{"id": 1, "name": "germany", "population": 83},
{"id": 2, "name": "france", "population": 66},
{"id": 3, "name": "spain", "population": 47},
]
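`table_melt` followed by `table_recast` on the same key is a round trip, which is why both tests expect the original schema and rows back. A hedged illustration of that round trip in pandas; the contents of `data/transform.csv` are inferred from the expected rows:

import pandas as pd

df = pd.DataFrame({"id": [1, 2, 3],
                   "name": ["germany", "france", "spain"],
                   "population": [83, 66, 47]})
melted = df.melt(id_vars="id")  # long form: one (id, variable, value) row per cell
recast = (melted.pivot(index="id", columns="variable", values="value")
          .reset_index()[["id", "name", "population"]])
print(recast.to_dict("records") == df.to_dict("records"))  # True: melt then recast is the identity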
| 27.906977
| 55
| 0.56
| 127
| 1,200
| 5.141732
| 0.307087
| 0.091884
| 0.067381
| 0.073507
| 0.817764
| 0.817764
| 0.817764
| 0.817764
| 0.817764
| 0.817764
| 0
| 0.020548
| 0.27
| 1,200
| 42
| 56
| 28.571429
| 0.724886
| 0.005833
| 0
| 0.742857
| 0
| 0
| 0.147775
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 1
| 0.057143
| false
| 0
| 0.028571
| 0
| 0.085714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c75409e0b19d4bf2ed9b732b75a0d4ee224b1ade
| 63
|
py
|
Python
|
repetition/print-list-numbers.py
|
gabrielcostasilva/python-basic-examples
|
d9c20887b94b823fc59bc82f250d39d76b918ad8
|
[
"MIT"
] | null | null | null |
repetition/print-list-numbers.py
|
gabrielcostasilva/python-basic-examples
|
d9c20887b94b823fc59bc82f250d39d76b918ad8
|
[
"MIT"
] | null | null | null |
repetition/print-list-numbers.py
|
gabrielcostasilva/python-basic-examples
|
d9c20887b94b823fc59bc82f250d39d76b918ad8
|
[
"MIT"
] | null | null | null |
# prints 1..10 one per line (list comprehension used only for its side effect)
[print(x) for x in range(1, 11)]
# prints the whole list [1, ..., 10] at once
print([x for x in range(1, 11)])
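The first line above abuses a list comprehension for its side effect, building and discarding a list of ten `None`s. An idiomatic rewrite of both lines:

# loop for side effects; comprehension only when the list itself is wanted
for x in range(1, 11):
    print(x)

print(list(range(1, 11)))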
| 31.5
| 31
| 0.634921
| 16
| 63
| 2.5
| 0.4375
| 0.3
| 0.45
| 0.5
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0.111111
| 0.142857
| 63
| 2
| 32
| 31.5
| 0.62963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
1bef74581b836f8badc7010e8a7fa32b8d819273
| 2,469
|
py
|
Python
|
funds/migrations/0002_auto_20210604_0459.py
|
julianespinel/fund
|
370b46fc85ae47bf0146e0ab2af49cc158f7ce94
|
[
"MIT"
] | 1
|
2021-10-29T20:13:57.000Z
|
2021-10-29T20:13:57.000Z
|
funds/migrations/0002_auto_20210604_0459.py
|
julianespinel/fund
|
370b46fc85ae47bf0146e0ab2af49cc158f7ce94
|
[
"MIT"
] | null | null | null |
funds/migrations/0002_auto_20210604_0459.py
|
julianespinel/fund
|
370b46fc85ae47bf0146e0ab2af49cc158f7ce94
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2 on 2021-06-04 04:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('funds', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='fund',
name='added_usd',
field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
),
migrations.AlterField(
model_name='fund',
name='market_value',
field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
),
migrations.AlterField(
model_name='historicalprice',
name='closing_price',
field=models.DecimalField(decimal_places=10, max_digits=19),
),
migrations.AlterField(
model_name='investor',
name='added_usd',
field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
),
migrations.AlterField(
model_name='investor',
name='market_value',
field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
),
migrations.AlterField(
model_name='investor',
name='ownership_percentage',
field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
),
migrations.AlterField(
model_name='operation',
name='fee',
field=models.DecimalField(decimal_places=10, max_digits=19),
),
migrations.AlterField(
model_name='operation',
name='price',
field=models.DecimalField(decimal_places=10, max_digits=19),
),
migrations.AlterField(
model_name='operation',
name='total',
field=models.DecimalField(decimal_places=10, max_digits=19),
),
migrations.AlterField(
model_name='operation',
name='units',
field=models.DecimalField(decimal_places=10, max_digits=19),
),
migrations.AlterField(
model_name='portfoliosnapshot',
name='market_price',
field=models.DecimalField(decimal_places=10, max_digits=19, null=True),
),
migrations.AlterField(
model_name='position',
name='units',
field=models.DecimalField(decimal_places=10, default=0, max_digits=19),
),
]
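All of these `AlterField` operations converge on the same precision: `max_digits=19, decimal_places=10`, i.e. at most 9 digits before the decimal point and 10 after. A quick, Django-independent sanity check of that bound:

from decimal import Decimal

# largest value a DecimalField(max_digits=19, decimal_places=10) can hold
max_value = Decimal("9" * 9 + "." + "9" * 10)
print(max_value)                         # 999999999.9999999999
print(len(max_value.as_tuple().digits))  # 19 digits in total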
| 33.364865
| 83
| 0.580397
| 239
| 2,469
| 5.811715
| 0.225941
| 0.172786
| 0.215983
| 0.25054
| 0.810655
| 0.810655
| 0.784017
| 0.784017
| 0.773938
| 0.758819
| 0
| 0.042056
| 0.306602
| 2,469
| 73
| 84
| 33.821918
| 0.769276
| 0.017416
| 0
| 0.746269
| 1
| 0
| 0.096947
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014925
| 0
| 0.059701
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1bf5652f598f5ef464fa1752b7eecfd3594a5b23
| 282
|
py
|
Python
|
python/CV_ex074.py
|
FaunoGuazina/X_very_old_algorithm_exercises
|
c9b9ec78e8b82f2e23ef85ba9a5e7fd6e0deaea6
|
[
"MIT"
] | null | null | null |
python/CV_ex074.py
|
FaunoGuazina/X_very_old_algorithm_exercises
|
c9b9ec78e8b82f2e23ef85ba9a5e7fd6e0deaea6
|
[
"MIT"
] | null | null | null |
python/CV_ex074.py
|
FaunoGuazina/X_very_old_algorithm_exercises
|
c9b9ec78e8b82f2e23ef85ba9a5e7fd6e0deaea6
|
[
"MIT"
] | null | null | null |
from random import randint
a = randint(0, 999), randint(0, 999), randint(0, 999), randint(0, 999), randint(0, 999)
print(f'the numbers are:\n{a}')
# sorted() is ascending: the smallest value is first, the largest last
print(f'the largest number is {sorted(a)[-1]}\nthe smallest number is {sorted(a)[0]}')
# max(a) and min(a) could be used instead of sorting to get the largest and smallest.
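As the closing comment notes, the built-ins make the sort unnecessary; a minimal alternative with the same five draws, written with a generator expression:

from random import randint

a = tuple(randint(0, 999) for _ in range(5))
print(f'the numbers are:\n{a}')
print(f'the largest number is {max(a)}\nthe smallest number is {min(a)}')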
| 47
| 87
| 0.673759
| 56
| 282
| 3.392857
| 0.5
| 0.210526
| 0.289474
| 0.378947
| 0.289474
| 0.289474
| 0.289474
| 0.289474
| 0.289474
| 0.289474
| 0
| 0.090909
| 0.141844
| 282
| 5
| 88
| 56.4
| 0.694215
| 0.202128
| 0
| 0
| 0
| 0.25
| 0.381166
| 0
| 0
| 0
| 0
| 0.2
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
1bfb83ab2975090e525645fb219900d2b9ad2ae6
| 39,875
|
py
|
Python
|
plugins/modules/oci_network_security_group_security_rule_actions.py
|
sagar2938/oci-ansible-collection
|
5b8ce583a0d5d0aabf14494d61aea4649e18d1e6
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_network_security_group_security_rule_actions.py
|
sagar2938/oci-ansible-collection
|
5b8ce583a0d5d0aabf14494d61aea4649e18d1e6
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_network_security_group_security_rule_actions.py
|
sagar2938/oci-ansible-collection
|
5b8ce583a0d5d0aabf14494d61aea4649e18d1e6
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_network_security_group_security_rule_actions
short_description: Perform actions on a NetworkSecurityGroupSecurityRule resource in Oracle Cloud Infrastructure
description:
- Perform actions on a NetworkSecurityGroupSecurityRule resource in Oracle Cloud Infrastructure
- For I(action=add), adds one or more security rules to the specified network security group.
- For I(action=remove), removes one or more security rules from the specified network security group.
- For I(action=update), updates one or more security rules in the specified network security group.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
network_security_group_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the network security group.
type: str
aliases: ["id"]
required: true
security_rules:
description:
- The NSG security rules to add.
            - Applicable only for I(action=add) and I(action=update).
type: list
elements: dict
suboptions:
description:
description:
- An optional description of your choice for the rule. Avoid entering confidential information.
type: str
destination:
description:
- Conceptually, this is the range of IP addresses that a packet originating from the instance
can go to.
- "Allowed values:"
- " * An IP address range in CIDR notation. For example: `192.168.1.0/24` or `2001:0db8:0123:45::/56`
IPv6 addressing is supported for all commercial and government regions. See
L(IPv6 Addresses,https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/ipv6.htm)."
- " * The `cidrBlock` value for a L(Service,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/Service/), if you're
setting up a security rule for traffic destined for a particular `Service` through
a service gateway. For example: `oci-phx-objectstorage`."
- " * The L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/NetworkSecurityGroup/) in the same
VCN. The value can be the NSG that the rule belongs to if the rule's intent is to control
traffic between VNICs in the same NSG."
type: str
destination_type:
description:
- Type of destination for the rule. Required if `direction` = `EGRESS`.
- "Allowed values:"
- " * `CIDR_BLOCK`: If the rule's `destination` is an IP address range in CIDR notation."
- " * `SERVICE_CIDR_BLOCK`: If the rule's `destination` is the `cidrBlock` value for a
L(Service,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/Service/) (the rule is for traffic destined for a
particular `Service` through a service gateway)."
- " * `NETWORK_SECURITY_GROUP`: If the rule's `destination` is the
L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/NetworkSecurityGroup/)."
type: str
choices:
- "CIDR_BLOCK"
- "SERVICE_CIDR_BLOCK"
- "NETWORK_SECURITY_GROUP"
direction:
description:
- Direction of the security rule. Set to `EGRESS` for rules to allow outbound IP packets,
or `INGRESS` for rules to allow inbound IP packets.
type: str
choices:
- "EGRESS"
- "INGRESS"
required: true
icmp_options:
description:
- ""
type: dict
suboptions:
code:
description:
- The ICMP code (optional).
type: int
type:
description:
- The ICMP type.
type: int
required: true
is_stateless:
description:
- A stateless rule allows traffic in one direction. Remember to add a corresponding
stateless rule in the other direction if you need to support bidirectional traffic. For
example, if egress traffic allows TCP destination port 80, there should be an ingress
rule to allow TCP source port 80. Defaults to false, which means the rule is stateful
and a corresponding rule is not necessary for bidirectional traffic.
type: bool
protocol:
description:
- "The transport protocol. Specify either `all` or an IPv4 protocol number as
defined in
L(Protocol Numbers,http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml).
Options are supported only for ICMP (\\"1\\"), TCP (\\"6\\"), UDP (\\"17\\"), and ICMPv6 (\\"58\\")."
type: str
required: true
source:
description:
- Conceptually, this is the range of IP addresses that a packet coming into the instance
can come from.
- "Allowed values:"
- " * An IP address range in CIDR notation. For example: `192.168.1.0/24` or `2001:0db8:0123:45::/56`
IPv6 addressing is supported for all commercial and government regions. See
L(IPv6 Addresses,https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/ipv6.htm)."
- " * The `cidrBlock` value for a L(Service,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/Service/), if you're
setting up a security rule for traffic coming from a particular `Service` through
a service gateway. For example: `oci-phx-objectstorage`."
- " * The L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/NetworkSecurityGroup/) in the same
VCN. The value can be the NSG that the rule belongs to if the rule's intent is to control
traffic between VNICs in the same NSG."
type: str
source_type:
description:
- Type of source for the rule. Required if `direction` = `INGRESS`.
- " * `CIDR_BLOCK`: If the rule's `source` is an IP address range in CIDR notation."
- " * `SERVICE_CIDR_BLOCK`: If the rule's `source` is the `cidrBlock` value for a
L(Service,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/Service/) (the rule is for traffic coming from a
particular `Service` through a service gateway)."
- " * `NETWORK_SECURITY_GROUP`: If the rule's `source` is the
L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/NetworkSecurityGroup/)."
type: str
choices:
- "CIDR_BLOCK"
- "SERVICE_CIDR_BLOCK"
- "NETWORK_SECURITY_GROUP"
tcp_options:
description:
- ""
type: dict
suboptions:
destination_port_range:
description:
- ""
type: dict
suboptions:
max:
description:
- The maximum port number, which must not be less than the minimum port number. To specify
a single port number, set both the min and max to the same value.
type: int
required: true
min:
description:
- The minimum port number, which must not be greater than the maximum port number.
type: int
required: true
source_port_range:
description:
- ""
type: dict
suboptions:
max:
description:
- The maximum port number, which must not be less than the minimum port number. To specify
a single port number, set both the min and max to the same value.
type: int
required: true
min:
description:
- The minimum port number, which must not be greater than the maximum port number.
type: int
required: true
udp_options:
description:
- ""
type: dict
suboptions:
destination_port_range:
description:
- ""
type: dict
suboptions:
max:
description:
- The maximum port number, which must not be less than the minimum port number. To specify
a single port number, set both the min and max to the same value.
type: int
required: true
min:
description:
- The minimum port number, which must not be greater than the maximum port number.
type: int
required: true
source_port_range:
description:
- ""
type: dict
suboptions:
max:
description:
- The maximum port number, which must not be less than the minimum port number. To specify
a single port number, set both the min and max to the same value.
type: int
required: true
min:
description:
- The minimum port number, which must not be greater than the maximum port number.
type: int
required: true
id:
description:
- The Oracle-assigned ID of the security rule that you want to update. You can't change this value.
- "Example: `04ABEC`"
type: str
security_rule_ids:
description:
- The Oracle-assigned ID of each L(SecurityRule,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/SecurityRule/) to be deleted.
- Applicable only for I(action=remove).
type: list
elements: str
action:
description:
- The action to perform on the NetworkSecurityGroupSecurityRule.
type: str
required: true
choices:
- "add"
- "remove"
- "update"
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: Perform action add on network_security_group_security_rule
oci_network_security_group_security_rule_actions:
# required
network_security_group_id: "ocid1.networksecuritygroup.oc1..xxxxxxEXAMPLExxxxxx"
action: add
# optional
security_rules:
- # required
direction: EGRESS
protocol: protocol_example
# optional
description: description_example
destination: destination_example
destination_type: CIDR_BLOCK
icmp_options:
# required
type: 56
# optional
code: 56
is_stateless: true
source: source_example
source_type: CIDR_BLOCK
tcp_options:
# optional
destination_port_range:
# required
max: 56
min: 56
source_port_range:
# required
max: 56
min: 56
udp_options:
# optional
destination_port_range:
# required
max: 56
min: 56
source_port_range:
# required
max: 56
min: 56
id: 04ABEC
- name: Perform action remove on network_security_group_security_rule
oci_network_security_group_security_rule_actions:
# required
network_security_group_id: "ocid1.networksecuritygroup.oc1..xxxxxxEXAMPLExxxxxx"
action: remove
# optional
security_rule_ids: [ "null" ]
- name: Perform action update on network_security_group_security_rule
oci_network_security_group_security_rule_actions:
# required
network_security_group_id: "ocid1.networksecuritygroup.oc1..xxxxxxEXAMPLExxxxxx"
action: update
# optional
security_rules:
- # required
direction: EGRESS
protocol: protocol_example
# optional
description: description_example
destination: destination_example
destination_type: CIDR_BLOCK
icmp_options:
# required
type: 56
# optional
code: 56
is_stateless: true
source: source_example
source_type: CIDR_BLOCK
tcp_options:
# optional
destination_port_range:
# required
max: 56
min: 56
source_port_range:
# required
max: 56
min: 56
udp_options:
# optional
destination_port_range:
# required
max: 56
min: 56
source_port_range:
# required
max: 56
min: 56
id: 04ABEC
"""
RETURN = """
network_security_group_security_rule:
description:
- Details of the NetworkSecurityGroupSecurityRule resource acted upon by the current operation
returned: on success
type: complex
contains:
security_rules:
description:
- The NSG security rules that were added.
returned: on success
type: complex
contains:
description:
description:
- An optional description of your choice for the rule.
returned: on success
type: str
sample: description_example
destination:
description:
- Conceptually, this is the range of IP addresses that a packet originating from the instance
can go to.
- "Allowed values:"
- " * An IP address range in CIDR notation. For example: `192.168.1.0/24` or `2001:0db8:0123:45::/56`
IPv6 addressing is supported for all commercial and government regions.
See L(IPv6 Addresses,https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/ipv6.htm)."
- " * The `cidrBlock` value for a L(Service,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/Service/), if you're
setting up a security rule for traffic destined for a particular `Service` through
a service gateway. For example: `oci-phx-objectstorage`."
- " * The L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/NetworkSecurityGroup/) in the same
VCN. The value can be the NSG that the rule belongs to if the rule's intent is to control
traffic between VNICs in the same NSG."
returned: on success
type: str
sample: destination_example
destination_type:
description:
- Type of destination for the rule. Required if `direction` = `EGRESS`.
- "Allowed values:"
- " * `CIDR_BLOCK`: If the rule's `destination` is an IP address range in CIDR notation."
- " * `SERVICE_CIDR_BLOCK`: If the rule's `destination` is the `cidrBlock` value for a
L(Service,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/Service/) (the rule is for traffic destined for a
particular `Service` through a service gateway)."
- " * `NETWORK_SECURITY_GROUP`: If the rule's `destination` is the
L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/NetworkSecurityGroup/)."
returned: on success
type: str
sample: CIDR_BLOCK
direction:
description:
- Direction of the security rule. Set to `EGRESS` for rules to allow outbound IP packets,
or `INGRESS` for rules to allow inbound IP packets.
returned: on success
type: str
sample: EGRESS
icmp_options:
description:
- ""
returned: on success
type: complex
contains:
code:
description:
- The ICMP code (optional).
returned: on success
type: int
sample: 56
type:
description:
- The ICMP type.
returned: on success
type: int
sample: 56
id:
description:
- An Oracle-assigned identifier for the security rule. You specify this ID when you want to
update or delete the rule.
- "Example: `04ABEC`"
returned: on success
type: str
sample: 04ABEC
is_stateless:
description:
- A stateless rule allows traffic in one direction. Remember to add a corresponding
stateless rule in the other direction if you need to support bidirectional traffic. For
example, if egress traffic allows TCP destination port 80, there should be an ingress
rule to allow TCP source port 80. Defaults to false, which means the rule is stateful
and a corresponding rule is not necessary for bidirectional traffic.
returned: on success
type: bool
sample: true
is_valid:
description:
- Whether the rule is valid. The value is `True` when the rule is first created. If
the rule's `source` or `destination` is a network security group, the value changes to
`False` if that network security group is deleted.
returned: on success
type: bool
sample: true
protocol:
description:
- "The transport protocol. Specify either `all` or an IPv4 protocol number as
defined in
L(Protocol Numbers,http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml).
Options are supported only for ICMP (\\"1\\"), TCP (\\"6\\"), UDP (\\"17\\"), and ICMPv6 (\\"58\\")."
returned: on success
type: str
sample: protocol_example
source:
description:
- Conceptually, this is the range of IP addresses that a packet coming into the instance
can come from.
- "Allowed values:"
- " * An IP address range in CIDR notation. For example: `192.168.1.0/24` or `2001:0db8:0123:45::/56`
IPv6 addressing is supported for all commercial and government regions.
See L(IPv6 Addresses,https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/ipv6.htm)."
- " * The `cidrBlock` value for a L(Service,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/Service/), if you're
setting up a security rule for traffic coming from a particular `Service` through
a service gateway. For example: `oci-phx-objectstorage`."
- " * The L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/NetworkSecurityGroup/) in the same
VCN. The value can be the NSG that the rule belongs to if the rule's intent is to control
traffic between VNICs in the same NSG."
returned: on success
type: str
sample: source_example
source_type:
description:
- Type of source for the rule. Required if `direction` = `INGRESS`.
- " * `CIDR_BLOCK`: If the rule's `source` is an IP address range in CIDR notation."
- " * `SERVICE_CIDR_BLOCK`: If the rule's `source` is the `cidrBlock` value for a
L(Service,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/Service/) (the rule is for traffic coming from a
particular `Service` through a service gateway)."
- " * `NETWORK_SECURITY_GROUP`: If the rule's `source` is the
L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of a
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/latest/NetworkSecurityGroup/)."
returned: on success
type: str
sample: CIDR_BLOCK
tcp_options:
description:
- ""
returned: on success
type: complex
contains:
destination_port_range:
description:
- ""
returned: on success
type: complex
contains:
max:
description:
- The maximum port number, which must not be less than the minimum port number. To specify
a single port number, set both the min and max to the same value.
returned: on success
type: int
sample: 56
min:
description:
- The minimum port number, which must not be greater than the maximum port number.
returned: on success
type: int
sample: 56
source_port_range:
description:
- ""
returned: on success
type: complex
contains:
max:
description:
- The maximum port number, which must not be less than the minimum port number. To specify
a single port number, set both the min and max to the same value.
returned: on success
type: int
sample: 56
min:
description:
- The minimum port number, which must not be greater than the maximum port number.
returned: on success
type: int
sample: 56
time_created:
description:
- The date and time the security rule was created. Format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
udp_options:
description:
- ""
returned: on success
type: complex
contains:
destination_port_range:
description:
- ""
returned: on success
type: complex
contains:
max:
description:
- The maximum port number, which must not be less than the minimum port number. To specify
a single port number, set both the min and max to the same value.
returned: on success
type: int
sample: 56
min:
description:
- The minimum port number, which must not be greater than the maximum port number.
returned: on success
type: int
sample: 56
source_port_range:
description:
- ""
returned: on success
type: complex
contains:
max:
description:
- The maximum port number, which must not be less than the minimum port number. To specify
a single port number, set both the min and max to the same value.
returned: on success
type: int
sample: 56
min:
description:
- The minimum port number, which must not be greater than the maximum port number.
returned: on success
type: int
sample: 56
sample: {
"security_rules": [{
"description": "description_example",
"destination": "destination_example",
"destination_type": "CIDR_BLOCK",
"direction": "EGRESS",
"icmp_options": {
"code": 56,
"type": 56
},
"id": "04ABEC",
"is_stateless": true,
"is_valid": true,
"protocol": "protocol_example",
"source": "source_example",
"source_type": "CIDR_BLOCK",
"tcp_options": {
"destination_port_range": {
"max": 56,
"min": 56
},
"source_port_range": {
"max": 56,
"min": 56
}
},
"time_created": "2013-10-20T19:20:30+01:00",
"udp_options": {
"destination_port_range": {
"max": 56,
"min": 56
},
"source_port_range": {
"max": 56,
"min": 56
}
}
}]
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIActionsHelperBase,
get_custom_class,
)
try:
from oci.core import VirtualNetworkClient
from oci.core.models import AddNetworkSecurityGroupSecurityRulesDetails
from oci.core.models import RemoveNetworkSecurityGroupSecurityRulesDetails
from oci.core.models import UpdateNetworkSecurityGroupSecurityRulesDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class NetworkSecurityGroupSecurityRuleActionsHelperGen(OCIActionsHelperBase):
"""
Supported actions:
add
remove
update
"""
@staticmethod
def get_module_resource_id_param():
return "network_security_group_id"
def get_module_resource_id(self):
return self.module.params.get("network_security_group_id")
def add(self):
action_details = oci_common_utils.convert_input_data_to_model_class(
self.module.params, AddNetworkSecurityGroupSecurityRulesDetails
)
return oci_wait_utils.call_and_wait(
call_fn=self.client.add_network_security_group_security_rules,
call_fn_args=(),
call_fn_kwargs=dict(
network_security_group_id=self.module.params.get(
"network_security_group_id"
),
add_network_security_group_security_rules_details=action_details,
),
waiter_type=oci_wait_utils.NONE_WAITER_KEY,
operation="{0}_{1}".format(
self.module.params.get("action").upper(),
oci_common_utils.ACTION_OPERATION_KEY,
),
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_action_desired_states(
self.module.params.get("action")
),
)
def remove(self):
action_details = oci_common_utils.convert_input_data_to_model_class(
self.module.params, RemoveNetworkSecurityGroupSecurityRulesDetails
)
return oci_wait_utils.call_and_wait(
call_fn=self.client.remove_network_security_group_security_rules,
call_fn_args=(),
call_fn_kwargs=dict(
network_security_group_id=self.module.params.get(
"network_security_group_id"
),
remove_network_security_group_security_rules_details=action_details,
),
waiter_type=oci_wait_utils.NONE_WAITER_KEY,
operation="{0}_{1}".format(
self.module.params.get("action").upper(),
oci_common_utils.ACTION_OPERATION_KEY,
),
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_action_desired_states(
self.module.params.get("action")
),
)
def update(self):
action_details = oci_common_utils.convert_input_data_to_model_class(
self.module.params, UpdateNetworkSecurityGroupSecurityRulesDetails
)
return oci_wait_utils.call_and_wait(
call_fn=self.client.update_network_security_group_security_rules,
call_fn_args=(),
call_fn_kwargs=dict(
network_security_group_id=self.module.params.get(
"network_security_group_id"
),
update_network_security_group_security_rules_details=action_details,
),
waiter_type=oci_wait_utils.NONE_WAITER_KEY,
operation="{0}_{1}".format(
self.module.params.get("action").upper(),
oci_common_utils.ACTION_OPERATION_KEY,
),
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_action_desired_states(
self.module.params.get("action")
),
)
NetworkSecurityGroupSecurityRuleActionsHelperCustom = get_custom_class(
"NetworkSecurityGroupSecurityRuleActionsHelperCustom"
)
class ResourceHelper(
NetworkSecurityGroupSecurityRuleActionsHelperCustom,
NetworkSecurityGroupSecurityRuleActionsHelperGen,
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec(
supports_create=False, supports_wait=False
)
module_args.update(
dict(
network_security_group_id=dict(aliases=["id"], type="str", required=True),
security_rules=dict(
type="list",
elements="dict",
options=dict(
description=dict(type="str"),
destination=dict(type="str"),
destination_type=dict(
type="str",
choices=[
"CIDR_BLOCK",
"SERVICE_CIDR_BLOCK",
"NETWORK_SECURITY_GROUP",
],
),
direction=dict(
type="str", required=True, choices=["EGRESS", "INGRESS"]
),
icmp_options=dict(
type="dict",
options=dict(
code=dict(type="int"), type=dict(type="int", required=True)
),
),
is_stateless=dict(type="bool"),
protocol=dict(type="str", required=True),
source=dict(type="str"),
source_type=dict(
type="str",
choices=[
"CIDR_BLOCK",
"SERVICE_CIDR_BLOCK",
"NETWORK_SECURITY_GROUP",
],
),
tcp_options=dict(
type="dict",
options=dict(
destination_port_range=dict(
type="dict",
options=dict(
max=dict(type="int", required=True),
min=dict(type="int", required=True),
),
),
source_port_range=dict(
type="dict",
options=dict(
max=dict(type="int", required=True),
min=dict(type="int", required=True),
),
),
),
),
udp_options=dict(
type="dict",
options=dict(
destination_port_range=dict(
type="dict",
options=dict(
max=dict(type="int", required=True),
min=dict(type="int", required=True),
),
),
source_port_range=dict(
type="dict",
options=dict(
max=dict(type="int", required=True),
min=dict(type="int", required=True),
),
),
),
),
id=dict(type="str"),
),
),
security_rule_ids=dict(type="list", elements="str"),
action=dict(type="str", required=True, choices=["add", "remove", "update"]),
)
)
module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_helper = ResourceHelper(
module=module,
resource_type="network_security_group_security_rule",
service_client_class=VirtualNetworkClient,
namespace="core",
)
result = resource_helper.perform_action(module.params.get("action"))
module.exit_json(**result)
if __name__ == "__main__":
main()
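# A minimal usage sketch (an assumption, not part of the source module): the
# playbook task below shows how the "add" action maps onto the argument spec
# above; the module name is illustrative.
#
#   - name: Add an ingress HTTPS rule to a network security group
#     oci_network_security_group_security_rule_actions:
#       network_security_group_id: "ocid1.networksecuritygroup.oc1..exampleuniqueID"
#       action: add
#       security_rules:
#         - direction: INGRESS
#           protocol: "6"
#           source: "10.0.0.0/24"
#           source_type: CIDR_BLOCK
#           tcp_options:
#             destination_port_range:
#               min: 443
#               max: 443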
407b8cd8d133fdfda41ce4409b4e94e12a837c25 | 2,996 | py | Python | extensions/.stubs/clrclasses/Autodesk/AutoCAD/GraphicsSystem/__init__.py | vicwjb/Pycad | 7391cd694b7a91ad9f9964ec95833c1081bc1f84 | ["MIT"] | 1 | 2020-03-25T03:27:24.000Z | 2020-03-25T03:27:24.000Z |
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import CertificationData
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ClientViewInfo
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Configuration
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ConfigWasModifiedEventHandler
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import DefaultLightingType
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import DegradationChannel
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Device
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import DeviceType
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import DriverInfo
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import EffectListType
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import EffectStatus
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ErrorStatus
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import GetInterfaceFunction
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import GraphicsKernel
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import GsToBeUnloadedEventHandler
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Handedness
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import HighlightStyle
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import InvalidationHint
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import KernelDescriptor
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import LinePattern
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import LineWeight
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Manager
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Manager2
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Model
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Node
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Projection
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import Quality
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ReleaseInterfaceFunction
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import RendererType
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import RenderType
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import StereoParameters
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import View
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ViewEventArgs
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ViewportBorderProperties
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ViewToBeDestroyedEventHandler
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ViewToBeUpdatedEventHandler
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ViewUpdateEventArgs
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ViewUpdateFlags
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ViewWasCreatedEventHandler
from __clrclasses__.Autodesk.AutoCAD.GraphicsSystem import ViewWasUpdatedEventHandler
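# These re-exports exist so type checkers and IDEs can resolve the CLR-backed
# Autodesk.AutoCAD.GraphicsSystem names. Illustrative consumption only (the
# runtime import path is an assumption, not part of this stub):
#
#     from Autodesk.AutoCAD.GraphicsSystem import Manager, Device, View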
40eae4f02c94afade2d0152da00eb44e02e91276 | 72,900 | py | Python | corehq/apps/data_interfaces/tests/test_scheduling_integration.py | akashkj/commcare-hq | b00a62336ec26cea1477dfb8c048c548cc462831 | ["BSD-3-Clause"] |
from contextlib import contextmanager
from datetime import date, datetime, time
from django.db.models import Q
from django.test import TestCase
from unittest.mock import call, patch
from casexml.apps.case.tests.util import create_case
from corehq.apps.app_manager.models import (
AdvancedForm,
AdvancedModule,
FormSchedule,
SchedulePhase,
SchedulePhaseForm,
ScheduleVisit,
)
from corehq.apps.data_interfaces.models import (
AutomaticUpdateRule,
CreateScheduleInstanceActionDefinition,
MatchPropertyDefinition,
VisitSchedulerIntegrationHelper,
)
from corehq.apps.data_interfaces.tests.util import create_empty_rule
from corehq.apps.domain.models import Domain
from corehq.apps.hqcase.utils import update_case
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.messaging.scheduling.const import (
VISIT_WINDOW_DUE_DATE,
VISIT_WINDOW_END,
VISIT_WINDOW_START,
)
from corehq.messaging.scheduling.models import (
AlertSchedule,
CasePropertyTimedEvent,
SMSContent,
TimedEvent,
TimedSchedule,
)
from corehq.messaging.scheduling.scheduling_partitioned.dbaccessors import (
delete_case_schedule_instance,
get_case_alert_schedule_instances_for_schedule,
get_case_timed_schedule_instances_for_schedule,
)
from corehq.messaging.scheduling.scheduling_partitioned.models import (
CaseAlertScheduleInstance,
CaseTimedScheduleInstance,
)
from corehq.messaging.scheduling.tasks import (
handle_case_timed_schedule_instance,
)
from corehq.messaging.scheduling.tests.util import (
delete_alert_schedules,
delete_timed_schedules,
)
from corehq.messaging.tasks import (
run_messaging_rule,
run_messaging_rule_for_shard,
sync_case_for_messaging,
sync_case_for_messaging_rule,
)
from corehq.sql_db.util import paginate_query_across_partitioned_databases
def get_visit_scheduler_module_and_form_for_test():
form = AdvancedForm(
schedule=FormSchedule(
unique_id='form-unique-id-1',
schedule_form_id='form1',
enabled=True,
visits=[
ScheduleVisit(due=1, starts=-1, expires=1, repeats=False, increment=None),
ScheduleVisit(due=7, starts=-2, expires=3, repeats=False, increment=None),
ScheduleVisit(due=None, starts=None, expires=None, repeats=True, increment=14),
],
)
)
module = AdvancedModule(
schedule_phases=[
SchedulePhase(anchor='edd', forms=[]),
SchedulePhase(anchor='add', forms=[SchedulePhaseForm(form_id=form.unique_id)]),
],
forms=[form],
)
return module, form
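# Reading the helper above: ScheduleVisit windows are expressed relative to each
# visit's due date ("starts" days before it opens, "expires" days after it
# closes), and the second SchedulePhase anchors the schedule to the case's 'add'
# date. The VISIT_WINDOW_START/_DUE_DATE/_END constants imported earlier pick
# which edge of that window a reminder is pinned to.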
class CaseRuleSchedulingIntegrationTest(TestCase):
domain = 'case-rule-scheduling-test'
@classmethod
def setUpClass(cls):
super(CaseRuleSchedulingIntegrationTest, cls).setUpClass()
cls.domain_obj = Domain(
name=cls.domain,
default_timezone='America/New_York',
)
cls.domain_obj.save()
cls.user = CommCareUser.create(cls.domain, 'test1', 'abc', None, None)
@classmethod
def tearDownClass(cls):
cls.user.delete(cls.domain, deleted_by=None)
cls.domain_obj.delete()
super(CaseRuleSchedulingIntegrationTest, cls).tearDownClass()
def tearDown(self):
for rule in AutomaticUpdateRule.objects.filter(domain=self.domain):
rule.hard_delete()
for instance in paginate_query_across_partitioned_databases(
CaseAlertScheduleInstance, Q(domain=self.domain)):
delete_case_schedule_instance(instance)
for instance in paginate_query_across_partitioned_databases(
CaseTimedScheduleInstance, Q(domain=self.domain)):
delete_case_schedule_instance(instance)
delete_alert_schedules(self.domain)
delete_timed_schedules(self.domain)
@patch('corehq.messaging.scheduling.util.utcnow')
def test_timed_schedule_instance_creation(self, utcnow_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'})
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_criteria(
MatchPropertyDefinition,
property_name='start_sending',
property_value='Y',
match_type=MatchPropertyDefinition.MATCH_EQUAL,
)
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),)
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2017, 5, 1, 7, 0)
with create_case(self.domain, 'person') as case:
# Rule does not match, no instances created
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
# Make the rule match. On the first iteration, the instance is created. On the second,
# no new instance is created since it already exists.
for minute in [1, 2]:
utcnow_patch.return_value = datetime(2017, 5, 1, 7, minute)
update_case(self.domain, case.case_id, case_properties={'start_sending': 'Y'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 5, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 5, 1, 13, 0))
self.assertTrue(instances[0].active)
# Make the rule not match. Instance should no longer exist.
utcnow_patch.return_value = datetime(2017, 5, 1, 7, 3)
update_case(self.domain, case.case_id, case_properties={'start_sending': 'N'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
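# Note on the expected timestamps above: next_event_due is stored in UTC, so the
# 9:00 TimedEvent in the domain's America/New_York timezone (EDT, UTC-4 on
# 2017-05-01) surfaces as 13:00 UTC.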
@patch('corehq.messaging.scheduling.util.utcnow')
def test_alert_schedule_instance_creation(self, utcnow_patch):
schedule = AlertSchedule.create_simple_alert(
self.domain,
SMSContent(message={'en': 'Hello'})
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_criteria(
MatchPropertyDefinition,
property_name='start_sending',
property_value='Y',
match_type=MatchPropertyDefinition.MATCH_EQUAL,
)
rule.add_action(
CreateScheduleInstanceActionDefinition,
alert_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),)
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2017, 5, 1, 7, 0)
with create_case(self.domain, 'person') as case:
# Rule does not match, no instances created
instances = get_case_alert_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
# Make the rule match. On the first iteration, the instance is created. On the second,
# no new instance is created since it already exists.
for minute in range(1, 3):
utcnow_patch.return_value = datetime(2017, 5, 1, 7, minute)
update_case(self.domain, case.case_id, case_properties={'start_sending': 'Y'})
instances = get_case_alert_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].alert_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 5, 1, 7, 1))
self.assertTrue(instances[0].active)
# Make the rule not match. Instance should no longer exist.
utcnow_patch.return_value = datetime(2017, 5, 1, 7, 3)
update_case(self.domain, case.case_id, case_properties={'start_sending': 'N'})
instances = get_case_alert_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
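# Unlike a timed schedule, a simple AlertSchedule has no preferred send time:
# next_event_due is simply the utcnow value at the moment the rule first matched
# (2017-05-01 07:01), i.e. the alert is due immediately.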
@patch('corehq.messaging.scheduling.util.utcnow')
def test_alert_schedule_reset(self, utcnow_patch):
schedule = AlertSchedule.create_simple_alert(
self.domain,
SMSContent(message={'en': 'Hello'})
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_criteria(
MatchPropertyDefinition,
property_name='start_sending',
property_value='Y',
match_type=MatchPropertyDefinition.MATCH_EQUAL,
)
rule.add_action(
CreateScheduleInstanceActionDefinition,
alert_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
reset_case_property_name='reset_property',
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2017, 5, 1, 7, 0)
with create_case(self.domain, 'person') as case:
# Rule does not match, no instances created
instances = get_case_alert_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
# Make the rule match. On the first iteration, the instance is created. On the second,
# nothing is changed.
for minute in (1, 2):
utcnow_patch.return_value = datetime(2017, 5, 1, 7, minute)
update_case(self.domain, case.case_id,
case_properties={'start_sending': 'Y', 'reset_property': 'a'})
instances = get_case_alert_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].alert_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 5, 1, 7, 1))
self.assertEqual(instances[0].last_reset_case_property_value, 'a')
self.assertTrue(instances[0].active)
# Update the reset property, and the instance is reset.
utcnow_patch.return_value = datetime(2017, 6, 1, 7, 0)
update_case(self.domain, case.case_id, case_properties={'reset_property': 'b'})
instances = get_case_alert_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].alert_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 6, 1, 7, 0))
self.assertEqual(instances[0].last_reset_case_property_value, 'b')
self.assertTrue(instances[0].active)
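# The reset works because the instance tracks last_reset_case_property_value:
# when 'reset_property' changes from 'a' to 'b', the alert is re-armed from the
# current utcnow, which is why next_event_due jumps to 2017-06-01 07:00.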
@patch('corehq.messaging.scheduling.util.utcnow')
def test_timed_schedule_reset(self, utcnow_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'})
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_criteria(
MatchPropertyDefinition,
property_name='start_sending',
property_value='Y',
match_type=MatchPropertyDefinition.MATCH_EQUAL,
)
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
reset_case_property_name='reset_property',
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2017, 5, 1, 7, 0)
with create_case(self.domain, 'person') as case:
# Rule does not match, no instances created
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
# Make the rule match. On the first iteration, the instance is created. On the second,
# no new instance is created since it already exists.
for day in [1, 2]:
utcnow_patch.return_value = datetime(2017, 5, day, 20, 0)
update_case(self.domain, case.case_id,
case_properties={'start_sending': 'Y', 'reset_property': '1'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 5, 2))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 5, 2, 13, 0))
self.assertTrue(instances[0].active)
# Change the value of 'reset_property', and the start date should be reset
utcnow_patch.return_value = datetime(2017, 5, 2, 20, 0)
update_case(self.domain, case.case_id, case_properties={'reset_property': '2'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 5, 3))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 5, 3, 13, 0))
self.assertTrue(instances[0].active)
# Make the rule not match. Instance should no longer exist.
utcnow_patch.return_value = datetime(2017, 5, 2, 20, 0)
update_case(self.domain, case.case_id, case_properties={'start_sending': 'N'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
@patch('corehq.messaging.scheduling.models.content.SMSContent.send')
@patch('corehq.messaging.scheduling.util.utcnow')
def test_timed_schedule_stop_date_case_property(self, utcnow_patch, send_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'}),
extra_options={'stop_date_case_property_name': 'stop_date'},
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2018, 7, 1, 7, 0)
with create_case(self.domain, 'person') as case:
# The case matches the rule and is set up to start sending
update_case(self.domain, case.case_id, case_properties={'stop_date': '2018-07-03'})
[instance] = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 7, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 1)
self.assertEqual(instance.next_event_due, datetime(2018, 7, 1, 13, 0))
self.assertTrue(instance.active)
# Send the first event, and schedule the next event for the next day
utcnow_patch.return_value = datetime(2018, 7, 1, 13, 1)
handle_case_timed_schedule_instance(case.case_id, instance.schedule_instance_id, self.domain)
self.assertEqual(send_patch.call_count, 1)
[instance] = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 7, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 2)
self.assertEqual(instance.next_event_due, datetime(2018, 7, 2, 13, 0))
self.assertTrue(instance.active)
# Send the second event, and deactivate because the stop date has been reached
utcnow_patch.return_value = datetime(2018, 7, 2, 13, 1)
handle_case_timed_schedule_instance(case.case_id, instance.schedule_instance_id, self.domain)
self.assertEqual(send_patch.call_count, 2)
[instance] = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 7, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 3)
self.assertEqual(instance.next_event_due, datetime(2018, 7, 3, 13, 0))
self.assertFalse(instance.active)
# Update the stop date and the instance should be reactivated
update_case(self.domain, case.case_id, case_properties={'stop_date': '2018-08-01'})
[instance] = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 7, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 3)
self.assertEqual(instance.next_event_due, datetime(2018, 7, 3, 13, 0))
self.assertTrue(instance.active)
# Update the stop date and the instance should be deactivated
update_case(self.domain, case.case_id, case_properties={'stop_date': '2018-06-01'})
[instance] = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 7, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 3)
self.assertEqual(instance.next_event_due, datetime(2018, 7, 3, 13, 0))
self.assertFalse(instance.active)
# Update the stop date and the instance should be reactivated and fast-forwarded
utcnow_patch.return_value = datetime(2018, 7, 4, 13, 1)
update_case(self.domain, case.case_id, case_properties={'stop_date': '2018-08-01'})
[instance] = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 7, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 5)
self.assertEqual(instance.next_event_due, datetime(2018, 7, 5, 13, 0))
self.assertTrue(instance.active)
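# Walking through the stop-date handling above: the iteration counter keeps
# advancing even while the instance is inactive, so when the stop date moves
# back out to 2018-08-01 at 7/4 13:01 UTC, reactivation fast-forwards past the
# missed 7/3 and 7/4 events (schedule_iteration_num 5, next event due 7/5).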
@patch('corehq.messaging.scheduling.util.utcnow')
def test_timed_schedule_start_date_case_property(self, utcnow_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'}),
total_iterations=1,
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_criteria(
MatchPropertyDefinition,
property_name='start_sending',
property_value='Y',
match_type=MatchPropertyDefinition.MATCH_EQUAL,
)
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
start_date_case_property='appointment_date',
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2017, 5, 1, 7, 0)
with create_case(self.domain, 'person') as case:
# Rule does not match, no instances created
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
# Make the rule match, but don't give a start date. No instances are created.
update_case(self.domain, case.case_id, case_properties={'start_sending': 'Y'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
# Give a start date. On the first iteration, the instance is created. On the second,
# no new instance is created since it already exists.
for minute in [1, 2]:
utcnow_patch.return_value = datetime(2017, 5, 1, 7, minute)
update_case(self.domain, case.case_id, case_properties={'appointment_date': '2017-06-01'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 6, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 6, 1, 13, 0))
self.assertTrue(instances[0].active)
# Update start date. Instance is updated with the new start date.
update_case(self.domain, case.case_id, case_properties={'appointment_date': '2017-07-01'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 7, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 7, 1, 13, 0))
self.assertTrue(instances[0].active)
# Set start date to the past. Instance is updated with new start date and is inactive
update_case(self.domain, case.case_id, case_properties={'appointment_date': '2017-04-01'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 4, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 2)
self.assertEqual(instances[0].next_event_due, datetime(2017, 4, 2, 13, 0))
self.assertFalse(instances[0].active)
# Give an invalid start date. Instance should no longer exist.
update_case(self.domain, case.case_id, case_properties={'appointment_date': 'xyz'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
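# Summary of start_date_case_property behavior exercised above: a parseable date
# (re)anchors the schedule, a date in the past leaves the instance in place but
# inactive, and an unparseable value ('xyz') removes the instance entirely.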
@contextmanager
def setup_timed_schedule_with_case(self, utcnow_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'}),
total_iterations=1,
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_criteria(
MatchPropertyDefinition,
property_name='start_sending',
property_value='Y',
match_type=MatchPropertyDefinition.MATCH_EQUAL,
)
action, definition = rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
specific_start_date=date(2018, 3, 1),
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2018, 2, 28, 7, 0)
with create_case(self.domain, 'person') as case:
yield schedule, rule, definition, case
@patch('corehq.messaging.scheduling.util.utcnow')
def test_timed_schedule_specific_start_date(self, utcnow_patch):
setup = self.setup_timed_schedule_with_case(utcnow_patch)
with setup as (schedule, rule, definition, case):
# Rule does not match, no instances created
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
# Make the rule match. On the first iteration, the instance is created. On the second,
# no new instance is created since it already exists.
for minute in [1, 2]:
utcnow_patch.return_value = datetime(2018, 2, 28, 7, minute)
update_case(self.domain, case.case_id, case_properties={'start_sending': 'Y'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2018, 3, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2018, 3, 1, 14, 0))
self.assertTrue(instances[0].active)
# Update start date. Instance is updated with new start date.
definition.specific_start_date = date(2018, 4, 1)
definition.save()
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
sync_case_for_messaging_rule(self.domain, case.case_id, rule.pk)
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2018, 4, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2018, 4, 1, 13, 0))
self.assertTrue(instances[0].active)
# Set start date to the past. Instance is updated with new start date and is inactive.
definition.specific_start_date = date(2018, 2, 1)
definition.save()
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
sync_case_for_messaging_rule(self.domain, case.case_id, rule.pk)
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2018, 2, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 2)
self.assertEqual(instances[0].next_event_due, datetime(2018, 2, 2, 14, 0))
self.assertFalse(instances[0].active)
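# The shift between 14:00 and 13:00 UTC in the assertions above is daylight
# saving time: 2018-03-01 and 2018-02-02 fall in EST (UTC-5), while 2018-04-01
# falls in EDT (UTC-4), so the same 9:00 local event maps to different UTC hours.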
@patch('corehq.messaging.scheduling.util.utcnow')
def test_sync_rule_on_hard_deleted_case(self, utcnow_patch):
setup = self.setup_timed_schedule_with_case(utcnow_patch)
with setup as (schedule, rule, definition, case):
utcnow_patch.return_value = datetime(2018, 2, 28, 7, 1)
update_case(self.domain, case.case_id, case_properties={'start_sending': 'Y'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
sync_case_for_messaging_rule(self.domain, case.case_id, rule.pk)
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
@patch('corehq.messaging.scheduling.util.utcnow')
def test_sync_messaging_on_hard_deleted_case(self, utcnow_patch):
setup = self.setup_timed_schedule_with_case(utcnow_patch)
with setup as (schedule, rule, definition, case):
utcnow_patch.return_value = datetime(2018, 2, 28, 7, 1)
update_case(self.domain, case.case_id, case_properties={'start_sending': 'Y'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
sync_case_for_messaging(self.domain, case.case_id)
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
@patch('corehq.messaging.scheduling.util.utcnow')
def test_timed_schedule_case_property_timed_event(self, utcnow_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
CasePropertyTimedEvent(case_property_name='reminder_time'),
SMSContent(message={'en': 'Hello'})
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_criteria(
MatchPropertyDefinition,
property_name='start_sending',
property_value='Y',
match_type=MatchPropertyDefinition.MATCH_EQUAL,
)
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2017, 5, 1, 7, 0)
with create_case(self.domain, 'person') as case:
# Rule does not match, no instances created
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
# Make the rule match, but don't give a preferred time. Default scheduling time is used.
update_case(self.domain, case.case_id, case_properties={'start_sending': 'Y'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 5, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 5, 1, 16, 0))
self.assertTrue(instances[0].active)
# Update the preferred time, and the schedule should recalculate
update_case(self.domain, case.case_id, case_properties={'reminder_time': '09:00'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 5, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 5, 1, 13, 0))
self.assertTrue(instances[0].active)
# Update the preferred time to a bad value and the default time is used again.
update_case(self.domain, case.case_id, case_properties={'reminder_time': 'x'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 5, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 5, 1, 16, 0))
self.assertTrue(instances[0].active)
@patch('corehq.apps.data_interfaces.models.'
'VisitSchedulerIntegrationHelper.get_visit_scheduler_module_and_form')
@patch('corehq.messaging.scheduling.util.utcnow')
def test_visit_scheduler_integration(self, utcnow_patch, module_and_form_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'}),
total_iterations=1,
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
_, definition = rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),)
)
module, form = get_visit_scheduler_module_and_form_for_test()
definition.set_scheduler_module_info(CreateScheduleInstanceActionDefinition.SchedulerModuleInfo(
enabled=True,
app_id='n/a for test',
form_unique_id=form.unique_id,
visit_number=1,
window_position=VISIT_WINDOW_START,
))
definition.save()
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2017, 8, 1, 7, 0)
module_and_form_patch.return_value = module, form
with create_case(self.domain, 'person') as case:
# Schedule phase does not match, nothing is scheduled
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
update_case(self.domain, case.case_id,
case_properties={'add': '2017-08-01', 'current_schedule_phase': '2'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 8, 6))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 8, 6, 13, 0))
self.assertTrue(instances[0].active)
# If the anchor date gets updated (due to correction, for example), the schedule recalculates
update_case(self.domain, case.case_id, case_properties={'add': '2017-08-10'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 8, 15))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 8, 15, 13, 0))
self.assertTrue(instances[0].active)
# If the anchor date is in the past, the schedule instance is deactivated
update_case(self.domain, case.case_id, case_properties={'add': '2017-07-01'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 7, 6))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 2)
self.assertEqual(instances[0].next_event_due, datetime(2017, 7, 7, 13, 0))
self.assertFalse(instances[0].active)
# If the anchor date is reset, the schedule instance is reactivated
update_case(self.domain, case.case_id, case_properties={'add': '2017-08-01'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 8, 6))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 8, 6, 13, 0))
self.assertTrue(instances[0].active)
# Making an arbitrary update doesn't trigger any schedule recalculation
with patch('corehq.messaging.scheduling.scheduling_partitioned.models.'
'AbstractTimedScheduleInstance.recalculate_schedule') as recalculate_patch:
update_case(self.domain, case.case_id, case_properties={'new_property': 'new value'})
self.assertEqual(recalculate_patch.call_count, 0)
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 8, 6))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 8, 6, 13, 0))
self.assertTrue(instances[0].active)
# Terminate the schedule, no more schedule instances should be scheduled
update_case(self.domain, case.case_id, case_properties={'current_schedule_phase': '-1'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 0)
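# The start dates asserted in this test follow from the helper form's schedule:
# visit_number=1 selects the visit with due=7, starts=-2, and
# window_position=VISIT_WINDOW_START pins the reminder to anchor + due + starts,
# e.g. 2017-08-01 + 7 - 2 = 2017-08-06 (and 2017-08-10 + 5 = 2017-08-15 after
# the anchor date correction).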
@patch('corehq.messaging.scheduling.util.utcnow')
def test_start_offset(self, utcnow_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'}),
start_offset=2,
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
utcnow_patch.return_value = datetime(2017, 8, 1, 15, 0)
with create_case(self.domain, 'person') as case:
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 8, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 8, 3, 13, 0))
self.assertEqual(instances[0].schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instances[0].active)
# Change the schedule's start offset and force a case update to reprocess the schedule instance.
# The start date should not change, but the schedule instance should respond to the new start offset
# by calculating a new next_event_due timestamp.
schedule.start_offset = 5
schedule.save()
schedule = TimedSchedule.objects.get(schedule_id=schedule.schedule_id)
utcnow_patch.return_value = datetime(2017, 8, 4, 7, 0)
update_case(self.domain, case.case_id, case_properties={'new_property': 'new value'})
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 8, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 8, 6, 13, 0))
self.assertEqual(instances[0].schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instances[0].active)
# Making another arbitrary update doesn't trigger any schedule recalculation
with patch('corehq.messaging.scheduling.scheduling_partitioned.models.'
'AbstractTimedScheduleInstance.recalculate_schedule') as recalculate_patch:
update_case(self.domain, case.case_id, case_properties={'new_property': 'new value 2'})
self.assertEqual(recalculate_patch.call_count, 0)
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
self.assertEqual(instances[0].case_id, case.case_id)
self.assertEqual(instances[0].rule_id, rule.pk)
self.assertEqual(instances[0].timed_schedule_id, schedule.schedule_id)
self.assertEqual(instances[0].start_date, date(2017, 8, 1))
self.assertEqual(instances[0].domain, self.domain)
self.assertEqual(instances[0].recipient_type, 'CommCareUser')
self.assertEqual(instances[0].recipient_id, self.user.get_id)
self.assertEqual(instances[0].current_event_num, 0)
self.assertEqual(instances[0].schedule_iteration_num, 1)
self.assertEqual(instances[0].next_event_due, datetime(2017, 8, 6, 13, 0))
self.assertEqual(instances[0].schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instances[0].active)
def _setup_rule(self):
schedule = AlertSchedule.create_simple_alert(
self.domain,
SMSContent(message={'en': 'Hello'})
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rule.add_action(
CreateScheduleInstanceActionDefinition,
alert_schedule_id=schedule.schedule_id,
recipients=(('Self', None),),
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
return rule.pk
@patch('corehq.messaging.tasks.sync_case_chunk_for_messaging_rule.delay')
@patch('corehq.messaging.tasks.run_messaging_rule_for_shard.delay')
@patch('corehq.apps.es.es_query.ESQuery.count', return_value=10)
def test_run_messaging_rule_sharded(self, es_patch, shard_rule_patch, sync_patch):
rule_id = self._setup_rule()
with create_case(self.domain, 'person') as case1, create_case(self.domain, 'person') as case2:
run_messaging_rule(self.domain, rule_id)
shard_rule_patch.assert_has_calls(
[
call(self.domain, rule_id, 'default')
],
any_order=True
)
run_messaging_rule_for_shard(self.domain, rule_id, 'default')
sync_patch.assert_has_calls(
[
call(self.domain, (case1.case_id, case2.case_id), rule_id)
],
any_order=True
)
self.assertEqual(es_patch.call_count, 1)
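# What this test pins down about fan-out: run_messaging_rule queues exactly one
# run_messaging_rule_for_shard task per database alias ('default' here), and the
# shard task batches matching case ids into sync_case_chunk_for_messaging_rule
# calls rather than one task per case.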
@patch('corehq.messaging.scheduling.models.content.SMSContent.send')
@patch('corehq.messaging.scheduling.util.utcnow')
def test_next_available_daily_slot(self, utcnow_patch, send_patch):
schedule = TimedSchedule.create_simple_daily_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'}),
total_iterations=2,
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
# Leave all start date information blank so it schedules for the next available daily slot
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
# It's 3/1 at 7am local time; the schedule instance gets scheduled for the same day
utcnow_patch.return_value = datetime(2018, 3, 1, 12, 0)
with create_case(self.domain, 'person') as case:
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
instance = instances[0]
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 3, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 1)
self.assertEqual(instance.next_event_due, datetime(2018, 3, 1, 14, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
# It's 3/1 at 10am local time; the schedule instance gets scheduled for the next day
utcnow_patch.return_value = datetime(2018, 3, 1, 15, 0)
with create_case(self.domain, 'person') as case:
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
instance = instances[0]
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 3, 2))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 1)
self.assertEqual(instance.next_event_due, datetime(2018, 3, 2, 14, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
# Fire the event
utcnow_patch.return_value = datetime(2018, 3, 2, 14, 1)
instance.handle_current_event()
self.assertEqual(send_patch.call_count, 1)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 3, 2))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 2)
self.assertEqual(instance.next_event_due, datetime(2018, 3, 3, 14, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
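# The "local time" in the comments above is the domain's America/New_York zone:
# utcnow 2018-03-01 12:00 UTC is 7:00 EST (UTC-5), still before the 9:00 event,
# so the instance is scheduled for the same day at 14:00 UTC; at 15:00 UTC
# (10:00 local) the event has passed and scheduling rolls to the next day.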
@patch('corehq.messaging.scheduling.models.content.SMSContent.send')
@patch('corehq.messaging.scheduling.util.utcnow')
def test_next_available_weekly_slot(self, utcnow_patch, send_patch):
# Mondays with the week starting on Monday
schedule = TimedSchedule.create_simple_weekly_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
SMSContent(message={'en': 'Hello'}),
[0],
0,
total_iterations=2,
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
# Leave all start date information blank so it schedules for the next available weekly slot
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
# It's 3/4 at 10pm local time; the schedule instance gets scheduled for the same week
utcnow_patch.return_value = datetime(2018, 3, 5, 3, 0)
with create_case(self.domain, 'person') as case:
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
instance = instances[0]
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 3, 4))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 1)
self.assertEqual(instance.next_event_due, datetime(2018, 3, 5, 14, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
# It's 3/5 at 10pm local time; the schedule instance gets scheduled for the next week
utcnow_patch.return_value = datetime(2018, 3, 6, 3, 0)
with create_case(self.domain, 'person') as case:
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
instance = instances[0]
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 3, 12))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 1)
self.assertEqual(instance.next_event_due, datetime(2018, 3, 12, 13, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
# Fire the event
utcnow_patch.return_value = datetime(2018, 3, 12, 13, 1)
instance.handle_current_event()
self.assertEqual(send_patch.call_count, 1)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 3, 12))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 2)
self.assertEqual(instance.next_event_due, datetime(2018, 3, 19, 13, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
@patch('corehq.messaging.scheduling.models.content.SMSContent.send')
@patch('corehq.messaging.scheduling.util.utcnow')
def test_next_available_monthly_slot(self, utcnow_patch, send_patch):
schedule = TimedSchedule.create_simple_monthly_schedule(
self.domain,
TimedEvent(time=time(9, 0)),
[15],
SMSContent(message={'en': 'Hello'}),
total_iterations=2,
)
rule = create_empty_rule(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
# Leave all start date information blank so it schedules for the next available monthly slot
rule.add_action(
CreateScheduleInstanceActionDefinition,
timed_schedule_id=schedule.schedule_id,
recipients=(('CommCareUser', self.user.get_id),),
)
AutomaticUpdateRule.clear_caches(self.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
# It's 3/4 at 5pm local time; the schedule instance gets scheduled for the same month
utcnow_patch.return_value = datetime(2018, 3, 4, 22, 0)
with create_case(self.domain, 'person') as case:
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
instance = instances[0]
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 3, 4))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 1)
self.assertEqual(instance.next_event_due, datetime(2018, 3, 15, 13, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
# It's 3/16 at 5pm local time; the schedule instance gets scheduled for the next month
utcnow_patch.return_value = datetime(2018, 3, 16, 21, 0)
with create_case(self.domain, 'person') as case:
instances = get_case_timed_schedule_instances_for_schedule(case.case_id, schedule)
self.assertEqual(instances.count(), 1)
instance = instances[0]
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 4, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 1)
self.assertEqual(instance.next_event_due, datetime(2018, 4, 15, 13, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
# Fire the event
utcnow_patch.return_value = datetime(2018, 4, 15, 13, 1)
instance.handle_current_event()
self.assertEqual(send_patch.call_count, 1)
self.assertEqual(instance.case_id, case.case_id)
self.assertEqual(instance.rule_id, rule.pk)
self.assertEqual(instance.timed_schedule_id, schedule.schedule_id)
self.assertEqual(instance.start_date, date(2018, 4, 1))
self.assertEqual(instance.domain, self.domain)
self.assertEqual(instance.recipient_type, 'CommCareUser')
self.assertEqual(instance.recipient_id, self.user.get_id)
self.assertEqual(instance.current_event_num, 0)
self.assertEqual(instance.schedule_iteration_num, 2)
self.assertEqual(instance.next_event_due, datetime(2018, 5, 15, 13, 0))
self.assertEqual(instance.schedule_revision, schedule.get_schedule_revision())
self.assertTrue(instance.active)
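# --- Illustrative sketch (added; not part of the test suite). The weekly
# assertions above imply "next occurrence of the target weekday" date
# arithmetic. This helper is an assumption-level approximation rather than
# the scheduler's implementation: it ignores the time-of-day cutoff that
# rolls a same-day slot into the following week, and target_weekday follows
# date.weekday() (Monday == 0).
def _next_weekly_slot_sketch(today, target_weekday):
    from datetime import timedelta
    return today + timedelta(days=(target_weekday - today.weekday()) % 7)
# _next_weekly_slot_sketch(date(2018, 3, 4), 0) -> date(2018, 3, 5), matching
# the first weekly expectation above; from 3/6 it yields date(2018, 3, 12).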
class VisitSchedulerIntegrationHelperTestCase(TestCase):
domain = 'visit-scheduler-integration-helper'
@classmethod
def setUpClass(cls):
cls.module, cls.form = get_visit_scheduler_module_and_form_for_test()
super(VisitSchedulerIntegrationHelperTestCase, cls).setUpClass()
def get_helper(self, case, visit_number=2, window_position=VISIT_WINDOW_START):
return VisitSchedulerIntegrationHelper(
case,
CreateScheduleInstanceActionDefinition.SchedulerModuleInfo(
enabled=True,
app_id='n/a for test',
form_unique_id=self.form.unique_id,
visit_number=visit_number,
window_position=window_position,
)
)
def test_get_visit_scheduler_form_phase(self):
with create_case(self.domain, 'person') as case:
phase_num, phase = self.get_helper(case).get_visit_scheduler_form_phase(self.module)
self.assertEqual(phase_num, 2)
self.assertEqual(phase.to_json(), self.module.schedule_phases[1].to_json())
def test_calculate_window_date(self):
with create_case(self.domain, 'person') as case:
helper = self.get_helper(case, window_position=VISIT_WINDOW_START)
self.assertEqual(
helper.calculate_window_date(self.form.schedule.visits[1], date(2017, 8, 1)),
date(2017, 7, 30)
)
helper = self.get_helper(case, window_position=VISIT_WINDOW_DUE_DATE)
self.assertEqual(
helper.calculate_window_date(self.form.schedule.visits[1], date(2017, 8, 1)),
date(2017, 8, 1)
)
helper = self.get_helper(case, window_position=VISIT_WINDOW_END)
self.assertEqual(
helper.calculate_window_date(self.form.schedule.visits[1], date(2017, 8, 1)),
date(2017, 8, 4)
)
def test_get_case_current_schedule_phase(self):
with create_case(self.domain, 'person') as case:
helper = self.get_helper(case)
self.assertIsNone(helper.get_case_current_schedule_phase())
update_case(self.domain, case.case_id, case_properties={'current_schedule_phase': '2'})
case = CaseAccessors(self.domain).get_case(case.case_id)
helper = self.get_helper(case)
self.assertEqual(helper.get_case_current_schedule_phase(), 2)
def test_get_visit(self):
with create_case(self.domain, 'person') as case:
helper = self.get_helper(case, visit_number=1)
self.assertEqual(
helper.get_visit(self.form).to_json(),
self.form.schedule.visits[1].to_json()
)
# Repeat visits aren't supported
helper = self.get_helper(case, visit_number=2)
with self.assertRaises(VisitSchedulerIntegrationHelper.VisitSchedulerIntegrationException):
helper.get_visit(self.form)
# Index out of range
helper = self.get_helper(case, visit_number=999)
with self.assertRaises(VisitSchedulerIntegrationHelper.VisitSchedulerIntegrationException):
helper.get_visit(self.form)
def test_get_anchor_date(self):
with create_case(self.domain, 'person') as case:
helper = self.get_helper(case)
with self.assertRaises(VisitSchedulerIntegrationHelper.VisitSchedulerIntegrationException):
helper.get_anchor_date('add')
update_case(self.domain, case.case_id, case_properties={'add': '2017-08-01'})
case = CaseAccessors(self.domain).get_case(case.case_id)
helper = self.get_helper(case)
self.assertEqual(helper.get_anchor_date('add'), date(2017, 8, 1))
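# --- Illustrative sketch (added; not part of the test suite). The window
# assertions in test_calculate_window_date imply fixed offsets around the
# anchor date; the -2/+3 day offsets below are inferred from this fixture
# (visits[1] of the test module), not read out of the scheduler code.
#
#   from datetime import date, timedelta
#   anchor = date(2017, 8, 1)
#   anchor - timedelta(days=2)   # VISIT_WINDOW_START    -> date(2017, 7, 30)
#   anchor                       # VISIT_WINDOW_DUE_DATE -> date(2017, 8, 1)
#   anchor + timedelta(days=3)   # VISIT_WINDOW_END      -> date(2017, 8, 4)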
| 52.034261
| 112
| 0.670219
| 8,535
| 72,900
| 5.495723
| 0.04089
| 0.145184
| 0.142754
| 0.123652
| 0.901377
| 0.887648
| 0.875538
| 0.860615
| 0.847098
| 0.836076
| 0
| 0.027066
| 0.231166
| 72,900
| 1,400
| 113
| 52.071429
| 0.809824
| 0.055953
| 0
| 0.72134
| 0
| 0
| 0.045451
| 0.021016
| 0
| 0
| 0
| 0
| 0.439153
| 1
| 0.025573
| false
| 0
| 0.018519
| 0.000882
| 0.050265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dc10f0adab6fe221bbde17810519cdd715ae6ab7
| 2,198
|
py
|
Python
|
lib/systems/coronene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/coronene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/systems/coronene.py
|
pulsar-chem/BPModule
|
f8e64e04fdb01947708f098e833600c459c2ff0e
|
[
"BSD-3-Clause"
] | null | null | null |
import pulsar as psr
def load_ref_system():
""" Returns coronene as found in the IQMol fragment library.
All credit to https://github.com/nutjunkie/IQmol
"""
return psr.make_system("""
C 1.22127 0.70510 0.00000
C 1.22127 -0.70510 0.00000
C 0.00000 1.41021 0.00000
C 2.44340 1.41070 0.00000
C -0.00000 -1.41021 -0.00000
C -1.22127 0.70510 -0.00000
C 2.44340 -1.41070 0.00000
C 0.00000 2.82140 0.00000
C 3.65444 0.70022 0.00000
C 2.43363 2.81472 0.00000
C 3.65444 -0.70022 0.00000
C -1.22127 -0.70510 -0.00000
C 1.22081 3.51495 0.00000
C -0.00000 -2.82140 -0.00000
C -2.44340 1.41070 -0.00000
C 2.43363 -2.81472 0.00000
C -1.22081 3.51495 -0.00000
C -2.44340 -1.41070 -0.00000
C 1.22081 -3.51495 0.00000
C -2.43363 2.81472 -0.00000
C -1.22081 -3.51495 -0.00000
C -3.65444 0.70022 -0.00000
C -2.43363 -2.81472 -0.00000
C -3.65444 -0.70022 -0.00000
H 4.59989 1.22945 0.00000
H 3.36468 3.36890 0.00000
H 4.59989 -1.22945 0.00000
H 1.23521 4.59835 0.00000
H 3.36468 -3.36890 0.00000
H -1.23521 4.59835 -0.00000
H 1.23521 -4.59835 0.00000
H -3.36468 3.36890 -0.00000
H -1.23521 -4.59835 -0.00000
H -4.59989 1.22945 -0.00000
H -3.36468 -3.36890 -0.00000
H -4.59989 -1.22945 -0.00000
""")
| 49.954545
| 64
| 0.375341
| 281
| 2,198
| 2.925267
| 0.181495
| 0.291971
| 0.195864
| 0.077859
| 0.832117
| 0.832117
| 0.832117
| 0.832117
| 0.832117
| 0.816302
| 0
| 0.640316
| 0.539581
| 2,198
| 43
| 65
| 51.116279
| 0.171937
| 0.047771
| 0
| 0
| 0
| 0
| 0.959479
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| true
| 0
| 0.025
| 0
| 0.075
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
90beaf0fde45b0f4b810177c00a21c432ac43423
| 123
|
py
|
Python
|
osscla/routes/__init__.py
|
dschaller/osscla
|
b1ff167d65548e13ccf89babc9fbba21c70458c1
|
[
"Apache-2.0"
] | 6
|
2017-12-07T06:43:52.000Z
|
2021-09-03T19:15:29.000Z
|
osscla/routes/__init__.py
|
dschaller/osscla
|
b1ff167d65548e13ccf89babc9fbba21c70458c1
|
[
"Apache-2.0"
] | 21
|
2017-08-07T20:03:40.000Z
|
2022-01-09T01:39:47.000Z
|
osscla/routes/__init__.py
|
dschaller/osscla
|
b1ff167d65548e13ccf89babc9fbba21c70458c1
|
[
"Apache-2.0"
] | 5
|
2017-08-05T05:14:08.000Z
|
2020-11-27T10:44:52.000Z
|
from __future__ import absolute_import
from osscla.routes import static_files # noqa
from osscla.routes import v1 # noqa
| 30.75
| 46
| 0.821138
| 18
| 123
| 5.277778
| 0.555556
| 0.210526
| 0.336842
| 0.463158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009524
| 0.146341
| 123
| 3
| 47
| 41
| 0.895238
| 0.073171
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
90c69fc273c2ebd4f62fcee59a4ebe0c69a663f7
| 607
|
py
|
Python
|
src/data.py
|
ZauJulio/VowelRecognition
|
73e56d9f5acc581ad7dc9163e3617702dc6caadb
|
[
"MIT"
] | null | null | null |
src/data.py
|
ZauJulio/VowelRecognition
|
73e56d9f5acc581ad7dc9163e3617702dc6caadb
|
[
"MIT"
] | null | null | null |
src/data.py
|
ZauJulio/VowelRecognition
|
73e56d9f5acc581ad7dc9163e3617702dc6caadb
|
[
"MIT"
] | null | null | null |
vowels = {
'a': [
1, 1, 1,
1, 0, 1,
1, 1, 1,
1, 0, 1,
1, 0, 1],
'e': [
1, 1, 1,
1, 0, 0,
1, 1, 1,
1, 0, 0,
1, 1, 1],
'i': [
0, 1, 0,
0, 1, 0,
0, 1, 0,
0, 1, 0,
0, 1, 0],
'o': [
1, 1, 1,
1, 0, 1,
1, 0, 1,
1, 0, 1,
1, 1, 1],
'u': [
1, 0, 1,
1, 0, 1,
1, 0, 1,
1, 0, 1,
1, 1, 1]
}
##################################################
# Unpack the vowel dictionary keys into a list
keys = [*vowels]
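# --- Illustrative sketch (added; not part of the original module). Each
# bitmap is stored row-major as five rows of three cells, so a vowel can be
# rendered as ASCII art:
def render_vowel(vowel):
    bitmap = vowels[vowel]
    rows = [bitmap[i:i + 3] for i in range(0, len(bitmap), 3)]
    return '\n'.join(''.join('#' if cell else '.' for cell in row) for row in rows)
# render_vowel('i') gives ".#." on each of the five rows.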
| 16.861111
| 50
| 0.214168
| 89
| 607
| 1.460674
| 0.157303
| 0.461538
| 0.369231
| 0.276923
| 0.576923
| 0.576923
| 0.576923
| 0.530769
| 0.530769
| 0.330769
| 0
| 0.261324
| 0.527183
| 607
| 36
| 51
| 16.861111
| 0.191638
| 0.06425
| 0
| 0.666667
| 0
| 0
| 0.009671
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
90f5bdebc9002a61567841fb596edfd3b44e2b42
| 17,594
|
py
|
Python
|
bittrex/test/bittrex_tests.py
|
kdmukai/python-bittrex
|
d6587ba058523ae3d3c42c16fd874b775f8072f6
|
[
"MIT"
] | 692
|
2015-04-17T19:13:53.000Z
|
2022-03-10T04:51:08.000Z
|
bittrex/test/bittrex_tests.py
|
kdmukai/python-bittrex
|
d6587ba058523ae3d3c42c16fd874b775f8072f6
|
[
"MIT"
] | 137
|
2015-08-17T00:43:18.000Z
|
2020-07-22T06:13:16.000Z
|
bittrex/test/bittrex_tests.py
|
kdmukai/python-bittrex
|
d6587ba058523ae3d3c42c16fd874b775f8072f6
|
[
"MIT"
] | 416
|
2015-03-18T22:15:09.000Z
|
2022-02-22T22:02:29.000Z
|
import unittest
import json
from bittrex.bittrex import Bittrex, API_V2_0, API_V1_1, BUY_ORDERBOOK, TICKINTERVAL_ONEMIN
try:
open("secrets.json").close()
IS_CI_ENV = False
except Exception:
IS_CI_ENV = True
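# An equivalent check, shown only as a sketch (not part of the original
# module): test for the secrets file directly instead of catching the
# open() failure.
#
#   import os.path
#   IS_CI_ENV = not os.path.exists("secrets.json")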
def test_basic_response(unit_test, result, method_name):
unit_test.assertTrue(result['success'], "{0:s} failed".format(method_name))
unit_test.assertTrue(result['message'] is not None, "message not present in response")
unit_test.assertTrue(result['result'] is not None, "result not present in response")
def test_auth_basic_failures(unit_test, result, test_type):
unit_test.assertFalse(result['success'], "{0:s} failed".format(test_type))
unit_test.assertTrue('invalid' in str(result['message']).lower(), "{0:s} failed response message".format(test_type))
unit_test.assertIsNone(result['result'], "{0:s} failed response result not None".format(test_type))
class TestBittrexV11PublicAPI(unittest.TestCase):
"""
Integration tests for the Bittrex public API.
These will fail in the absence of an internet connection or if the Bittrex API goes down.
"""
def setUp(self):
self.bittrex = Bittrex(None, None, api_version=API_V1_1)
def test_handles_none_key_or_secret(self):
self.bittrex = Bittrex(None, None)
# could call any public method here
actual = self.bittrex.get_markets()
self.assertTrue(actual['success'], "failed with None key and None secret")
self.bittrex = Bittrex("123", None)
actual = self.bittrex.get_markets()
self.assertTrue(actual['success'], "failed with None secret")
self.bittrex = Bittrex(None, "123")
actual = self.bittrex.get_markets()
self.assertTrue(actual['success'], "failed with None key")
def test_get_markets(self):
actual = self.bittrex.get_markets()
test_basic_response(self, actual, "get_markets")
self.assertTrue(isinstance(actual['result'], list), "result is not a list")
self.assertTrue(len(actual['result']) > 0, "result list is 0-length")
def test_get_currencies(self):
actual = self.bittrex.get_currencies()
test_basic_response(self, actual, "get_currencies")
def test_get_ticker(self):
actual = self.bittrex.get_ticker(market='BTC-LTC')
test_basic_response(self, actual, "get_ticker")
def test_get_market_summaries(self):
actual = self.bittrex.get_market_summaries()
test_basic_response(self, actual, "get_market_summaries")
def test_get_orderbook(self):
actual = self.bittrex.get_orderbook('BTC-LTC', depth_type=BUY_ORDERBOOK)
test_basic_response(self, actual, "get_orderbook")
def test_get_market_history(self):
actual = self.bittrex.get_market_history('BTC-LTC')
test_basic_response(self, actual, "get_market_history")
def test_list_markets_by_currency(self):
actual = self.bittrex.list_markets_by_currency('LTC')
self.assertListEqual(['BTC-LTC', 'ETH-LTC', 'USDT-LTC'], actual)
def test_get_wallet_health(self):
self.assertRaisesRegex(Exception, 'method call not available', self.bittrex.get_wallet_health)
def test_get_balance_distribution(self):
self.assertRaisesRegex(Exception, 'method call not available', self.bittrex.get_balance_distribution)
def test_get_candles(self):
self.assertRaisesRegex(Exception, 'method call not available', self.bittrex.get_candles, market='BTC-LTC',
tick_interval=TICKINTERVAL_ONEMIN)
def test_get_latest_candle(self):
self.assertRaisesRegex(Exception, 'method call not available', self.bittrex.get_latest_candle, market='BTC-LTC',
tick_interval=TICKINTERVAL_ONEMIN)
class TestBittrexV20PublicAPI(unittest.TestCase):
"""
Integration tests for the Bittrex public API.
These will fail in the absence of an internet connection or if the Bittrex API goes down.
"""
def setUp(self):
self.bittrex = Bittrex(None, None, api_version=API_V2_0)
def test_handles_none_key_or_secret(self):
self.bittrex = Bittrex(None, None, api_version=API_V2_0)
# could call any public method here
actual = self.bittrex.get_market_summaries()
self.assertTrue(actual['success'], "failed with None key and None secret")
self.bittrex = Bittrex("123", None, api_version=API_V2_0)
actual = self.bittrex.get_market_summaries()
self.assertTrue(actual['success'], "failed with None secret")
self.bittrex = Bittrex(None, "123", api_version=API_V2_0)
actual = self.bittrex.get_market_summaries()
self.assertTrue(actual['success'], "failed with None key")
def test_get_currencies(self):
actual = self.bittrex.get_currencies()
test_basic_response(self, actual, "get_currencies")
def test_get_ticker(self):
self.assertRaisesRegex(Exception, 'method call not available', self.bittrex.get_ticker,
market='BTC-LTC')
def test_get_market_summaries(self):
actual = self.bittrex.get_market_summaries()
test_basic_response(self, actual, "get_market_summaries")
def test_get_market_summary(self):
actual = self.bittrex.get_market_summary(market='BTC-LTC')
test_basic_response(self, actual, "get_market_summary")
def test_get_orderbook(self):
actual = self.bittrex.get_orderbook('BTC-LTC')
test_basic_response(self, actual, "get_orderbook")
def test_get_wallet_health(self):
actual = self.bittrex.get_wallet_health()
test_basic_response(self, actual, "get_wallet_health")
self.assertIsInstance(actual['result'], list)
@unittest.skip("Endpoint 404s. Is this still a valid 2.0 API?")
def test_get_balance_distribution(self):
actual = self.bittrex.get_balance_distribution()
test_basic_response(self, actual, "get_balance_distribution")
self.assertIsInstance(actual['result'], list)
def test_get_candles(self):
actual = self.bittrex.get_candles('BTC-LTC', tick_interval=TICKINTERVAL_ONEMIN)
test_basic_response(self, actual, "test_get_candles")
self.assertIsInstance(actual['result'], list)
def test_get_latest_candle(self):
actual = self.bittrex.get_latest_candle('BTC-LTC', tick_interval=TICKINTERVAL_ONEMIN)
test_basic_response(self, actual, "test_get_latest_candle")
self.assertIsInstance(actual['result'], list)
@unittest.skipIf(IS_CI_ENV, 'no account secrets uploaded in the CI environment, TODO')
class TestBittrexV11AccountAPI(unittest.TestCase):
"""
Integration tests for the Bittrex Account API.
* These will fail in the absence of an internet connection or if the Bittrex API goes down.
* They require a valid API key and secret issued by Bittrex.
* They also require the presence of a JSON file called secrets.json.
It is structured as follows:
{
"key": "12341253456345",
"secret": "3345745634234534"
}
"""
def setUp(self):
with open("secrets.json") as secrets_file:
self.secrets = json.load(secrets_file)
self.bittrex = Bittrex(self.secrets['key'], self.secrets['secret'])
def test_handles_invalid_key_or_secret(self):
self.bittrex = Bittrex('invalidkey', self.secrets['secret'])
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'Invalid key, valid secret')
self.bittrex = Bittrex(None, self.secrets['secret'])
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'None key, valid secret')
self.bittrex = Bittrex(self.secrets['key'], 'invalidsecret')
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'valid key, invalid secret')
self.bittrex = Bittrex(self.secrets['key'], None)
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'valid key, None secret')
self.bittrex = Bittrex('invalidkey', 'invalidsecret')
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'invalid key, invalid secret')
def test_get_openorders(self):
actual = self.bittrex.get_open_orders('BTC-LTC')
test_basic_response(self, actual, "get_openorders")
self.assertTrue(isinstance(actual['result'], list), "result is not a list")
def test_get_balances(self):
actual = self.bittrex.get_balances()
test_basic_response(self, actual, "get_balances")
self.assertTrue(isinstance(actual['result'], list), "result is not a list")
def test_get_balance(self):
actual = self.bittrex.get_balance('BTC')
test_basic_response(self, actual, "get_balance")
self.assertTrue(isinstance(actual['result'], dict), "result is not a dict")
self.assertEqual(actual['result']['Currency'],
"BTC",
"requested currency {0:s} does not match returned currency {1:s}"
.format("BTC", actual['result']['Currency']))
def test_get_depositaddress(self):
actual = self.bittrex.get_deposit_address('BTC')
if not actual['success']:
self.assertEqual(actual['message'], 'ADDRESS_GENERATING')
else:
test_basic_response(self, actual, "get_deposit_address")
def test_get_order_history_all_markets(self):
actual = self.bittrex.get_order_history()
test_basic_response(self, actual, "get_order_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_order_history_one_market(self):
actual = self.bittrex.get_order_history(market='BTC-LTC')
test_basic_response(self, actual, "get_order_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_withdrawlhistory_all_currencies(self):
actual = self.bittrex.get_withdrawal_history()
test_basic_response(self, actual, "get_withdrawal_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_withdrawlhistory_one_currency(self):
actual = self.bittrex.get_withdrawal_history('BTC')
test_basic_response(self, actual, "get_withdrawal_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_deposithistory_all_currencies(self):
actual = self.bittrex.get_deposit_history()
test_basic_response(self, actual, "get_deposit_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_deposithistory_one_currency(self):
actual = self.bittrex.get_deposit_history('BTC')
test_basic_response(self, actual, "get_deposit_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_pending_withdrawals(self):
self.assertRaisesRegex(Exception, 'method call not available', self.bittrex.get_pending_withdrawals)
def test_get_pending_deposits(self):
self.assertRaisesRegex(Exception, 'method call not available', self.bittrex.get_pending_deposits)
def test_generate_deposit_address(self):
self.assertRaisesRegex(Exception, 'method call not available', self.bittrex.generate_deposit_address, currency='BTC')
@unittest.skipIf(IS_CI_ENV, 'no account secrets uploaded in the CI environment, TODO')
class TestBittrexV20AccountAPI(unittest.TestCase):
"""
Integration tests for the Bittrex Account API.
* These will fail in the absence of an internet connection or if the Bittrex API goes down.
* They require a valid API key and secret issued by Bittrex.
* They also require the presence of a JSON file called secrets.json.
It is structured as follows:
{
"key": "12341253456345",
"secret": "3345745634234534"
}
"""
def setUp(self):
with open("secrets.json") as secrets_file:
self.secrets = json.load(secrets_file)
self.bittrex = Bittrex(self.secrets['key'], self.secrets['secret'], api_version=API_V2_0)
def test_handles_invalid_key_or_secret(self):
self.bittrex = Bittrex('invalidkey', self.secrets['secret'], api_version=API_V2_0)
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'Invalid key, valid secret')
self.bittrex = Bittrex(None, self.secrets['secret'], api_version=API_V2_0)
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'None key, valid secret')
self.bittrex = Bittrex(self.secrets['key'], 'invalidsecret', api_version=API_V2_0)
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'valid key, invalid secret')
self.bittrex = Bittrex(self.secrets['key'], None, api_version=API_V2_0)
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'valid key, None secret')
self.bittrex = Bittrex('invalidkey', 'invalidsecret', api_version=API_V2_0)
actual = self.bittrex.get_balance('BTC')
test_auth_basic_failures(self, actual, 'invalid key, invalid secret')
def test_get_openorders(self):
actual = self.bittrex.get_open_orders('BTC-LTC')
test_basic_response(self, actual, "get_openorders")
self.assertTrue(isinstance(actual['result'], list), "result is not a list")
def test_get_balances(self):
actual = self.bittrex.get_balances()
test_basic_response(self, actual, "get_balances")
self.assertTrue(isinstance(actual['result'], list), "result is not a list")
@unittest.skip("the return result is an empty dict. API bug? the 2.0 get_balances works as expected")
def test_get_balance(self):
actual = self.bittrex.get_balance('BTC')
test_basic_response(self, actual, "get_balance")
self.assertTrue(isinstance(actual['result'], dict), "result is not a dict")
self.assertEqual(actual['result']['Currency'],
"BTC",
"requested currency {0:s} does not match returned currency {1:s}"
.format("BTC", actual['result']['Currency']))
@unittest.skip("my testing account is acting funny this should work")
def test_get_depositaddress(self):
actual = self.bittrex.get_deposit_address('BTC')
test_basic_response(self, actual, "get_deposit_address")
def test_get_order_history_all_markets(self):
actual = self.bittrex.get_order_history()
test_basic_response(self, actual, "get_order_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_order_history_one_market(self):
actual = self.bittrex.get_order_history(market='BTC-LTC')
test_basic_response(self, actual, "get_order_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_withdrawlhistory_all_currencies(self):
actual = self.bittrex.get_withdrawal_history()
test_basic_response(self, actual, "get_withdrawal_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_withdrawlhistory_one_currency(self):
actual = self.bittrex.get_withdrawal_history('BTC')
test_basic_response(self, actual, "get_withdrawal_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_deposithistory_all_currencies(self):
actual = self.bittrex.get_deposit_history()
test_basic_response(self, actual, "get_deposit_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_deposithistory_one_currency(self):
actual = self.bittrex.get_deposit_history('BTC')
test_basic_response(self, actual, "get_deposit_history")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_pending_withdrawals_all_currencies(self):
actual = self.bittrex.get_pending_withdrawals()
test_basic_response(self, actual, "get_pending_withdrawals")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_pending_withdrawals_one_currency(self):
actual = self.bittrex.get_pending_withdrawals('BTC')
test_basic_response(self, actual, "get_pending_withdrawals")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_pending_deposits_all_currencies(self):
actual = self.bittrex.get_pending_deposits()
test_basic_response(self, actual, "get_pending_deposits")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_get_pending_deposits_one_currency(self):
actual = self.bittrex.get_pending_deposits('BTC')
test_basic_response(self, actual, "get_pending_deposits")
self.assertIsInstance(actual['result'], list, "result is not a list")
def test_generate_deposit_address(self):
actual = self.bittrex.generate_deposit_address(currency='BTC')
test_basic_response(self, actual, "generate_deposit_address")
self.assertIsInstance(actual['result'], list, "result is not a list")
if __name__ == '__main__':
unittest.main()
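# Running the suite locally (added note; the module path is taken from this
# file's location at bittrex/test/bittrex_tests.py, and the account suites
# also expect a secrets.json in the working directory):
#
#   python -m unittest bittrex.test.bittrex_tests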
| 45.345361
| 126
| 0.696487
| 2,225
| 17,594
| 5.26382
| 0.083596
| 0.07599
| 0.072917
| 0.092213
| 0.901127
| 0.875256
| 0.824539
| 0.801144
| 0.776725
| 0.765454
| 0
| 0.008682
| 0.194725
| 17,594
| 387
| 127
| 45.462532
| 0.81797
| 0.059793
| 0
| 0.665441
| 0
| 0
| 0.189054
| 0.012433
| 0
| 0
| 0
| 0
| 0.194853
| 1
| 0.213235
| false
| 0
| 0.011029
| 0
| 0.238971
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
290e6e26fc6a1ae1625b9ee5fad3ae040cb325fa
| 10,811
|
py
|
Python
|
model-optimizer/extensions/ops/switch_test.py
|
fujunwei/dldt
|
09497b7724de4be92629f7799b8538b483d809a2
|
[
"Apache-2.0"
] | 1
|
2021-07-30T17:03:50.000Z
|
2021-07-30T17:03:50.000Z
|
model-optimizer/extensions/ops/switch_test.py
|
fujunwei/dldt
|
09497b7724de4be92629f7799b8538b483d809a2
|
[
"Apache-2.0"
] | null | null | null |
model-optimizer/extensions/ops/switch_test.py
|
fujunwei/dldt
|
09497b7724de4be92629f7799b8538b483d809a2
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright (C) 2018-2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from unittest.mock import Mock, call
import numpy as np
from extensions.ops.switch import Switch
from mo.graph.graph import Node
from mo.utils.unittest.graph import build_graph_with_edge_attrs, build_graph_with_attrs
from mo.utils.ir_engine.compare_graphs import compare_graphs
class TestSwitch(unittest.TestCase):
def test_switch_infer_with_condition(self):
nodes = [
('tensor', {'value': np.zeros((3, 3)), 'kind': 'data', 'executable': True, 'shape': np.array([3, 3])}),
('pred_id', {'value': True, 'kind': 'data', 'executable': True}),
('switch', {'type': 'Switch', 'kind': 'op', 'op': 'Switch'}),
('switch_data_0', {'value': None, 'kind': 'data', 'executable': True}),
('switch_data_1', {'value': None, 'kind': 'data', 'executable': True})
]
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_0', {'out': 0}),
('switch', 'switch_data_1', {'out': 1})
]
graph = build_graph_with_attrs(nodes_with_attrs=nodes, edges_with_attrs=edges)
# We should propagate shapes and values
graph_ref = build_graph_with_attrs(nodes_with_attrs=nodes,
edges_with_attrs=edges,
update_nodes_attributes=[('switch_data_0', {'shape': np.array([3, 3]),
'value': np.zeros((3,3))}),
('switch_data_1', {'shape': np.array([3, 3]),
'value': np.zeros((3,3))})])
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.infer(node)
(flag, resp) = compare_graphs(graph, graph_ref, 'switch_data_0', check_op_attrs=True)
self.assertTrue(flag, resp)
def test_switch_infer_no_condition(self):
nodes = [
('tensor', {'value': None, 'kind': 'data', 'executable': True, 'shape': np.array([1, 2, 1])}),
('pred_id', {'value': None, 'kind': 'data', 'executable': True}),
('switch', {'type': 'Switch', 'kind': 'op', 'op': 'Switch'}),
('switch_data_0', {'value': None, 'kind': 'data', 'executable': True}),
('switch_data_1', {'value': None, 'kind': 'data', 'executable': True})
]
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_0', {'out': 0}),
('switch', 'switch_data_1', {'out': 1})
]
graph = build_graph_with_attrs(nodes_with_attrs=nodes, edges_with_attrs=edges)
# We should propagate only shapes
graph_ref = build_graph_with_attrs(nodes_with_attrs=nodes,
edges_with_attrs=edges,
update_nodes_attributes=[('switch_data_0', {'shape': np.array([1, 2, 1])}),
('switch_data_1', {'shape': np.array([1, 2, 1])})])
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.infer(node)
(flag, resp) = compare_graphs(graph, graph_ref, 'switch_data_0', check_op_attrs=True)
self.assertTrue(flag, resp)
def test_switch_cf_infer_no_condition(self):
me_mock = Mock()
nodes = {
'tensor': {'value': True, 'kind': 'data', 'executable': True},
'pred_id': {'value': None, 'kind': 'data', 'executable': True},
'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
'switch_data_0': {'value': None, 'kind': 'data', 'executable': True},
'switch_data_1': {'value': None, 'kind': 'data', 'executable': True}
}
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_0', {'out': 0}),
('switch', 'switch_data_1', {'out': 1})
]
graph = build_graph_with_edge_attrs(nodes, edges)
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.control_flow_infer(node, True, me_mock)
# In this case we should mark all ports as executable
me_mock.assert_has_calls([call('switch_data_0', True), call('switch_data_1', True)], any_order=True)
def test_switch_cf_true_both_ports(self):
me_mock = Mock()
nodes = {
'tensor': {'value': True, 'kind': 'data', 'executable': True},
'pred_id': {'value': np.array(True), 'kind': 'data', 'executable': True},
'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
'switch_data_0': {'value': None, 'kind': 'data', 'executable': True},
'switch_data_1': {'value': None, 'kind': 'data', 'executable': True}
}
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_0', {'out': 0}),
('switch', 'switch_data_1', {'out': 1})
]
graph = build_graph_with_edge_attrs(nodes, edges)
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.control_flow_infer(node, True, me_mock)
me_mock.assert_has_calls([call('switch_data_0', False), call('switch_data_1', True)], any_order=True)
def test_switch_cf_false_both_ports(self):
me_mock = Mock()
nodes = {
'tensor': {'value': True, 'kind': 'data', 'executable': True},
'pred_id': {'value': np.array(False), 'kind': 'data', 'executable': True},
'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
'switch_data_0': {'value': None, 'kind': 'data', 'executable': True},
'switch_data_1': {'value': None, 'kind': 'data', 'executable': True}
}
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_0', {'out': 0}),
('switch', 'switch_data_1', {'out': 1})
]
graph = build_graph_with_edge_attrs(nodes, edges)
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.control_flow_infer(node, True, me_mock)
me_mock.assert_has_calls([call('switch_data_0', True), call('switch_data_1', False)], any_order=True)
def test_switch_cf_true_one_exec_port(self):
me_mock = Mock()
nodes = {
'tensor': {'value': True, 'kind': 'data', 'executable': True},
'pred_id': {'value': np.array(True), 'kind': 'data', 'executable': True},
'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
'switch_data_1': {'value': None, 'kind': 'data', 'executable': True}
}
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_1', {'out': 1})
]
graph = build_graph_with_edge_attrs(nodes, edges)
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.control_flow_infer(node, True, me_mock)
me_mock.assert_has_calls([call('switch_data_1', True)], any_order=True)
def test_switch_cf_false_one_exec_port(self):
me_mock = Mock()
nodes = {
'tensor': {'value': True, 'kind': 'data', 'executable': True},
'pred_id': {'value': np.array(False), 'kind': 'data', 'executable': True},
'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
'switch_data_0': {'value': None, 'kind': 'data', 'executable': True},
}
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_0', {'out': 0}),
]
graph = build_graph_with_edge_attrs(nodes, edges)
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.control_flow_infer(node, True, me_mock)
me_mock.assert_has_calls([call('switch_data_0', True)], any_order=True)
def test_switch_cf_true_no_exec(self):
me_mock = Mock()
nodes = {
'tensor': {'value': True, 'kind': 'data', 'executable': True},
'pred_id': {'value': np.array(True), 'kind': 'data', 'executable': True},
'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
'switch_data_0': {'value': None, 'kind': 'data', 'executable': True}
}
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_0', {'out': 0}),
]
graph = build_graph_with_edge_attrs(nodes, edges)
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.control_flow_infer(node, True, me_mock)
me_mock.assert_has_calls([call('switch_data_0', False)], any_order=True)
def test_switch_cf_false_no_exec(self):
me_mock = Mock()
nodes = {
'tensor': {'value': True, 'kind': 'data', 'executable': True},
'pred_id': {'value': np.array(False), 'kind': 'data', 'executable': True},
'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
'switch_data_1': {'value': None, 'kind': 'data', 'executable': True}
}
edges = [
('tensor', 'switch', {'in': 0}),
('pred_id', 'switch', {'in': 1}),
('switch', 'switch_data_1', {'out': 1})
]
graph = build_graph_with_edge_attrs(nodes, edges)
tested_class = Switch(graph=graph, attrs={})
node = Node(graph, 'switch')
tested_class.control_flow_infer(node, True, me_mock)
me_mock.assert_has_calls([call('switch_data_1', False)], any_order=True)
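# --- Illustrative sketch (added; not part of the test module). The
# control-flow assertions above encode standard Switch semantics: output
# port 0 carries the False branch, port 1 the True branch, and an unknown
# predicate leaves every connected port executable. An assumption-level
# summary of that mapping:
def _switch_port_executability_sketch(pred):
    if pred is None:                     # predicate unknown at inference time
        return {0: True, 1: True}
    return {0: not pred, 1: bool(pred)}  # e.g. pred=True -> only port 1 fires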
| 45.809322
| 119
| 0.537878
| 1,255
| 10,811
| 4.390438
| 0.114741
| 0.079855
| 0.104537
| 0.127768
| 0.850998
| 0.835209
| 0.824864
| 0.812523
| 0.797096
| 0.790744
| 0
| 0.014097
| 0.284803
| 10,811
| 235
| 120
| 46.004255
| 0.698526
| 0.063824
| 0
| 0.715789
| 0
| 0
| 0.215424
| 0
| 0
| 0
| 0
| 0
| 0.047368
| 1
| 0.047368
| false
| 0
| 0.036842
| 0
| 0.089474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46241b6a2b137021f7969ffddaa43e93751522b0
| 5,135
|
py
|
Python
|
google/cloud/contact_center_insights_v1/types/__init__.py
|
renovate-bot/python-contact-center-insights
|
d133f4028d862cc39d10ba4b0879df256a3505c1
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/contact_center_insights_v1/types/__init__.py
|
renovate-bot/python-contact-center-insights
|
d133f4028d862cc39d10ba4b0879df256a3505c1
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/contact_center_insights_v1/types/__init__.py
|
renovate-bot/python-contact-center-insights
|
d133f4028d862cc39d10ba4b0879df256a3505c1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .contact_center_insights import (
CalculateIssueModelStatsRequest,
CalculateIssueModelStatsResponse,
CalculateStatsRequest,
CalculateStatsResponse,
CreateAnalysisOperationMetadata,
CreateAnalysisRequest,
CreateConversationRequest,
CreateIssueModelMetadata,
CreateIssueModelRequest,
CreatePhraseMatcherRequest,
DeleteAnalysisRequest,
DeleteConversationRequest,
DeleteIssueModelMetadata,
DeleteIssueModelRequest,
DeletePhraseMatcherRequest,
DeployIssueModelMetadata,
DeployIssueModelRequest,
DeployIssueModelResponse,
ExportInsightsDataMetadata,
ExportInsightsDataRequest,
ExportInsightsDataResponse,
GetAnalysisRequest,
GetConversationRequest,
GetIssueModelRequest,
GetIssueRequest,
GetPhraseMatcherRequest,
GetSettingsRequest,
ListAnalysesRequest,
ListAnalysesResponse,
ListConversationsRequest,
ListConversationsResponse,
ListIssueModelsRequest,
ListIssueModelsResponse,
ListIssuesRequest,
ListIssuesResponse,
ListPhraseMatchersRequest,
ListPhraseMatchersResponse,
UndeployIssueModelMetadata,
UndeployIssueModelRequest,
UndeployIssueModelResponse,
UpdateConversationRequest,
UpdateIssueModelRequest,
UpdateIssueRequest,
UpdatePhraseMatcherRequest,
UpdateSettingsRequest,
ConversationView,
)
from .resources import (
Analysis,
AnalysisResult,
AnnotationBoundary,
AnswerFeedback,
ArticleSuggestionData,
CallAnnotation,
Conversation,
ConversationDataSource,
ConversationLevelSentiment,
ConversationParticipant,
DialogflowIntent,
DialogflowInteractionData,
DialogflowSource,
Entity,
EntityMentionData,
ExactMatchConfig,
FaqAnswerData,
GcsSource,
HoldData,
Intent,
IntentMatchData,
InterruptionData,
Issue,
IssueAssignment,
IssueModel,
IssueModelLabelStats,
IssueModelResult,
PhraseMatchData,
PhraseMatcher,
PhraseMatchRule,
PhraseMatchRuleConfig,
PhraseMatchRuleGroup,
RuntimeAnnotation,
SentimentData,
Settings,
SilenceData,
SmartComposeSuggestionData,
SmartReplyData,
)
__all__ = (
"CalculateIssueModelStatsRequest",
"CalculateIssueModelStatsResponse",
"CalculateStatsRequest",
"CalculateStatsResponse",
"CreateAnalysisOperationMetadata",
"CreateAnalysisRequest",
"CreateConversationRequest",
"CreateIssueModelMetadata",
"CreateIssueModelRequest",
"CreatePhraseMatcherRequest",
"DeleteAnalysisRequest",
"DeleteConversationRequest",
"DeleteIssueModelMetadata",
"DeleteIssueModelRequest",
"DeletePhraseMatcherRequest",
"DeployIssueModelMetadata",
"DeployIssueModelRequest",
"DeployIssueModelResponse",
"ExportInsightsDataMetadata",
"ExportInsightsDataRequest",
"ExportInsightsDataResponse",
"GetAnalysisRequest",
"GetConversationRequest",
"GetIssueModelRequest",
"GetIssueRequest",
"GetPhraseMatcherRequest",
"GetSettingsRequest",
"ListAnalysesRequest",
"ListAnalysesResponse",
"ListConversationsRequest",
"ListConversationsResponse",
"ListIssueModelsRequest",
"ListIssueModelsResponse",
"ListIssuesRequest",
"ListIssuesResponse",
"ListPhraseMatchersRequest",
"ListPhraseMatchersResponse",
"UndeployIssueModelMetadata",
"UndeployIssueModelRequest",
"UndeployIssueModelResponse",
"UpdateConversationRequest",
"UpdateIssueModelRequest",
"UpdateIssueRequest",
"UpdatePhraseMatcherRequest",
"UpdateSettingsRequest",
"ConversationView",
"Analysis",
"AnalysisResult",
"AnnotationBoundary",
"AnswerFeedback",
"ArticleSuggestionData",
"CallAnnotation",
"Conversation",
"ConversationDataSource",
"ConversationLevelSentiment",
"ConversationParticipant",
"DialogflowIntent",
"DialogflowInteractionData",
"DialogflowSource",
"Entity",
"EntityMentionData",
"ExactMatchConfig",
"FaqAnswerData",
"GcsSource",
"HoldData",
"Intent",
"IntentMatchData",
"InterruptionData",
"Issue",
"IssueAssignment",
"IssueModel",
"IssueModelLabelStats",
"IssueModelResult",
"PhraseMatchData",
"PhraseMatcher",
"PhraseMatchRule",
"PhraseMatchRuleConfig",
"PhraseMatchRuleGroup",
"RuntimeAnnotation",
"SentimentData",
"Settings",
"SilenceData",
"SmartComposeSuggestionData",
"SmartReplyData",
)
| 26.884817
| 74
| 0.736709
| 270
| 5,135
| 13.988889
| 0.581481
| 0.015886
| 0.006884
| 0.008472
| 0.867355
| 0.867355
| 0.867355
| 0.867355
| 0.867355
| 0.867355
| 0
| 0.002156
| 0.186952
| 5,135
| 190
| 75
| 27.026316
| 0.902515
| 0.110808
| 0
| 0
| 0
| 0
| 0.36
| 0.230549
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011494
| 0
| 0.011494
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46658fdb0c1263cbaf61175c282966770198ad94
| 22,401
|
py
|
Python
|
laygo/generators/future/refactor.py
|
tinapiao/Software-IC-Automation
|
74b23cd94aa6e4658b110e93b5deb635e014f3a6
|
[
"BSD-3-Clause"
] | 26
|
2017-07-07T08:06:31.000Z
|
2021-11-25T06:41:24.000Z
|
laygo/generators/future/refactor.py
|
tinapiao/Software-IC-Automation
|
74b23cd94aa6e4658b110e93b5deb635e014f3a6
|
[
"BSD-3-Clause"
] | 9
|
2016-12-28T03:08:29.000Z
|
2019-01-30T16:00:28.000Z
|
laygo/generators/future/refactor.py
|
tinapiao/Software-IC-Automation
|
74b23cd94aa6e4658b110e93b5deb635e014f3a6
|
[
"BSD-3-Clause"
] | 10
|
2018-07-14T01:31:28.000Z
|
2021-08-21T10:18:30.000Z
|
# refactor
import re, os
def convert_pos_to_named(filename_i, filename_o, func_name):
"""
Convert positional arguments to named arguments for further refactoring, under the following assumptions:
1. The functions being refactored are defined in GridLayoutGenerator.py
2. GridLayoutGenerator is instantiated as laygen
"""
# GridLayoutGenerator parameters
laygen_path="../../GridLayoutGenerator.py"
laygen_instance="laygen"
# read function definition
trig=0 #trigger for multiline definitions
with open(laygen_path, 'r') as f:
lines_s = f.readlines()
# read function definition
for l in lines_s:
if 'def '+func_name+'(' in l: # function definition found
l_token_arg = [] #tokens for arguments
trig = 1 #trig to readout arguments over multiple lines
depth = 0 # depth variable to figure out argument definitions
print("function " + func_name + " definition detected. code snapshot: " + l[:-1])
if trig == 1:
s_buf=''
for c in l:
if c==' ' and s_buf=='': #ignore spaces between commas and indents
pass
else:
if c==')' or c==']': #exit bracket decrease depth
if depth == 1: #end of definition
if s_buf=='': #some functions have something like , ):
pass
else:
l_token_arg.append(s_buf)
s_buf = ''
trig = 0
depth -= 1
if c==',' and depth == 1: #if the comma is argument splitter,
l_token_arg.append(s_buf)
s_buf=''
else:
if depth >= 1:
s_buf += c
if c=='(' or c=='[': #go inside bracket. increase depth
depth += 1
if trig == 0:
l_token_arg=l_token_arg[1:] #exclude self
for i, item in enumerate(l_token_arg):
if '=' in item: #has a default value
l_token_arg[i] = re.split("=", item)[0]
if trig == 0:
print("function "+func_name+" definition loaded. arguments: "+str(l_token_arg))
# read source code
with open(filename_i, 'r') as f:
lines_i = f.readlines()
print("file " + filename_i + " loaded")
# refactor
trig = 0 # trigger for multiline call
depth = 0 # depth variable to find out arguments
lines_o = [] # output buffer
for i, l in enumerate(lines_i):
if laygen_instance + '.' + func_name in l:
trig = 1
l_header = l.split(laygen_instance + '.' + func_name)[0]
depth = -1*(l_header.count('(') + l_header.count('[')) + (l_header.count(')') + l_header.count(']')) #if brackets are before the function call, need to decrease initial depth value
print("function " + func_name + " call detected in file: "+ filename_i + ", in line:"+ str(i) +" code snapshot: " + l[:-1])
if trig == 1:
l_vanilla = l #copy original one
l_refac = '' #refactored line
s_buf = '' #string buffer to store arguments
arg_index = 0 #argument index to map positional arguments
trig_refac_arg_readout = 0 # trigger to readout arguments
for c in l:
#mystr=" rect_xy_list = [laygen.get_rect_xy(name=r.name, gridname=gridname, sort=True) for r in rect_list]"
#mystr=" laygen.pin(name='VREF_M5_2<2>', layer=laygen.layers['pin'][5], xy=laygen.get_rect_xy(rvref2v2.name, gridname=rg_m4m5), gridname=rg_m4m5, netname='VREF<2>')"
#mystr=" diffpair_origin = laygen.get_inst_xy(itapbl0.name, pg) + laygen.get_template_xy(itapbl0.cellname, pg) * np.array([0, 1])"
#mystr=" org=origin+laygen.get_inst_xy('I'+objectname_pfix+'INV1', pg)+ laygen.get_template_xy(i1.cellname, pg) * np.array([1, 0])"
#if l_vanilla.startswith(mystr):
# print(c, depth, trig_refac_arg_readout, trig_refac_arg, s_buf)
trig_copy = 1 # trigger to copy c to l_refac without modifications
trig_refac_arg = 0 # trigger to refactor arguments after readout
if c == ' ' and s_buf == '': # ignore spaces between commas and indents
pass
else:
if c == ')' or c == ']': # exit bracket decrease depth
if depth == 1: # end of function call
if s_buf == '': # some functions have something like , ):
pass
else:
if trig_refac_arg_readout == 1:
trig_refac_arg = 1
trig = 0 #end of function call
trig_refac_arg_readout = 0 # reset readout trigger
depth -= 1
if c == ',' and depth == 1 and trig_refac_arg_readout==1: # if the comma is argument splitter,
trig_refac_arg = 1
elif c == '\n' and depth == 1: # newline
if s_buf == '': #no captured argument, the start of function call or captured well - just copy and paste
pass
else: # maybe the end of function call - do refactoring
if trig_refac_arg_readout == 1:
trig_refac_arg = 1
else:
if (depth >= 1) and (trig_refac_arg_readout == 1):
s_buf += c
trig_copy = 0
if c == '(' or c == '[': # go inside bracket. increase depth
depth += 1
if (depth == 1) and (l_refac[(-1*len(func_name)):]==func_name):
trig_refac_arg_readout = 1 #found the right spot. Start readout
if trig_refac_arg == 1: #s_buf filled, do refactoring
if '=' in s_buf: #named argument, just copy and paste
l_refac += s_buf
else: #positional argument!
l_refac += l_token_arg[arg_index] + ' = ' + s_buf
arg_index += 1 #move to next argument
s_buf = '' #flush s_buf
if trig_copy == 1:
l_refac += c
#if c=='\n':
# print('newline', depth, trig_copy)
print(" before refactoring: "+l_vanilla[:-1]) #remove newline for neat plotting
print(" after refactoring: "+l_refac[:-1])
#print(len(l),len(l_refac))
lines_o.append(l_refac) #
else: #normal codes, just copy and paste
lines_o.append(l)
# write source code
with open(filename_o, 'w') as f:
for l in lines_o:
f.write(l)
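# --- Illustrative before/after for convert_pos_to_named (added sketch; the
# argument names come from the pin_from_rect parameter list documented in
# the next function's docstring):
#
#   laygen.pin_from_rect('A', laygen.layers['pin'][2], ra, rg12)
# becomes
#   laygen.pin_from_rect(name='A', layer=laygen.layers['pin'][2], rect=ra, gridname=rg12)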
def convert_pin_from_rect_to_pin(filename_i, filename_o):
"""
Convert obsolete pin_from_rect function calls to pin.
Assumes the original function calls have already been refactored to use named arguments only.
ex)
laygen.pin_from_rect(name='A', layer=laygen.layers['pin'][2], rect=ra, gridname=rg12)
will be converted to
laygen.pin(name='A', gridname=rg12, refobj=ra)
parameter mapping)
name -> name
layer -> layer or discard(?)
rect -> refobj
gridname -> gridname
netname -> netname
"""
# GridLayoutGenerator parameters
laygen_instance = "laygen"
func_name = "pin_from_rect"
func_name_new = "pin"
param_map = {
'name':'name',
'layer':'layer',
'rect':'refobj',
'gridname':'gridname',
'netname':'netname'
}
# read source code
with open(filename_i, 'r') as f:
lines_i = f.readlines()
print("file " + filename_i + " loaded")
# refactor
trig = 0 # trigger for multiline call
depth = 0 # depth variable to find out arguments
lines_o = [] # output buffer
for i, l in enumerate(lines_i):
if laygen_instance + '.' + func_name in l:
trig = 1
l_header = l.split(laygen_instance + '.' + func_name)[0]
depth = -1 * (l_header.count('(') + l_header.count(
'[')) # if brackets are before the function call, need to decrease initial depth value
print("function " + func_name + " call detected in file: " + filename_i + ", in line:" + str(
i) + " code snapshot: " + l[:-1])
if trig == 1:
l_vanilla = l # copy original one
l = l.replace(laygen_instance + '.' + func_name, laygen_instance + '.' + func_name_new)
l_refac = '' # refactored line
s_buf = '' # string buffer to store arguments
for c in l:
trig_copy = 1 # trigger to copy c to l_refac without modifications
trig_refac_arg = 0 # trigger to refactor arguments after readout
if c == ' ' and s_buf == '': # ignore spaces between commas and indents
pass
else:
if c == ')' or c == ']': # exit bracket decrease depth
if depth == 1: # end of function call
if s_buf == '': # some functions have something like , ):
pass
else:
trig_refac_arg = 1
trig = 0 # end of function call
depth -= 1
if c == ',' and depth == 1: # if the comma is argument splitter,
trig_refac_arg = 1
elif c == '\n' and depth == 1: # newline
if s_buf == '': # no captured argument, the start of function call or captured well - just copy and paste
pass
else: # maybe the end of function call - do refactoring
trig_refac_arg = 1
else:
if depth >= 1:
s_buf += c
trig_copy = 0
if c == '(' or c == '[': # go inside bracket. increase depth
depth += 1
if trig_refac_arg == 1: # s_buf filled, do refactoring
if '=' in s_buf: # named argument, just copy and paste
token_s_buf=s_buf.split('=')
key=token_s_buf[0].strip()
token_s_buf[0] = token_s_buf[0].replace(key, param_map[key])
token_s_buf[0] += '='
l_refac += "".join(token_s_buf)
else: # positional argument, ERROR!
raise Exception("This refactoring function assumes function calls named arguments only. Exiting (do revert)")
s_buf = '' # flush s_buf
if trig_copy == 1:
l_refac += c
# if c=='\n':
# print('newline', depth, trig_copy)
print(" before refactoring: " + l_vanilla[:-1]) # remove newline for neat plotting
print(" after refactoring: " + l_refac[:-1])
# print(len(l),len(l_refac))
lines_o.append(l_refac) #
else: # normal codes, just copy and paste
lines_o.append(l)
# write source code
with open(filename_o, 'w') as f:
for l in lines_o:
f.write(l)
def convert_get_obj_xy_to_get_xy(filename_i, filename_o, func_name):
"""
Convert obsolete get_inst_xy/get_template_xy/get_rect_xy calls to get_xy, under the following assumptions:
1. The functions being refactored are defined in GridLayoutGenerator.py
2. GridLayoutGenerator is instantiated as laygen
"""
# GridLayoutGenerator parameters
laygen_path="../../GridLayoutGenerator.py"
laygen_instance="laygen"
func_name_new="get_xy"
# read source code
with open(filename_i, 'r') as f:
lines_i = f.readlines()
print("file " + filename_i + " loaded")
# refactor
trig = 0 # trigger for multiline call
depth = 0 # depth variable to find out arguments
lines_o = [] # output buffer
for i, l in enumerate(lines_i):
if laygen_instance + '.' + func_name in l:
trig = 1
l_header = l.split(laygen_instance + '.' + func_name)[0]
depth = -1*(l_header.count('(') + l_header.count('[')) + (l_header.count(')') + l_header.count(']')) #if brackets are before the function call, need to decrease initial depth value
print("function " + func_name + " call detected in file: "+ filename_i + ", in line:"+ str(i) +" code snapshot: " + l[:-1])
if trig == 1:
l_vanilla = l #copy original one
l = l.replace(laygen_instance + '.' + func_name, laygen_instance + '.' + func_name_new)
l_refac = '' #refactored line
s_buf = '' #string buffer to store arguments
trig_refac_arg_readout = 0 # trigger to readout arguments
for c in l:
#mystr=" rect_xy_list = [laygen.get_rect_xy(name=r.name, gridname=gridname, sort=True) for r in rect_list]"
#mystr=" laygen.pin(name='VREF_M5_2<2>', layer=laygen.layers['pin'][5], xy=laygen.get_rect_xy(rvref2v2.name, gridname=rg_m4m5), gridname=rg_m4m5, netname='VREF<2>')"
#mystr=" diffpair_origin = laygen.get_inst_xy(itapbl0.name, pg) + laygen.get_template_xy(itapbl0.cellname, pg) * np.array([0, 1])"
#mystr=" org=origin+laygen.get_inst_xy('I'+objectname_pfix+'INV1', pg)+ laygen.get_template_xy(i1.cellname, pg) * np.array([1, 0])"
#if l_vanilla.startswith(mystr):
# print(c, depth, trig_refac_arg_readout, trig_refac_arg, s_buf)
trig_copy = 1 # trigger to copy c to l_refac without modifications
trig_refac_arg = 0 # trigger to refactor arguments after readout
if c == ' ' and s_buf == '': # ignore spaces between commas and indents
pass
else:
if c == ')' or c == ']': # exit bracket decrease depth
if depth == 1: # end of function call
if s_buf == '': # some functions have something like , ):
pass
else:
if trig_refac_arg_readout == 1:
trig_refac_arg = 1
trig = 0 #end of function call
trig_refac_arg_readout = 0 # reset readout trigger
depth -= 1
if c == ',' and depth == 1 and trig_refac_arg_readout==1: # if the comma is argument splitter,
trig_refac_arg = 1
elif c == '\n' and depth == 1: # newline
if s_buf == '': #no captured argument, the start of function call or captured well - just copy and paste
pass
else: # maybe the end of function call - do refactoring
if trig_refac_arg_readout == 1:
trig_refac_arg = 1
else:
if (depth >= 1) and (trig_refac_arg_readout == 1):
s_buf += c
trig_copy = 0
if c == '(' or c == '[': # go inside bracket. increase depth
depth += 1
if (depth == 1) and (l_refac[(-1*len(func_name_new)):]==func_name_new):
trig_refac_arg_readout = 1 #found the right spot. Start readout
if trig_refac_arg == 1: #s_buf filled, do refactoring
if '=' in s_buf: #named argument
token_s_buf = s_buf.split('=')
print(token_s_buf)
if token_s_buf[0].strip()=='name':
if '.name' in token_s_buf[1]:
token_s_buf[1] = token_s_buf[1].replace('.name', '')
token_s_buf[0] = 'obj ='
l_refac += "".join(token_s_buf)
elif '.cellname' in token_s_buf[1]:
token_s_buf[1] = token_s_buf[1].replace('.cellname', '.template')
token_s_buf[0] = 'obj ='
l_refac += "".join(token_s_buf)
else:
if func_name == 'get_inst_xy':
l_refac += 'obj ='+laygen_instance+'.get_inst('+s_buf+')'
elif func_name == 'get_template_xy':
l_refac += 'obj=' + laygen_instance + '.get_template(' + s_buf + '%%PLACEHOLDER_FOR_LIBNAME%%)'
elif func_name == 'get_rect_xy':
l_refac += 'obj ='+laygen_instance+'.get_rect('+s_buf+')'
else:
raise Exception("check this")
elif token_s_buf[0].strip()=='libname':
if func_name == 'get_template_xy':
l_refac = l_refac.replace('%%PLACEHOLDER_FOR_LIBNAME%%', ', '+s_buf)
else:
l_refac += s_buf
else: #positional argument, ERROR!
raise Exception(
"This refactoring function assumes function calls named arguments only. Exiting (do revert)")
s_buf = '' #flush s_buf
if trig_copy == 1:
l_refac += c
#if c=='\n':
# print('newline', depth, trig_copy)
if func_name == 'get_template_xy':
l_refac = l_refac.replace('%%PLACEHOLDER_FOR_LIBNAME%%','')
l_refac = l_refac.replace(', )', ')')
print(" before refactoring: "+l_vanilla[:-1]) #remove newline for neat plotting
print(" after refactoring: "+l_refac[:-1])
#print(len(l),len(l_refac))
lines_o.append(l_refac) #
else: #normal codes, just copy and paste
lines_o.append(l)
# write source code
with open(filename_o, 'w') as f:
for l in lines_o:
f.write(l)
if __name__ == '__main__':
files_include_special = [
'nand_demo.py',
'sarsamp_golden_example.py',
'lab1_a_baselayoutgenerator_export.py',
'lab1_b_baselayoutgenerator_import.py',
'lab1_c_cds_ff_mpt_generate_primitives.py',
'lab1_c_faketech_generate_primitives.py',
'lab2_a_gridlayoutgenerator_constructtemplate.py',
'lab2_b_gridlayoutgenerator_layoutexercise.py',
'lab2_c_gridlayoutgenerator_logictemplate.py',
'lab2_d_gridlayoutgenerator_layoutexercise_2.py',
] #files to be refactored but not with _layout_generator suffix
'''
#positional to named - single run example
#filename_i = "../logic/logic_templates_layout_generator.py"
#filename_o = "../logic/logic_templates_layout_generator_refactored.py"
filename_i = "../serdes/serdes_layout_generator.py"
filename_o = "../serdes/serdes_layout_generator_refactored.py"
# func_name = "pin_from_rect"
func_name = "relplace"
convert_pos_to_named(filename_i=filename_i, filename_o=filename_o, func_name=func_name)
'''
'''
#positional to named - massive run over multiple directories, functions
dir_list = ["./", "../adc_sar/", "../golden/", "../logic/", "../serdes/", "../../labs/"]
func_list=["get_template_xy", "get_inst_xy", "get_rect_xy", "get_pin_xy"]
for dir in dir_list:
file_list=os.listdir(dir)
for file in file_list:
if file.endswith('_layout_generator.py') or any((file == fn) for fn in files_include_special):
filename=dir+file
for func in func_list:
convert_pos_to_named(filename_i=filename, filename_o=filename, func_name=func)
'''
'''
#pin_from_rect to pin - single run example
filename_i = "../serdes/ser_layout_generator.py"
filename_o = "../serdes/ser_layout_generator_refactored.py"
convert_pin_from_rect_to_pin(filename_i=filename_i, filename_o=filename_o)
'''
'''
#pin_from_rect to pin - massive run over multiple directories, functions
dir_list=["./", "../adc_sar/", "../golden/", "../logic/", "../serdes/", "../../labs/"]
for dir in dir_list:
file_list=os.listdir(dir)
for file in file_list:
if file.endswith('_layout_generator.py') or any((file == fn) for fn in files_include_special):
filename=dir+file
convert_pin_from_rect_to_pin(filename_i=filename, filename_o=filename)
'''
#get_(obj)_xy to get_obj - massive run over multiple directories, functions
dir_list=["./", "../adc_sar/", "../golden/", "../logic/", "../serdes/", "../../labs/"]
#func_list = ["get_template_xy", "get_inst_xy", "get_rect_xy", "get_pin_xy"]
#func_list = ["get_inst_xy", "get_rect_xy", "get_pin_xy"]
func_list = ["get_template_xy"]
for dir in dir_list:
file_list=os.listdir(dir)
for file in file_list:
if file.endswith('_layout_generator.py') or any((file == fn) for fn in files_include_special):
filename=dir+file
for func in func_list:
convert_get_obj_xy_to_get_xy(filename_i=filename, filename_o=filename, func_name=func)
| 51.974478
| 192
| 0.515781
| 2,608
| 22,401
| 4.196702
| 0.103911
| 0.024121
| 0.036181
| 0.027775
| 0.844861
| 0.807949
| 0.78995
| 0.762905
| 0.751485
| 0.734308
| 0
| 0.012754
| 0.380474
| 22,401
| 430
| 193
| 52.095349
| 0.775904
| 0.268827
| 0
| 0.742671
| 0
| 0
| 0.111189
| 0.034694
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009772
| false
| 0.035831
| 0.006515
| 0
| 0.016287
| 0.04886
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46a6e3723c911dab40b187953c25d6976a753954
| 189
|
py
|
Python
|
tests/parser/aggregates.count.satisfied.1.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/aggregates.count.satisfied.1.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/aggregates.count.satisfied.1.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
c(1) | c(2).
a(X) :- c(X).
p(4).
okay(X) :- p(Y), c(X), #count{V : a(V)} <Y.
"""
output = """
c(1) | c(2).
a(X) :- c(X).
p(4).
okay(X) :- p(Y), c(X), #count{V : a(V)} <Y.
"""
| 14.538462
| 44
| 0.354497
| 44
| 189
| 1.522727
| 0.295455
| 0.119403
| 0.089552
| 0.119403
| 0.835821
| 0.835821
| 0.835821
| 0.835821
| 0.835821
| 0.835821
| 0
| 0.040268
| 0.21164
| 189
| 12
| 45
| 15.75
| 0.409396
| 0
| 0
| 0.833333
| 0
| 0.166667
| 0.835979
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d3cdcc89af21ea74d3e16db04cf539ab3e880e19
| 7
|
py
|
Python
|
tests/python3_aug_assign/aug_assign.py
|
hixio-mh/plugin-python
|
d59dca4b6166dc20eec3e7aa57b0649c072507ce
|
[
"MIT"
] | 362
|
2018-02-17T10:25:11.000Z
|
2022-03-30T21:04:59.000Z
|
tests/python3_aug_assign/aug_assign.py
|
hixio-mh/plugin-python
|
d59dca4b6166dc20eec3e7aa57b0649c072507ce
|
[
"MIT"
] | 70
|
2018-02-17T04:00:14.000Z
|
2019-08-21T18:01:52.000Z
|
tests/python3_aug_assign/aug_assign.py
|
hixio-mh/plugin-python
|
d59dca4b6166dc20eec3e7aa57b0649c072507ce
|
[
"MIT"
] | 36
|
2018-02-18T23:11:25.000Z
|
2021-09-20T07:19:36.000Z
|
a @= 1
| 3.5
| 6
| 0.285714
| 2
| 7
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.428571
| 7
| 1
| 7
| 7
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3108e03377e14cd95c1edf93cadb9408be13746c
| 24,638
|
py
|
Python
|
proteus/tests/test_sedclosure.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
proteus/tests/test_sedclosure.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
proteus/tests/test_sedclosure.py
|
yuxianglin/proteus
|
ac5d5223410b1a1f270615f987e9cf327fb802af
|
[
"NASA-1.3"
] | null | null | null |
from proteus import Comm, Profiling
import numpy as np
import numpy.testing as npt
import unittest
import pytest
comm = Comm.init()
Profiling.procID = comm.rank()
Profiling.logEvent("Testing SedClosure")
class GlobalVariables():
def __init__(self):
from proteus.mprans.SedClosure import HsuSedStress
self.C4e = 1.
self.C3e = 1.2
self.eR = 0.8
self.aDarcy = 1.
self.bForch = 1.
self.grain = 0.1
self.packFraction = 0.2
self.packMargin = 0.01
self.sigmaC = 1.1
self.maxFraction = 0.635
self.frFraction = 0.57
self.fContact = 0.02
self.mContact = 2.
self.nContact = 5.
self.angFriction = np.pi/6.
self.sedSt = HsuSedStress( self.aDarcy, self.bForch, self.grain, self.packFraction, self.packMargin,self.maxFraction, self.frFraction, self.sigmaC, self.C3e, self.C4e, self.eR ,self.fContact, self.mContact, self.nContact, self.angFriction)
class TestHsu(unittest.TestCase):
def testGranularDrag1(self):
gl = GlobalVariables()
import random
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
rhoFluid = 1. + random.random()
umag_temp = (uf - us)*(uf - us)
umag = np.sqrt(sum(umag_temp))
# Testing for sedF >> gl.packFraction
nu = 1.
sedF = 0.5
drag = (gl.aDarcy * nu* sedF /((1.-sedF)*gl.grain**2) + gl.bForch * umag / gl.grain)*rhoFluid
# For sedF > pacFraction - > drag = a * nu* sedF /((1-sedF)*gl.grain^2) + beta * umag / gl.grain
drag2 = gl.sedSt.betaCoeff(sedF, rhoFluid, uf, us, nu)
if(drag2 != 0):
drag /=drag2
drag2/=drag2
npt.assert_almost_equal(drag,drag2)
@pytest.mark.skip(reason="in development")
def testGranularDrag2(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
umag_temp = (uf - us)*(uf - us)
umag = np.sqrt(sum(umag_temp))
# Testing for sedF << gl.packFraction and Rep = (1. - sed) * umag*nu/gl.grain = 0.9 * 5. * 0.1 / 1. = 0.45
nu = 1.
sedF = 0.1
Rep = (1.- sedF)*umag*gl.grain / nu
drag = rhoFluid * ( 24. * (1.+0.15*Rep**(0.687))/Rep) * 0.75 * umag * (1. -sedF)**(-1.65) / gl.grain # Chen and Hsu 2014
drag2 = gl.sedSt.betaCoeff(sedF, rhoFluid, uf, us, nu)
if(drag2 != 0):
drag /=drag2
drag2/=drag2
#if you use npt.assert_almost_equal you get more info on failure...
#self.assertTrue(round(drag,10) == round(drag2,10))
npt.assert_almost_equal(drag,drag2)
@pytest.mark.skip(reason="in development")
def testGranularDrag3(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
umag_temp = (uf - us)*(uf - us)
umag = np.sqrt(sum(umag_temp))
# Testing for sedF << gl.packFraction and Rep > 1000
sedF = 0.1
nu = 1e-4
Rep = (1.- sedF) * umag * gl.grain / nu
drag = rhoFluid * ( 0.44 * 0.75 * umag * (1. -sedF)**(-1.65) )/ gl.grain # Chen and Hsu 2014
drag2 = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
if(drag2 != 0):
drag /=drag2
drag2/=drag2
#self.assertTrue(round(drag,10) == round(drag2,10))
npt.assert_almost_equal(drag, drag2)
@pytest.mark.skip(reason="in development")
def testGranularDrag4(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
# Constant params
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
umag_temp = (uf - us)*(uf - us)
umag = np.sqrt(sum(umag_temp))
# Testing for sedF = gl.packFraction +0.5 packmargin and Rep > 1000
sedF = 0.205
nu = 1e-4
Rep = (1.- sedF) * umag * gl.grain / nu
draga = gl.aDarcy * nu* sedF /((1.-sedF)*gl.grain**2) + gl.bForch * umag / gl.grain
dragb = ( 0.44 * 0.75 * umag * (1. -sedF)**(-1.65) )/ gl.grain # Chen and Hsu 2014
w = 0.5 + (sedF - gl.packFraction) / (2. * gl.packMargin)
drag =rhoFluid* (w*draga + (1.-w) * dragb)
drag2 = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
if(drag2 != 0):
drag /=drag2
drag2/=drag2
#self.assertTrue(round(drag,10) == round(drag2,10))
npt.assert_almost_equal(drag, drag2)
def testgs0(self):
gl=GlobalVariables()
f = 8
sedF = 0.2
gs0 = gl.sedSt.gs0(sedF)
self.assertTrue(gs0 == 0.5*(2-sedF)/(1-sedF)**3)
sedF = 0.55
gs0 = gl.sedSt.gs0(sedF)
self.assertTrue(round(gs0,f) ==round( 0.5*(2-0.49)/(1-0.49)**3 * (0.64-0.49)/(0.64-sedF),f))
sedF = 0.65
gs0 = gl.sedSt.gs0(sedF)
self.assertTrue(round(gs0,f) == round(0.5*(2-0.49)/(1-0.49)**3 * (0.64-0.49)/(0.64-0.635),f))
def testTkeSed(self):
gl=GlobalVariables()
import random
rhoFluid = 10. + random.random()
f = 10
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradC=np.array([0.1,0.1])
rhoS = 2000
nu = 1
nuT = 1
sedF = 0.3
# Setting 0 t_c
theta_n = 0.25 + 0.25*random.random() + 1e-30
kappa_n = 0.1 + 0.1*random.random() + 1e-30
kappa_np1 = 0.1 + 0.1 * random.random() + 1e-30
epsilon_n = 0.1 + 0.1 * random.random() + 1e-30
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
t_p =rhoS/ beta
l_c = np.sqrt(np.pi)*gl.grain / (24.*sedF * gl.sedSt.gs0(sedF))
t_cl = min(l_c/np.sqrt(theta_n) , 0.165*kappa_n/epsilon_n)
aa = 1/( 1. + t_p/t_cl)
es1 = 2.*beta * rhoS*(1-aa)*sedF*kappa_np1/((1-sedF)*rhoFluid)
kappa_sed = gl.sedSt.kappa_sed1(sedF,rhoFluid,rhoS,uf,us,gradC,nu,theta_n,kappa_n,kappa_np1,epsilon_n,nuT)
if es1!=0:
kappa_sed/=es1
es1/=es1
self.assertTrue(round(kappa_sed,f) ==round( -es1,f))
def test_dTkeSed_dk(self):
gl=GlobalVariables()
import random
rhoFluid = 1000. + random.random()
f = 8
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradC=np.array([0.1,0.1])
rhoS = 2000
nu = 1e-2
nuT = 1e-2
sedF = 0.3
# Setting 0 t_c
theta_n = 0.25 + 0.25*random.random() + 1e-30
kappa_n = 0.1 + 0.1*random.random() + 1e-30
kappa_np1 = 0.1 + 0.1 * random.random() + 1e-30
epsilon_n = 0.1 + 0.1 * random.random() + 1e-30
beta = gl.sedSt.betaCoeff(sedF,rhoFluid, uf, us, nu)
t_p =rhoS/ beta
l_c = np.sqrt(np.pi)*gl.grain / (24.*sedF * gl.sedSt.gs0(sedF))
t_cl = min(l_c/np.sqrt(theta_n),0.165*kappa_n/epsilon_n)
aa = 1/( 1. + t_p/t_cl)
es1 = 2.*beta * rhoS*(1-aa)*sedF/((1-sedF)*rhoFluid)
kappa_sed = gl.sedSt.dkappa_sed1_dk(sedF,rhoFluid,rhoS,uf,us,gradC,nu,theta_n,kappa_n,epsilon_n,nuT)
if es1!=0:
kappa_sed/=es1
es1/=es1
self.assertTrue(round(kappa_sed,f) ==round( -es1,f))
def testTkeSed2(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
f = 10
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradC=np.array([0.1,0.1])
rhoS = 2000
nu = 1e-4
nuT = 1e-2
sedF = 0.3
# Setting 0 t_c
theta_n = random.random() + 1e-30
kappa_n = random.random() + 1e-30
kappa_np1 = random.random() + 1e-30
epsilon_n = random.random() + 1e-30
beta = gl.sedSt.betaCoeff(sedF,rhoFluid, uf, us, nu)
UgradC = np.dot((uf - us),gradC)
es2 = beta * rhoFluid * nuT * UgradC / ((1-sedF)*rhoFluid)
kappa_sed2 = gl.sedSt.kappa_sed2(sedF,rhoFluid,rhoS,uf,us,gradC,nu,theta_n,kappa_n,epsilon_n,nuT)
if es2!=0:
kappa_sed2/=es2
es2/=es2
self.assertTrue(round(kappa_sed2,f) ==round( es2,f))
def testEpsSed(self):
gl=GlobalVariables()
import random
rhoFluid = 1000. + random.random()
f = 8
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradC=np.array([0.1,0.1])
rhoS = 2000
nu = 1e-4
nuT = 1e-2
sedF = 0.3
# Setting 0 t_c
theta_n = random.random() + 1e-30
kappa_n = random.random() + 1e-30
epsilon_n = random.random() + 1e-30
epsilon_np1 = random.random() + 1e-30
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
t_p =rhoS/ beta
l_c = np.sqrt(np.pi)*gl.grain / (24.*sedF * gl.sedSt.gs0(sedF))
t_cl = min(l_c/np.sqrt(theta_n),0.165*kappa_n/epsilon_n)
aa = 1/( 1. + t_p/t_cl)
es1 = 2.*beta * rhoS*(1-aa)*sedF*kappa_n/((1-sedF)*rhoFluid)
UgradC = np.dot((uf - us),gradC)
es2 = beta * rhoFluid * nuT * UgradC / ((1-sedF)*rhoFluid)
eps_sed = gl.sedSt.eps_sed(sedF,rhoFluid,rhoS,uf,us,gradC,nu,theta_n,kappa_n,epsilon_n, epsilon_np1,nuT)
valid = -gl.C3e*es1*epsilon_np1/kappa_n+gl.C4e*es2*epsilon_np1/kappa_n
if(valid!=0.):
eps_sed/=valid
valid/=valid
self.assertTrue(round(eps_sed,f) ==round(valid ,f))
def test_dEpsSed_dE(self):
gl=GlobalVariables()
import random
rhoFluid = 1000. + random.random()
f = 8
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradC=np.array([0.1,0.1])
rhoS = 2000
nu = 1e-4
nuT = 1e-2
sedF = 0.3
# Setting 0 t_c
theta_n = random.random() + 1e-30
kappa_n = random.random() + 1e-30
epsilon_n = random.random() + 1e-30
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
t_p =rhoS/ beta
l_c = np.sqrt(np.pi)*gl.grain / (24.*sedF * gl.sedSt.gs0(sedF))
t_cl = min(l_c/np.sqrt(theta_n),0.165*kappa_n/epsilon_n)
aa = 1/( 1. + t_p/t_cl)
es1 = 2.*beta * rhoS*(1-aa)*sedF*kappa_n/((1-sedF)*rhoFluid)
UgradC = np.dot((uf - us),gradC)
es2 = beta * rhoFluid * nuT * UgradC / ((1-sedF)*rhoFluid)
eps_sed = gl.sedSt.deps_sed_deps(sedF,rhoFluid,rhoS,uf,us,gradC,nu,theta_n,kappa_n,epsilon_n,nuT)
valid = -gl.C3e*es1/kappa_n+gl.C4e*es2/kappa_n
if(valid!=0.):
eps_sed/=valid
valid/=valid
self.assertTrue(round(eps_sed,f) ==round(valid,f))
def testPsc(self):
gl=GlobalVariables()
import random
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
rhoS = 2000
psc = gl.sedSt.psc(sedF,rhoS,theta)
self.assertTrue(round(psc,f) ==round(rhoS*sedF*(1. + 2*(1.+gl.eR)*sedF*gl.sedSt.gs0(sedF))*theta,f))
def testPscTerm(self):
gl=GlobalVariables()
import random
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
dudx = random.random() + 1e-30
dvdy = random.random() + 1e-30
dwdz = random.random() + 1e-30
divU = dudx + dvdy + dwdz
rhoS = 2000
test = gl.sedSt.psc_term(sedF,rhoS,theta,dudx,dvdy,dwdz)
self.assertTrue(round(test,f) == round(-2.*gl.sedSt.psc(sedF,rhoS,theta)*divU/(3.*rhoS*sedF),f))
def testdpsc_term_dtheta(self):
gl=GlobalVariables()
import random
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
dudx = random.random() + 1e-30
dvdy = random.random() + 1e-30
dwdz = random.random() + 1e-30
divU = dudx + dvdy + dwdz
rhoS = 2000
test = gl.sedSt.dpsc_term_dtheta(sedF,rhoS,dudx,dvdy,dwdz)
self.assertTrue(round(test,f) == round(-2.*gl.sedSt.psc(sedF,rhoS,theta)*divU/(3.*rhoS*sedF)/theta,f))
def testMu_sc(self):
gl=GlobalVariables()
import random
sqrt = np.sqrt
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
rhoS = 2000
test = gl.sedSt.mu_sc(sedF,rhoS,theta)
g0 = gl.sedSt.gs0(sedF)
valid = rhoS * gl.grain * sqrt(theta) * ( 0.8 *sedF**2 * g0 * (1. + gl.eR) / sqrt(np.pi) + (1./15) *sedF**2 * g0 * (1. + gl.eR) * sqrt(np.pi) + (1./6.) *sedF * sqrt(np.pi) + (5./48.) * sqrt(np.pi)/((1+gl.eR)*g0))
self.assertTrue(round(test,f) == round(valid,f))
def testL_sc(self):
gl=GlobalVariables()
import random
sqrt = np.sqrt
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
rhoS = 2000
test = gl.sedSt.l_sc(sedF,rhoS,theta)
g0 = gl.sedSt.gs0(sedF)
valid = (4./3.)*sedF**2 * rhoS * gl.grain *g0*(1+gl.eR)* (sqrt(theta) / sqrt(np.pi) )
self.assertTrue(round(test,f) == round(valid,f))
def test_tsc_term(self):
gl=GlobalVariables()
import random
sqrt = np.sqrt
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
dudx = random.random() + 1e-30
dudy = random.random() + 1e-30
dudz = random.random() + 1e-30
dvdx = random.random() + 1e-30
dvdy = random.random() + 1e-30
dvdz = random.random() + 1e-30
dwdx = random.random() + 1e-30
dwdy = random.random() + 1e-30
dwdz = random.random() + 1e-30
rhoS = 2000
test = gl.sedSt.tausc_term_theta(sedF,rhoS,theta,dudx,dudy, dudz, dvdx, dvdy, dvdz, dwdx, dwdy, dwdz)
divU = dudx + dvdy + dwdz
mu = gl.sedSt.mu_sc(sedF, rhoS, theta)
l = gl.sedSt.l_sc(sedF, rhoS, theta)
s_tensor = np.array([ [ 2.*dudx , dudy+dvdx, dudz+dwdx],
[ dudy+dvdx, 2.*dvdy, dvdz+dwdy],
[ dudz+dwdx, dvdz+dwdy, 2.* dwdz]])
t_tensor = mu * s_tensor + (l - (2./3.) * mu) * divU * np.array([ [ 1 , 0 , 0],
[ 0, 1, 0],
[ 0, 0, 1] ])
product = s_tensor * t_tensor
valid = 0.
for i in product:
for j in i:
valid+=j / (3.*rhoS*sedF)
self.assertTrue(round(test,f) == round(valid,f))
def testgamma_s(self):
gl=GlobalVariables()
import random
sqrt = np.sqrt
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
theta_np1 = random.random() + 1e-30
rhoS = 2000
g0 = gl.sedSt.gs0(sedF)
dudx = random.random() + 1e-30
dvdy = random.random() + 1e-30
dwdz = random.random() + 1e-30
test = gl.sedSt.gamma_s(sedF,rhoS,theta , theta_np1, dudx, dvdy, dwdz)
divU = dudx + dvdy + dwdz
valid = - 3. * (1. - gl.eR**2) * sedF**2 * rhoS * g0 * theta_np1 * ( (sqrt(theta)/sqrt(np.pi)) * (4./gl.grain) - divU) * (2./(3. * rhoS * sedF))
self.assertTrue(round(test,f) == round(valid,f))
def testdgamma_s_dtheta(self):
gl=GlobalVariables()
import random
sqrt = np.sqrt
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
rhoS = 2000
g0 = gl.sedSt.gs0(sedF)
dudx = random.random() + 1e-30
dvdy = random.random() + 1e-30
dwdz = random.random() + 1e-30
test = gl.sedSt.dgamma_s_dtheta(sedF,rhoS,theta , dudx, dvdy, dwdz)
divU = dudx + dvdy + dwdz
valid = - 3. * (1. - gl.eR**2) * sedF**2 * rhoS * g0 * ( (sqrt(theta)/sqrt(np.pi)) * (4./gl.grain) - divU) * (2./(3. * rhoS * sedF))
self.assertTrue(round(test,f) == round(valid,f))
def testJint1(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
f = 8
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradC=np.array([0.1,0.1])
rhoS = 2000
nu = 1e-4
nuT = 1e-2
sedF = 0.3
# Setting 0 t_c
theta_n = random.random() + 1e-30
kappa_n = random.random() + 1e-30
epsilon_n = random.random() + 1e-30
beta = gl.sedSt.betaCoeff(sedF, rhoFluid, uf, us, nu)
t_p =rhoS / beta
l_c = np.sqrt(np.pi)*gl.grain / (24.*sedF * gl.sedSt.gs0(sedF))
t_cl = min(l_c/np.sqrt(theta_n),0.165*kappa_n/epsilon_n)
aa = 1/( 1. + t_p/t_cl)
es1 = 2.*beta * rhoS*(1-aa)*sedF*kappa_n/((1-sedF)*rhoFluid)
UgradC = np.dot((uf - us),gradC)
es2 = beta * rhoFluid * nuT * UgradC / ((1-sedF)*rhoFluid)
test = gl.sedSt.jint1(sedF,rhoFluid, rhoS, uf,us, kappa_n,epsilon_n, theta_n, nu)
self.assertTrue(round(test,f) ==round(2*aa*beta*sedF*kappa_n*(2./(3.*sedF*rhoS)),f ))
def testJint2(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
f = 8
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradC=np.array([0.1,0.1])
rhoS = 2000
nu = 1e-4
nuT = 1e-2
sedF = 0.3
# Setting 0 t_c
theta_n = random.random() + 1e-30
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
test = gl.sedSt.jint2(sedF,rhoFluid, rhoS,uf,us, theta_n, nu)
self.assertTrue(round(test,f) ==round(-3*beta*sedF*theta_n*(2./(3.*sedF*rhoS)),f ))
def testJint2dTheta(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
f = 8
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradC=np.array([0.1,0.1])
rhoS = 2000
nu = 1e-4
nuT = 1e-2
sedF = 0.3
# Setting 0 t_c
theta_n = random.random() + 1e-30
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
test = gl.sedSt.djint2_dtheta(sedF,rhoFluid, rhoS,uf,us, nu)
self.assertTrue(round(test,f) ==round(-3*beta*sedF*(2./(3.*sedF*rhoS)),f ))
def testK_diff(self):
gl=GlobalVariables()
import random
sqrt = np.sqrt
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
rhoS = 2000
test = gl.sedSt.k_diff(sedF,rhoS,theta)
g0 = gl.sedSt.gs0(sedF)
valid = rhoS * gl.grain * sqrt(theta) * ( 2. *sedF**2 * g0 * (1. + gl.eR) / sqrt(np.pi) + (9./16) *sedF**2 * g0 * (1. + gl.eR) * sqrt(np.pi) + (15./16.) *sedF * sqrt(np.pi) + (25./64.) * sqrt(np.pi)/((1+gl.eR)*g0))
self.assertTrue(round(test,f) == round(valid,f))
def test_mu_fr(self):
gl=GlobalVariables()
import random
sqrt = np.sqrt
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
dudx = random.random() + 1e-30
dudy = random.random() + 1e-30
dudz = random.random() + 1e-30
dvdx = random.random() + 1e-30
dvdy = random.random() + 1e-30
dvdz = random.random() + 1e-30
dwdx = random.random() + 1e-30
dwdy = random.random() + 1e-30
dwdz = random.random() + 1e-30
rhoS = 2000
test = gl.sedSt.mu_fr(sedF,dudx,dudy, dudz, dvdx, dvdy, dvdz, dwdx, dwdy, dwdz)
divU = dudx + dvdy + dwdz
s_tensor = 0.5* np.array([ [ 2.*dudx -(2./3.)*divU , dudy+dvdx, dudz+dwdx],
[ dudy+dvdx, 2.*dvdy-(2./3.)*divU, dvdz+dwdy],
[ dudz+dwdx, dvdz+dwdy, 2.* (2./3.) * dwdz]])
product = s_tensor * s_tensor
valid = 0.
for i in product:
for j in i:
valid+=j / (3.*rhoS*sedF)
valid = sqrt(2)*gl.sedSt.p_friction(sedF)*np.sin(gl.angFriction)/2./valid
self.assertTrue(round(test,f) == round(valid,f))
def test_p_s(self):
gl=GlobalVariables()
import random
sqrt = np.sqrt
f = 10
# Setting 0 t_c
sedF = 0.3
theta = random.random() + 1e-30
dudx = random.random() + 1e-30
dudy = random.random() + 1e-30
dudz = random.random() + 1e-30
dvdx = random.random() + 1e-30
dvdy = random.random() + 1e-30
dvdz = random.random() + 1e-30
dwdx = random.random() + 1e-30
dwdy = random.random() + 1e-30
dwdz = random.random() + 1e-30
rhoS = 2000
test = gl.sedSt.tausc_term_theta(sedF,rhoS,theta,dudx,dudy, dudz, dvdx, dvdy, dvdz, dwdx, dwdy, dwdz)
divU = dudx + dvdy + dwdz
mu = gl.sedSt.mu_sc(sedF, rhoS, theta)
muf = gl.sedSt.mu_fr(sedF,dudx,dudy, dudz, dvdx, dvdy, dvdz, dwdx, dwdy, dwdz)
l = gl.sedSt.l_sc(sedF, rhoS, theta)
test = gl.sedSt.p_s( sedF, rhoS, theta, dudx, dudy, dudz, dvdx, dvdy, dvdz, dwdx, dwdy, dwdz)
valid =gl.sedSt.p_friction(sedF) + gl.sedSt.psc(sedF, rhoS, theta) + (2./3.*(mu+muf) - l)*divU
self.assertTrue(round(test,f) == round(valid,f))
def testMintFluid(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
ufp1 = np.array([5.,4.],"d")
usp1 = np.array([1.,1.],"d")
gradc = np.array([0.1,0.1],"d")
sedF = 0.205
nu = 1e-4
nuT = 1e-2
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
mint = gl.sedSt.mIntFluid(sedF, rhoFluid, uf, us, ufp1, nu, nuT, gradc)
self.assertTrue(round(mint.all(),10) == round((-sedF*beta*(ufp1)/(1.-sedF)/rhoFluid ).all() , 10))
def testMintSolid(self):
gl=GlobalVariables()
import random
rhoFluid = 1. + random.random()
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
ufp1 = np.array([5.,4.],"d")
usp1 = np.array([1.,1.],"d")
gradc = np.array([0.1,0.1],"d")
sedF = 0.205
nu = 1e-4
nuT = 1e-2
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
mint = gl.sedSt.mIntSolid(sedF, rhoFluid, uf, us, usp1 , nu, nuT, gradc)
self.assertTrue(round(mint.all(),10) == round((-sedF*beta*( - usp1) / (1.-sedF)).all() , 10))
def testMintgradC(self):
import random
gl=GlobalVariables()
rhoFluid = 1. + random.random()
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
ufp1 = np.array([5.,4.],"d")
usp1 = np.array([1.,1.],"d")
gradc = np.array([0.1,0.1],"d")
sedF = 0.205
nu = 1e-4
nuT = 1e-2
beta = gl.sedSt.betaCoeff(sedF,rhoFluid, uf, us, nu)
mint = gl.sedSt.mIntgradC(sedF,rhoFluid, uf, us , nu, nuT, gradc)
self.assertTrue(round(mint.all(),10) == round(( - sedF * beta * gradc * nuT / gl.sigmaC / rhoFluid / (1.-sedF)).all() , 10))
def testdMintdUf(self):
import random
rhoFluid = 1. + random.random()
gl=GlobalVariables()
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradc = np.array([0.1,0.1],"d")
sedF = 0.205
nu = 1e-4
nuT = 1e-2
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
mint = gl.sedSt.dmInt_duFluid(sedF, rhoFluid, uf, us , nu)
self.assertTrue(round(mint,10) == round( - sedF*beta/(1.-sedF)/rhoFluid , 10))
def testdMintdUs(self):
import random
gl=GlobalVariables()
rhoFluid = 1. + random.random()
gl=GlobalVariables()
uf = np.array([5.,4.],"d")
us = np.array([1.,1.],"d")
gradc = np.array([0.1,0.1],"d")
sedF = 0.205
nu = 1e-4
nuT = 1e-2
beta = gl.sedSt.betaCoeff(sedF, rhoFluid,uf, us, nu)
mint = gl.sedSt.dmInt_duSolid(sedF, rhoFluid, uf, us , nu)
self.assertTrue(round(mint,10) == round( sedF*beta/(1.-sedF)/rhoFluid , 10))
if __name__ == '__main__':
unittest.main(verbosity=2)
| 35.348637
| 248
| 0.516235
| 3,537
| 24,638
| 3.531807
| 0.064744
| 0.088377
| 0.084054
| 0.096061
| 0.82837
| 0.804515
| 0.789946
| 0.774896
| 0.766411
| 0.743516
| 0
| 0.070618
| 0.320643
| 24,638
| 696
| 249
| 35.399425
| 0.675708
| 0.036732
| 0
| 0.753927
| 0
| 0
| 0.004768
| 0
| 0
| 0
| 0
| 0
| 0.054101
| 1
| 0.052356
| false
| 0
| 0.059337
| 0
| 0.115183
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3127311009fa113ef4c71fbe59bb63132385f583
| 7,541
|
py
|
Python
|
src/configs/configs_swin_unet.py
|
pabloduque0/wmh_jax
|
a8b59017c5cc36d52e85a6709722cac69f6015ef
|
[
"Apache-2.0"
] | null | null | null |
src/configs/configs_swin_unet.py
|
pabloduque0/wmh_jax
|
a8b59017c5cc36d52e85a6709722cac69f6015ef
|
[
"Apache-2.0"
] | null | null | null |
src/configs/configs_swin_unet.py
|
pabloduque0/wmh_jax
|
a8b59017c5cc36d52e85a6709722cac69f6015ef
|
[
"Apache-2.0"
] | null | null | null |
import ml_collections
import flax.linen as nn
from src.models import metrics
from src.augmentation import augmentation_functions
from src.train import eval_steps
from src.train import train_steps
from src import utils
import jax.numpy as jnp
import optax
from jax import random
def get_initial_config():
"""Get the hyperparameter configuration to train on TPUs."""
config = ml_collections.ConfigDict()
config.general_config = ml_collections.ConfigDict()
config.general_config.model_name = "SwinUnet"
config.general_config.data_path = "data/processed/noskull_stand_noaug"
config.general_config.multi_label = False
config.general_config.pad_crop_function = jnp.pad
config.general_config.pad_crop_kwargs = {"pad_width": ((0, 0), (12, 12), (12, 12), (0, 0)), "mode": "constant", "constant_values": 0}
config.general_config.call_kwargs = {"x": jnp.ones((1, 224, 224, 2))}
config.general_config.key_rngs = {"drop_path": random.PRNGKey(0)}
config.train_config = ml_collections.ConfigDict()
#config.train_config.loss_function = metrics.dice_loss
config.train_config.train_step_func = train_steps.train_step_dsc_loss
config.train_config.eval_step_func = eval_steps.eval_step_simple
config.train_config.optimizer = optax.adam
config.train_config.batch_size = 8
config.train_config.epochs = 100
#config.train_config.metrics_to_calc = (metrics.dice_coefficient, )
config.model_config = ml_collections.ConfigDict()
config.model_config.img_size = (224, 224)
config.model_config.patch_size = (4, 4)
config.model_config.in_chans = 2
config.model_config.num_classes = 1
config.model_config.embed_dim = 96
config.model_config.depths = (2, 2, 2, 2)
config.model_config.depths_decoder = (1, 2, 2, 2)
config.model_config.num_heads = (3, 6, 12, 24)
config.model_config.window_size = 7
config.model_config.mlp_ratio = 4.
config.model_config.qkv_bias = True
config.model_config.qk_scale = None
config.model_config.drop_rate = 0.
config.model_config.attn_drop_rate = 0.
config.model_config.drop_path_rate = 0.1
config.model_config.norm_layer = nn.LayerNorm
config.model_config.ape = False
config.model_config.patch_norm = True
config.model_config.use_checkpoint = False
config.model_config.final_upsample = "expand_first"
config.optimizer_config = ml_collections.ConfigDict()
config.optimizer_config.learning_rate = 0.0001
config.augment_config = ml_collections.ConfigDict()
config.augment_config.aug_function = augmentation_functions.no_augmentation
config.augment_config.m = None
config.augment_config.n = None
hash_ = utils.dict_hash(config.to_dict())
config.group_name = f"base_swin_unet_{hash_}"
return config
def get_augment_config():
"""Get the hyperparameter configuration to train on TPUs."""
config = ml_collections.ConfigDict()
config.general_config = ml_collections.ConfigDict()
config.general_config.model_name = "SwinUnet"
config.general_config.data_path = "data/processed/noskull_stand_noaug"
config.general_config.multi_label = False
config.general_config.pad_crop_function = jnp.pad
config.general_config.pad_crop_kwargs = {"pad_width": ((0, 0), (12, 12), (12, 12), (0, 0)), "mode": "constant", "constant_values": 0}
config.general_config.call_kwargs = {"x": jnp.ones((1, 224, 224, 2))}
config.general_config.key_rngs = {"drop_path": random.PRNGKey(0)}
config.train_config = ml_collections.ConfigDict()
#config.train_config.loss_function = metrics.dice_loss
config.train_config.train_step_func = train_steps.train_step_dsc_loss
config.train_config.eval_step_func = eval_steps.eval_step_simple
config.train_config.optimizer = optax.adam
config.train_config.batch_size = 8
config.train_config.epochs = 100
#config.train_config.metrics_to_calc = (metrics.dice_coefficient, )
config.model_config = ml_collections.ConfigDict()
config.model_config.img_size = (224, 224)
config.model_config.patch_size = (4, 4)
config.model_config.in_chans = 2
config.model_config.num_classes = 1
config.model_config.embed_dim = 96
config.model_config.depths = (2, 2, 2, 2)
config.model_config.depths_decoder = (1, 2, 2, 2)
config.model_config.num_heads = (3, 6, 12, 24)
config.model_config.window_size = 7
config.model_config.mlp_ratio = 4.
config.model_config.qkv_bias = True
config.model_config.qk_scale = None
config.model_config.drop_rate = 0.
config.model_config.attn_drop_rate = 0.
config.model_config.drop_path_rate = 0.1
config.model_config.norm_layer = nn.LayerNorm
config.model_config.ape = False
config.model_config.patch_norm = True
config.model_config.use_checkpoint = False
config.model_config.final_upsample = "expand_first"
config.optimizer_config = ml_collections.ConfigDict()
config.optimizer_config.learning_rate = 0.0001
config.augment_config = ml_collections.ConfigDict()
config.augment_config.aug_function = augmentation_functions.base_custom_randaugment
config.augment_config.m = 22
config.augment_config.n = 5
hash_ = utils.dict_hash(config.to_dict())
config.group_name = f"base_swin_unet_{hash_}"
return config
def get_augment_config_2():
"""Get the hyperparameter configuration to train on TPUs."""
config = ml_collections.ConfigDict()
config.general_config = ml_collections.ConfigDict()
config.general_config.model_name = "SwinUnet"
config.general_config.data_path = "data/processed/noskull_stand_noaug"
config.general_config.multi_label = False
config.general_config.pad_crop_function = jnp.pad
config.general_config.pad_crop_kwargs = {"pad_width": ((0, 0), (12, 12), (12, 12), (0, 0)), "mode": "constant", "constant_values": 0}
config.general_config.call_kwargs = {"x": jnp.ones((1, 224, 224, 2))}
config.general_config.key_rngs = {"drop_path": random.PRNGKey(0)}
config.train_config = ml_collections.ConfigDict()
#config.train_config.loss_function = metrics.dice_loss
config.train_config.train_step_func = train_steps.train_step_dsc_loss
config.train_config.eval_step_func = eval_steps.eval_step_simple
config.train_config.optimizer = optax.adam
config.train_config.batch_size = 8
config.train_config.epochs = 120
#config.train_config.metrics_to_calc = (metrics.dice_coefficient, )
config.model_config = ml_collections.ConfigDict()
config.model_config.img_size = (224, 224)
config.model_config.patch_size = (4, 4)
config.model_config.in_chans = 2
config.model_config.num_classes = 1
config.model_config.embed_dim = 96
config.model_config.depths = (2, 2, 2, 2)
config.model_config.depths_decoder = (1, 2, 2, 2)
config.model_config.num_heads = (3, 6, 12, 24)
config.model_config.window_size = 7
config.model_config.mlp_ratio = 4.
config.model_config.qkv_bias = True
config.model_config.qk_scale = None
config.model_config.drop_rate = 0.
config.model_config.attn_drop_rate = 0.
config.model_config.drop_path_rate = 0.1
config.model_config.norm_layer = nn.LayerNorm
config.model_config.ape = False
config.model_config.patch_norm = True
config.model_config.use_checkpoint = False
config.model_config.final_upsample = "expand_first"
config.optimizer_config = ml_collections.ConfigDict()
config.optimizer_config.learning_rate = 0.0002
config.augment_config = ml_collections.ConfigDict()
config.augment_config.aug_function = augmentation_functions.base_custom_randaugment
config.augment_config.m = 22
config.augment_config.n = 6
hash_ = utils.dict_hash(config.to_dict())
config.group_name = f"base_swin_unet_{hash_}"
return config
| 41.20765
| 135
| 0.775096
| 1,108
| 7,541
| 4.957581
| 0.126354
| 0.132168
| 0.194975
| 0.09503
| 0.94229
| 0.94229
| 0.94229
| 0.94229
| 0.94229
| 0.94229
| 0
| 0.029478
| 0.118287
| 7,541
| 183
| 136
| 41.20765
| 0.796661
| 0.069222
| 0
| 0.864865
| 0
| 0
| 0.052293
| 0.024003
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02027
| false
| 0
| 0.067568
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
314110d71491dd870f76bd611a5707a3d34e856d
| 89
|
py
|
Python
|
tests/test_py_pkg_setup.py
|
techdragon/python-py-pkg-setup
|
a7d589a1f606e6aee84fcf384b932184b1255021
|
[
"MIT"
] | null | null | null |
tests/test_py_pkg_setup.py
|
techdragon/python-py-pkg-setup
|
a7d589a1f606e6aee84fcf384b932184b1255021
|
[
"MIT"
] | null | null | null |
tests/test_py_pkg_setup.py
|
techdragon/python-py-pkg-setup
|
a7d589a1f606e6aee84fcf384b932184b1255021
|
[
"MIT"
] | null | null | null |
import py_pkg_setup
def test_main():
assert py_pkg_setup # use your library here
| 12.714286
| 48
| 0.741573
| 15
| 89
| 4.066667
| 0.8
| 0.163934
| 0.327869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213483
| 89
| 6
| 49
| 14.833333
| 0.871429
| 0.235955
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
31711551a5c7b61380b5a1d219ff150dce0dd59c
| 49
|
py
|
Python
|
instance/config.py
|
Gichimu/news-articles
|
bd659b0823ca560b65e618bc8cb9fb0f47e31e19
|
[
"MIT"
] | null | null | null |
instance/config.py
|
Gichimu/news-articles
|
bd659b0823ca560b65e618bc8cb9fb0f47e31e19
|
[
"MIT"
] | 2
|
2019-11-17T20:33:12.000Z
|
2019-11-17T20:33:12.000Z
|
instance/config.py
|
Gichimu/news-articles
|
bd659b0823ca560b65e618bc8cb9fb0f47e31e19
|
[
"MIT"
] | null | null | null |
NEWS_API_KEY = 'ad09e241ac7841fcba80da29e5d64351'
| 49
| 49
| 0.897959
| 4
| 49
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.404255
| 0.040816
| 49
| 1
| 49
| 49
| 0.489362
| 0
| 0
| 0
| 0
| 0
| 0.64
| 0.64
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
31869ff9169909832b91929abeb9d72fd6a06dbb
| 52,112
|
py
|
Python
|
sdk/python/pulumi_akamai/gtm_domain.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-01-21T15:22:12.000Z
|
2021-08-25T14:15:29.000Z
|
sdk/python/pulumi_akamai/gtm_domain.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 59
|
2020-08-13T14:39:36.000Z
|
2022-03-31T15:19:48.000Z
|
sdk/python/pulumi_akamai/gtm_domain.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['GtmDomainArgs', 'GtmDomain']
@pulumi.input_type
class GtmDomainArgs:
def __init__(__self__, *,
type: pulumi.Input[str],
cname_coalescing_enabled: Optional[pulumi.Input[bool]] = None,
comment: Optional[pulumi.Input[str]] = None,
contract: Optional[pulumi.Input[str]] = None,
default_error_penalty: Optional[pulumi.Input[int]] = None,
default_ssl_client_certificate: Optional[pulumi.Input[str]] = None,
default_ssl_client_private_key: Optional[pulumi.Input[str]] = None,
default_timeout_penalty: Optional[pulumi.Input[int]] = None,
email_notification_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
end_user_mapping_enabled: Optional[pulumi.Input[bool]] = None,
group: Optional[pulumi.Input[str]] = None,
load_feedback: Optional[pulumi.Input[bool]] = None,
load_imbalance_percentage: Optional[pulumi.Input[float]] = None,
name: Optional[pulumi.Input[str]] = None,
wait_on_complete: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a GtmDomain resource.
"""
pulumi.set(__self__, "type", type)
if cname_coalescing_enabled is not None:
pulumi.set(__self__, "cname_coalescing_enabled", cname_coalescing_enabled)
if comment is not None:
pulumi.set(__self__, "comment", comment)
if contract is not None:
pulumi.set(__self__, "contract", contract)
if default_error_penalty is not None:
pulumi.set(__self__, "default_error_penalty", default_error_penalty)
if default_ssl_client_certificate is not None:
pulumi.set(__self__, "default_ssl_client_certificate", default_ssl_client_certificate)
if default_ssl_client_private_key is not None:
pulumi.set(__self__, "default_ssl_client_private_key", default_ssl_client_private_key)
if default_timeout_penalty is not None:
pulumi.set(__self__, "default_timeout_penalty", default_timeout_penalty)
if email_notification_lists is not None:
pulumi.set(__self__, "email_notification_lists", email_notification_lists)
if end_user_mapping_enabled is not None:
pulumi.set(__self__, "end_user_mapping_enabled", end_user_mapping_enabled)
if group is not None:
pulumi.set(__self__, "group", group)
if load_feedback is not None:
pulumi.set(__self__, "load_feedback", load_feedback)
if load_imbalance_percentage is not None:
pulumi.set(__self__, "load_imbalance_percentage", load_imbalance_percentage)
if name is not None:
pulumi.set(__self__, "name", name)
if wait_on_complete is not None:
pulumi.set(__self__, "wait_on_complete", wait_on_complete)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[str]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="cnameCoalescingEnabled")
def cname_coalescing_enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "cname_coalescing_enabled")
@cname_coalescing_enabled.setter
def cname_coalescing_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "cname_coalescing_enabled", value)
@property
@pulumi.getter
def comment(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "comment")
@comment.setter
def comment(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "comment", value)
@property
@pulumi.getter
def contract(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "contract")
@contract.setter
def contract(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "contract", value)
@property
@pulumi.getter(name="defaultErrorPenalty")
def default_error_penalty(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "default_error_penalty")
@default_error_penalty.setter
def default_error_penalty(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_error_penalty", value)
@property
@pulumi.getter(name="defaultSslClientCertificate")
def default_ssl_client_certificate(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "default_ssl_client_certificate")
@default_ssl_client_certificate.setter
def default_ssl_client_certificate(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_ssl_client_certificate", value)
@property
@pulumi.getter(name="defaultSslClientPrivateKey")
def default_ssl_client_private_key(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "default_ssl_client_private_key")
@default_ssl_client_private_key.setter
def default_ssl_client_private_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_ssl_client_private_key", value)
@property
@pulumi.getter(name="defaultTimeoutPenalty")
def default_timeout_penalty(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "default_timeout_penalty")
@default_timeout_penalty.setter
def default_timeout_penalty(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_timeout_penalty", value)
@property
@pulumi.getter(name="emailNotificationLists")
def email_notification_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "email_notification_lists")
@email_notification_lists.setter
def email_notification_lists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "email_notification_lists", value)
@property
@pulumi.getter(name="endUserMappingEnabled")
def end_user_mapping_enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "end_user_mapping_enabled")
@end_user_mapping_enabled.setter
def end_user_mapping_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "end_user_mapping_enabled", value)
@property
@pulumi.getter
def group(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "group")
@group.setter
def group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group", value)
@property
@pulumi.getter(name="loadFeedback")
def load_feedback(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "load_feedback")
@load_feedback.setter
def load_feedback(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "load_feedback", value)
@property
@pulumi.getter(name="loadImbalancePercentage")
def load_imbalance_percentage(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "load_imbalance_percentage")
@load_imbalance_percentage.setter
def load_imbalance_percentage(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "load_imbalance_percentage", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="waitOnComplete")
def wait_on_complete(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "wait_on_complete")
@wait_on_complete.setter
def wait_on_complete(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "wait_on_complete", value)
@pulumi.input_type
class _GtmDomainState:
def __init__(__self__, *,
cname_coalescing_enabled: Optional[pulumi.Input[bool]] = None,
comment: Optional[pulumi.Input[str]] = None,
contract: Optional[pulumi.Input[str]] = None,
default_error_penalty: Optional[pulumi.Input[int]] = None,
default_health_max: Optional[pulumi.Input[float]] = None,
default_health_multiplier: Optional[pulumi.Input[float]] = None,
default_health_threshold: Optional[pulumi.Input[float]] = None,
default_max_unreachable_penalty: Optional[pulumi.Input[int]] = None,
default_ssl_client_certificate: Optional[pulumi.Input[str]] = None,
default_ssl_client_private_key: Optional[pulumi.Input[str]] = None,
default_timeout_penalty: Optional[pulumi.Input[int]] = None,
default_unreachable_threshold: Optional[pulumi.Input[float]] = None,
email_notification_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
end_user_mapping_enabled: Optional[pulumi.Input[bool]] = None,
group: Optional[pulumi.Input[str]] = None,
load_feedback: Optional[pulumi.Input[bool]] = None,
load_imbalance_percentage: Optional[pulumi.Input[float]] = None,
map_update_interval: Optional[pulumi.Input[int]] = None,
max_properties: Optional[pulumi.Input[int]] = None,
max_resources: Optional[pulumi.Input[int]] = None,
max_test_timeout: Optional[pulumi.Input[float]] = None,
max_ttl: Optional[pulumi.Input[int]] = None,
min_pingable_region_fraction: Optional[pulumi.Input[float]] = None,
min_test_interval: Optional[pulumi.Input[int]] = None,
min_ttl: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
ping_interval: Optional[pulumi.Input[int]] = None,
ping_packet_size: Optional[pulumi.Input[int]] = None,
round_robin_prefix: Optional[pulumi.Input[str]] = None,
servermonitor_liveness_count: Optional[pulumi.Input[int]] = None,
servermonitor_load_count: Optional[pulumi.Input[int]] = None,
servermonitor_pool: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
wait_on_complete: Optional[pulumi.Input[bool]] = None):
"""
Input properties used for looking up and filtering GtmDomain resources.
"""
if cname_coalescing_enabled is not None:
pulumi.set(__self__, "cname_coalescing_enabled", cname_coalescing_enabled)
if comment is not None:
pulumi.set(__self__, "comment", comment)
if contract is not None:
pulumi.set(__self__, "contract", contract)
if default_error_penalty is not None:
pulumi.set(__self__, "default_error_penalty", default_error_penalty)
if default_health_max is not None:
pulumi.set(__self__, "default_health_max", default_health_max)
if default_health_multiplier is not None:
pulumi.set(__self__, "default_health_multiplier", default_health_multiplier)
if default_health_threshold is not None:
pulumi.set(__self__, "default_health_threshold", default_health_threshold)
if default_max_unreachable_penalty is not None:
pulumi.set(__self__, "default_max_unreachable_penalty", default_max_unreachable_penalty)
if default_ssl_client_certificate is not None:
pulumi.set(__self__, "default_ssl_client_certificate", default_ssl_client_certificate)
if default_ssl_client_private_key is not None:
pulumi.set(__self__, "default_ssl_client_private_key", default_ssl_client_private_key)
if default_timeout_penalty is not None:
pulumi.set(__self__, "default_timeout_penalty", default_timeout_penalty)
if default_unreachable_threshold is not None:
pulumi.set(__self__, "default_unreachable_threshold", default_unreachable_threshold)
if email_notification_lists is not None:
pulumi.set(__self__, "email_notification_lists", email_notification_lists)
if end_user_mapping_enabled is not None:
pulumi.set(__self__, "end_user_mapping_enabled", end_user_mapping_enabled)
if group is not None:
pulumi.set(__self__, "group", group)
if load_feedback is not None:
pulumi.set(__self__, "load_feedback", load_feedback)
if load_imbalance_percentage is not None:
pulumi.set(__self__, "load_imbalance_percentage", load_imbalance_percentage)
if map_update_interval is not None:
pulumi.set(__self__, "map_update_interval", map_update_interval)
if max_properties is not None:
pulumi.set(__self__, "max_properties", max_properties)
if max_resources is not None:
pulumi.set(__self__, "max_resources", max_resources)
if max_test_timeout is not None:
pulumi.set(__self__, "max_test_timeout", max_test_timeout)
if max_ttl is not None:
pulumi.set(__self__, "max_ttl", max_ttl)
if min_pingable_region_fraction is not None:
pulumi.set(__self__, "min_pingable_region_fraction", min_pingable_region_fraction)
if min_test_interval is not None:
pulumi.set(__self__, "min_test_interval", min_test_interval)
if min_ttl is not None:
pulumi.set(__self__, "min_ttl", min_ttl)
if name is not None:
pulumi.set(__self__, "name", name)
if ping_interval is not None:
pulumi.set(__self__, "ping_interval", ping_interval)
if ping_packet_size is not None:
pulumi.set(__self__, "ping_packet_size", ping_packet_size)
if round_robin_prefix is not None:
pulumi.set(__self__, "round_robin_prefix", round_robin_prefix)
if servermonitor_liveness_count is not None:
pulumi.set(__self__, "servermonitor_liveness_count", servermonitor_liveness_count)
if servermonitor_load_count is not None:
pulumi.set(__self__, "servermonitor_load_count", servermonitor_load_count)
if servermonitor_pool is not None:
pulumi.set(__self__, "servermonitor_pool", servermonitor_pool)
if type is not None:
pulumi.set(__self__, "type", type)
if wait_on_complete is not None:
pulumi.set(__self__, "wait_on_complete", wait_on_complete)
@property
@pulumi.getter(name="cnameCoalescingEnabled")
def cname_coalescing_enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "cname_coalescing_enabled")
@cname_coalescing_enabled.setter
def cname_coalescing_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "cname_coalescing_enabled", value)
@property
@pulumi.getter
def comment(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "comment")
@comment.setter
def comment(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "comment", value)
@property
@pulumi.getter
def contract(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "contract")
@contract.setter
def contract(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "contract", value)
@property
@pulumi.getter(name="defaultErrorPenalty")
def default_error_penalty(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "default_error_penalty")
@default_error_penalty.setter
def default_error_penalty(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_error_penalty", value)
@property
@pulumi.getter(name="defaultHealthMax")
def default_health_max(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "default_health_max")
@default_health_max.setter
def default_health_max(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "default_health_max", value)
@property
@pulumi.getter(name="defaultHealthMultiplier")
def default_health_multiplier(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "default_health_multiplier")
@default_health_multiplier.setter
def default_health_multiplier(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "default_health_multiplier", value)
@property
@pulumi.getter(name="defaultHealthThreshold")
def default_health_threshold(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "default_health_threshold")
@default_health_threshold.setter
def default_health_threshold(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "default_health_threshold", value)
@property
@pulumi.getter(name="defaultMaxUnreachablePenalty")
def default_max_unreachable_penalty(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "default_max_unreachable_penalty")
@default_max_unreachable_penalty.setter
def default_max_unreachable_penalty(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_max_unreachable_penalty", value)
@property
@pulumi.getter(name="defaultSslClientCertificate")
def default_ssl_client_certificate(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "default_ssl_client_certificate")
@default_ssl_client_certificate.setter
def default_ssl_client_certificate(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_ssl_client_certificate", value)
@property
@pulumi.getter(name="defaultSslClientPrivateKey")
def default_ssl_client_private_key(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "default_ssl_client_private_key")
@default_ssl_client_private_key.setter
def default_ssl_client_private_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "default_ssl_client_private_key", value)
@property
@pulumi.getter(name="defaultTimeoutPenalty")
def default_timeout_penalty(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "default_timeout_penalty")
@default_timeout_penalty.setter
def default_timeout_penalty(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "default_timeout_penalty", value)
@property
@pulumi.getter(name="defaultUnreachableThreshold")
def default_unreachable_threshold(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "default_unreachable_threshold")
@default_unreachable_threshold.setter
def default_unreachable_threshold(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "default_unreachable_threshold", value)
@property
@pulumi.getter(name="emailNotificationLists")
def email_notification_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "email_notification_lists")
@email_notification_lists.setter
def email_notification_lists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "email_notification_lists", value)
@property
@pulumi.getter(name="endUserMappingEnabled")
def end_user_mapping_enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "end_user_mapping_enabled")
@end_user_mapping_enabled.setter
def end_user_mapping_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "end_user_mapping_enabled", value)
@property
@pulumi.getter
def group(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "group")
@group.setter
def group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "group", value)
@property
@pulumi.getter(name="loadFeedback")
def load_feedback(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "load_feedback")
@load_feedback.setter
def load_feedback(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "load_feedback", value)
@property
@pulumi.getter(name="loadImbalancePercentage")
def load_imbalance_percentage(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "load_imbalance_percentage")
@load_imbalance_percentage.setter
def load_imbalance_percentage(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "load_imbalance_percentage", value)
@property
@pulumi.getter(name="mapUpdateInterval")
def map_update_interval(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "map_update_interval")
@map_update_interval.setter
def map_update_interval(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "map_update_interval", value)
@property
@pulumi.getter(name="maxProperties")
def max_properties(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_properties")
@max_properties.setter
def max_properties(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_properties", value)
@property
@pulumi.getter(name="maxResources")
def max_resources(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_resources")
@max_resources.setter
def max_resources(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_resources", value)
@property
@pulumi.getter(name="maxTestTimeout")
def max_test_timeout(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "max_test_timeout")
@max_test_timeout.setter
def max_test_timeout(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "max_test_timeout", value)
@property
@pulumi.getter(name="maxTtl")
def max_ttl(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_ttl")
@max_ttl.setter
def max_ttl(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_ttl", value)
@property
@pulumi.getter(name="minPingableRegionFraction")
def min_pingable_region_fraction(self) -> Optional[pulumi.Input[float]]:
return pulumi.get(self, "min_pingable_region_fraction")
@min_pingable_region_fraction.setter
def min_pingable_region_fraction(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "min_pingable_region_fraction", value)
@property
@pulumi.getter(name="minTestInterval")
def min_test_interval(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_test_interval")
@min_test_interval.setter
def min_test_interval(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_test_interval", value)
@property
@pulumi.getter(name="minTtl")
def min_ttl(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_ttl")
@min_ttl.setter
def min_ttl(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_ttl", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="pingInterval")
def ping_interval(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "ping_interval")
@ping_interval.setter
def ping_interval(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ping_interval", value)
@property
@pulumi.getter(name="pingPacketSize")
def ping_packet_size(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "ping_packet_size")
@ping_packet_size.setter
def ping_packet_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ping_packet_size", value)
@property
@pulumi.getter(name="roundRobinPrefix")
def round_robin_prefix(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "round_robin_prefix")
@round_robin_prefix.setter
def round_robin_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "round_robin_prefix", value)
@property
@pulumi.getter(name="servermonitorLivenessCount")
def servermonitor_liveness_count(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "servermonitor_liveness_count")
@servermonitor_liveness_count.setter
def servermonitor_liveness_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "servermonitor_liveness_count", value)
@property
@pulumi.getter(name="servermonitorLoadCount")
def servermonitor_load_count(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "servermonitor_load_count")
@servermonitor_load_count.setter
def servermonitor_load_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "servermonitor_load_count", value)
@property
@pulumi.getter(name="servermonitorPool")
def servermonitor_pool(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "servermonitor_pool")
@servermonitor_pool.setter
def servermonitor_pool(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "servermonitor_pool", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="waitOnComplete")
def wait_on_complete(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "wait_on_complete")
@wait_on_complete.setter
def wait_on_complete(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "wait_on_complete", value)
class GtmDomain(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cname_coalescing_enabled: Optional[pulumi.Input[bool]] = None,
comment: Optional[pulumi.Input[str]] = None,
contract: Optional[pulumi.Input[str]] = None,
default_error_penalty: Optional[pulumi.Input[int]] = None,
default_ssl_client_certificate: Optional[pulumi.Input[str]] = None,
default_ssl_client_private_key: Optional[pulumi.Input[str]] = None,
default_timeout_penalty: Optional[pulumi.Input[int]] = None,
email_notification_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
end_user_mapping_enabled: Optional[pulumi.Input[bool]] = None,
group: Optional[pulumi.Input[str]] = None,
load_feedback: Optional[pulumi.Input[bool]] = None,
load_imbalance_percentage: Optional[pulumi.Input[float]] = None,
name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
wait_on_complete: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Use the `GtmDomain` resource to create, configure, and import a GTM Domain, which is a basic building block of a traffic management configuration.
> **Note** Import requires an ID with this format: `existing_domain_name`.
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
demodomain = akamai.GtmDomain("demodomain",
comment="some comment",
contract="XXX",
group="100",
type="basic")
```
## Argument reference
This resource supports these arguments:
* `contract` - (Required) If creating a domain, the contract ID.
* `group` - (Required) If creating a domain, the currently selected group ID.
* `name` - (Required) The DNS name for a collection of GTM Properties.
* `type` - (Required) The type of GTM domain. Options include `failover-only`, `static`, `weighted`, `basic`, or `full`.
* `wait_on_complete` - (Optional) A boolean that, if set to `true`, waits for the transaction to complete.
* `comment` - (Optional) A descriptive note about changes to the domain. The maximum is 4000 characters.
* `email_notification_lists` - (Optional) A list of email addresses to notify when a change is made to the domain.
* `default_timeout_penalty` - (Optional) Specifies the timeout penalty score. Default is `25`.
* `load_imbalance_percentage` - (Optional) Indicates the percentage of load imbalance factor (LIF) for the domain.
* `default_ssl_client_private_key` - (Optional) Specifies a Base64-encoded private key that corresponds with the TLS certificate for HTTPS, SMTPS, POPS, and TCPS liveness tests.
* `default_error_penalty` - (Optional) Specifies the download penalty score. The default is `75`. If the download encounters an error, the web agent computes a score that is either the download time in seconds or a penalty score.
* `cname_coalescing_enabled` - (Optional) A boolean that, if set to `true`, causes GTM to collapse CNAME redirections in DNS answers when it knows the target of the CNAME.
* `load_feedback` - (Optional) A boolean indicating whether one or more measurements of load (resources) are defined by you and supplied by each data center in real time to balance load.
* `default_ssl_client_certificate` - (Optional) Specifies an optional Base64-encoded certificate that corresponds with the private key for TLS-based liveness tests (HTTPS, SMTPS, POPS, and TCPS).
* `end_user_mapping_enabled` - (Optional) A boolean indicating whether the GTM Domain is using end-user client subnet mapping.
## Attribute reference
This resource returns these computed attributes in the state file:
* `default_unreachable_threshold`
* `min_pingable_region_fraction`
* `servermonitor_liveness_count`
* `round_robin_prefix`
* `servermonitor_load_count`
* `ping_interval`
* `max_ttl`
* `default_health_max`
* `map_update_interval`
* `max_properties`
* `max_resources`
* `default_error_penalty`
* `max_test_timeout`
* `default_health_multiplier`
* `servermonitor_pool`
* `min_ttl`
* `default_max_unreachable_penalty`
* `default_health_threshold`
* `min_test_interval`
* `ping_packet_size`
## Schema reference
You can download the GTM Domain backing schema from the [Global Traffic Management API](https://developer.akamai.com/api/web_performance/global_traffic_management/v1.html#domain) page.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: GtmDomainArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Use the `GtmDomain` resource to create, configure, and import a GTM Domain, which is a basic building block of a traffic management configuration.
> **Note** Import requires an ID with this format: `existing_domain_name`.
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
demodomain = akamai.GtmDomain("demodomain",
comment="some comment",
contract="XXX",
group="100",
type="basic")
```
## Argument reference
This resource supports these arguments:
* `contract` - (Required) If creating a domain, the contract ID.
* `group` - (Required) If creating a domain, the currently selected group ID.
* `name` - (Required) The DNS name for a collection of GTM Properties.
* `type` - (Required) The type of GTM domain. Options include `failover-only`, `static`, `weighted`, `basic`, or `full`.
* `wait_on_complete` - (Optional) A boolean that, if set to `true`, waits for the transaction to complete.
* `comment` - (Optional) A descriptive note about changes to the domain. The maximum is 4000 characters.
* `email_notification_lists` - (Optional) A list of email addresses to notify when a change is made to the domain.
* `default_timeout_penalty` - (Optional) Specifies the timeout penalty score. Default is `25`.
* `load_imbalance_percentage` - (Optional) Indicates the percentage of load imbalance factor (LIF) for the domain.
* `default_ssl_client_private_key` - (Optional) Specifies a Base64-encoded private key that corresponds with the TLS certificate for HTTPS, SMTPS, POPS, and TCPS liveness tests.
* `default_error_penalty` - (Optional) Specifies the download penalty score. The default is `75`. If the download encounters an error, the web agent computes a score that is either the download time in seconds or a penalty score.
* `cname_coalescing_enabled` - (Optional) A boolean that, if set to `true`, causes GTM to collapse CNAME redirections in DNS answers when it knows the target of the CNAME.
* `load_feedback` - (Optional) A boolean indicating whether one or more measurements of load (resources) are defined by you and supplied by each data center in real time to balance load.
* `default_ssl_client_certificate` - (Optional) Specifies an optional Base64-encoded certificate that corresponds with the private key for TLS-based liveness tests (HTTPS, SMTPS, POPS, and TCPS).
* `end_user_mapping_enabled` - (Optional) A boolean indicating whether the GTM Domain is using end-user client subnet mapping.
## Attribute reference
This resource returns these computed attributes in the state file:
* `default_unreachable_threshold`
* `min_pingable_region_fraction`
* `servermonitor_liveness_count`
* `round_robin_prefix`
* `servermonitor_load_count`
* `ping_interval`
* `max_ttl`
* `default_health_max`
* `map_update_interval`
* `max_properties`
* `max_resources`
* `default_error_penalty`
* `max_test_timeout`
* `default_health_multiplier`
* `servermonitor_pool`
* `min_ttl`
* `default_max_unreachable_penalty`
* `default_health_threshold`
* `min_test_interval`
* `ping_packet_size`
## Schema reference
You can download the GTM Domain backing schema from the [Global Traffic Management API](https://developer.akamai.com/api/web_performance/global_traffic_management/v1.html#domain) page.
:param str resource_name: The name of the resource.
:param GtmDomainArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
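A minimal sketch of this overload (all argument values are placeholders):
```python
import pulumi_akamai as akamai
demodomain = akamai.GtmDomain("demodomain",
    args=akamai.GtmDomainArgs(
        contract="XXX",
        group="100",
        type="basic",
        comment="some comment"))
```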
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(GtmDomainArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cname_coalescing_enabled: Optional[pulumi.Input[bool]] = None,
comment: Optional[pulumi.Input[str]] = None,
contract: Optional[pulumi.Input[str]] = None,
default_error_penalty: Optional[pulumi.Input[int]] = None,
default_ssl_client_certificate: Optional[pulumi.Input[str]] = None,
default_ssl_client_private_key: Optional[pulumi.Input[str]] = None,
default_timeout_penalty: Optional[pulumi.Input[int]] = None,
email_notification_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
end_user_mapping_enabled: Optional[pulumi.Input[bool]] = None,
group: Optional[pulumi.Input[str]] = None,
load_feedback: Optional[pulumi.Input[bool]] = None,
load_imbalance_percentage: Optional[pulumi.Input[float]] = None,
name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
wait_on_complete: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = GtmDomainArgs.__new__(GtmDomainArgs)
__props__.__dict__["cname_coalescing_enabled"] = cname_coalescing_enabled
__props__.__dict__["comment"] = comment
__props__.__dict__["contract"] = contract
__props__.__dict__["default_error_penalty"] = default_error_penalty
__props__.__dict__["default_ssl_client_certificate"] = default_ssl_client_certificate
__props__.__dict__["default_ssl_client_private_key"] = default_ssl_client_private_key
__props__.__dict__["default_timeout_penalty"] = default_timeout_penalty
__props__.__dict__["email_notification_lists"] = email_notification_lists
__props__.__dict__["end_user_mapping_enabled"] = end_user_mapping_enabled
__props__.__dict__["group"] = group
__props__.__dict__["load_feedback"] = load_feedback
__props__.__dict__["load_imbalance_percentage"] = load_imbalance_percentage
__props__.__dict__["name"] = name
if type is None and not opts.urn:
raise TypeError("Missing required property 'type'")
__props__.__dict__["type"] = type
__props__.__dict__["wait_on_complete"] = wait_on_complete
__props__.__dict__["default_health_max"] = None
__props__.__dict__["default_health_multiplier"] = None
__props__.__dict__["default_health_threshold"] = None
__props__.__dict__["default_max_unreachable_penalty"] = None
__props__.__dict__["default_unreachable_threshold"] = None
__props__.__dict__["map_update_interval"] = None
__props__.__dict__["max_properties"] = None
__props__.__dict__["max_resources"] = None
__props__.__dict__["max_test_timeout"] = None
__props__.__dict__["max_ttl"] = None
__props__.__dict__["min_pingable_region_fraction"] = None
__props__.__dict__["min_test_interval"] = None
__props__.__dict__["min_ttl"] = None
__props__.__dict__["ping_interval"] = None
__props__.__dict__["ping_packet_size"] = None
__props__.__dict__["round_robin_prefix"] = None
__props__.__dict__["servermonitor_liveness_count"] = None
__props__.__dict__["servermonitor_load_count"] = None
__props__.__dict__["servermonitor_pool"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="akamai:trafficmanagement/gtmDomain:GtmDomain")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(GtmDomain, __self__).__init__(
'akamai:index/gtmDomain:GtmDomain',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
cname_coalescing_enabled: Optional[pulumi.Input[bool]] = None,
comment: Optional[pulumi.Input[str]] = None,
contract: Optional[pulumi.Input[str]] = None,
default_error_penalty: Optional[pulumi.Input[int]] = None,
default_health_max: Optional[pulumi.Input[float]] = None,
default_health_multiplier: Optional[pulumi.Input[float]] = None,
default_health_threshold: Optional[pulumi.Input[float]] = None,
default_max_unreachable_penalty: Optional[pulumi.Input[int]] = None,
default_ssl_client_certificate: Optional[pulumi.Input[str]] = None,
default_ssl_client_private_key: Optional[pulumi.Input[str]] = None,
default_timeout_penalty: Optional[pulumi.Input[int]] = None,
default_unreachable_threshold: Optional[pulumi.Input[float]] = None,
email_notification_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
end_user_mapping_enabled: Optional[pulumi.Input[bool]] = None,
group: Optional[pulumi.Input[str]] = None,
load_feedback: Optional[pulumi.Input[bool]] = None,
load_imbalance_percentage: Optional[pulumi.Input[float]] = None,
map_update_interval: Optional[pulumi.Input[int]] = None,
max_properties: Optional[pulumi.Input[int]] = None,
max_resources: Optional[pulumi.Input[int]] = None,
max_test_timeout: Optional[pulumi.Input[float]] = None,
max_ttl: Optional[pulumi.Input[int]] = None,
min_pingable_region_fraction: Optional[pulumi.Input[float]] = None,
min_test_interval: Optional[pulumi.Input[int]] = None,
min_ttl: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
ping_interval: Optional[pulumi.Input[int]] = None,
ping_packet_size: Optional[pulumi.Input[int]] = None,
round_robin_prefix: Optional[pulumi.Input[str]] = None,
servermonitor_liveness_count: Optional[pulumi.Input[int]] = None,
servermonitor_load_count: Optional[pulumi.Input[int]] = None,
servermonitor_pool: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
wait_on_complete: Optional[pulumi.Input[bool]] = None) -> 'GtmDomain':
"""
Get an existing GtmDomain resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
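A minimal sketch of a lookup (the resource name is hypothetical; the ID is the GTM domain name, matching the import format noted in the class docstring):
```python
import pulumi_akamai as akamai
imported = akamai.GtmDomain.get("imported-domain", id="existing_domain_name")
```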
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _GtmDomainState.__new__(_GtmDomainState)
__props__.__dict__["cname_coalescing_enabled"] = cname_coalescing_enabled
__props__.__dict__["comment"] = comment
__props__.__dict__["contract"] = contract
__props__.__dict__["default_error_penalty"] = default_error_penalty
__props__.__dict__["default_health_max"] = default_health_max
__props__.__dict__["default_health_multiplier"] = default_health_multiplier
__props__.__dict__["default_health_threshold"] = default_health_threshold
__props__.__dict__["default_max_unreachable_penalty"] = default_max_unreachable_penalty
__props__.__dict__["default_ssl_client_certificate"] = default_ssl_client_certificate
__props__.__dict__["default_ssl_client_private_key"] = default_ssl_client_private_key
__props__.__dict__["default_timeout_penalty"] = default_timeout_penalty
__props__.__dict__["default_unreachable_threshold"] = default_unreachable_threshold
__props__.__dict__["email_notification_lists"] = email_notification_lists
__props__.__dict__["end_user_mapping_enabled"] = end_user_mapping_enabled
__props__.__dict__["group"] = group
__props__.__dict__["load_feedback"] = load_feedback
__props__.__dict__["load_imbalance_percentage"] = load_imbalance_percentage
__props__.__dict__["map_update_interval"] = map_update_interval
__props__.__dict__["max_properties"] = max_properties
__props__.__dict__["max_resources"] = max_resources
__props__.__dict__["max_test_timeout"] = max_test_timeout
__props__.__dict__["max_ttl"] = max_ttl
__props__.__dict__["min_pingable_region_fraction"] = min_pingable_region_fraction
__props__.__dict__["min_test_interval"] = min_test_interval
__props__.__dict__["min_ttl"] = min_ttl
__props__.__dict__["name"] = name
__props__.__dict__["ping_interval"] = ping_interval
__props__.__dict__["ping_packet_size"] = ping_packet_size
__props__.__dict__["round_robin_prefix"] = round_robin_prefix
__props__.__dict__["servermonitor_liveness_count"] = servermonitor_liveness_count
__props__.__dict__["servermonitor_load_count"] = servermonitor_load_count
__props__.__dict__["servermonitor_pool"] = servermonitor_pool
__props__.__dict__["type"] = type
__props__.__dict__["wait_on_complete"] = wait_on_complete
return GtmDomain(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="cnameCoalescingEnabled")
def cname_coalescing_enabled(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "cname_coalescing_enabled")
@property
@pulumi.getter
def comment(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "comment")
@property
@pulumi.getter
def contract(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "contract")
@property
@pulumi.getter(name="defaultErrorPenalty")
def default_error_penalty(self) -> pulumi.Output[Optional[int]]:
return pulumi.get(self, "default_error_penalty")
@property
@pulumi.getter(name="defaultHealthMax")
def default_health_max(self) -> pulumi.Output[float]:
return pulumi.get(self, "default_health_max")
@property
@pulumi.getter(name="defaultHealthMultiplier")
def default_health_multiplier(self) -> pulumi.Output[float]:
return pulumi.get(self, "default_health_multiplier")
@property
@pulumi.getter(name="defaultHealthThreshold")
def default_health_threshold(self) -> pulumi.Output[float]:
return pulumi.get(self, "default_health_threshold")
@property
@pulumi.getter(name="defaultMaxUnreachablePenalty")
def default_max_unreachable_penalty(self) -> pulumi.Output[int]:
return pulumi.get(self, "default_max_unreachable_penalty")
@property
@pulumi.getter(name="defaultSslClientCertificate")
def default_ssl_client_certificate(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "default_ssl_client_certificate")
@property
@pulumi.getter(name="defaultSslClientPrivateKey")
def default_ssl_client_private_key(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "default_ssl_client_private_key")
@property
@pulumi.getter(name="defaultTimeoutPenalty")
def default_timeout_penalty(self) -> pulumi.Output[Optional[int]]:
return pulumi.get(self, "default_timeout_penalty")
@property
@pulumi.getter(name="defaultUnreachableThreshold")
def default_unreachable_threshold(self) -> pulumi.Output[float]:
return pulumi.get(self, "default_unreachable_threshold")
@property
@pulumi.getter(name="emailNotificationLists")
def email_notification_lists(self) -> pulumi.Output[Optional[Sequence[str]]]:
return pulumi.get(self, "email_notification_lists")
@property
@pulumi.getter(name="endUserMappingEnabled")
def end_user_mapping_enabled(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "end_user_mapping_enabled")
@property
@pulumi.getter
def group(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "group")
@property
@pulumi.getter(name="loadFeedback")
def load_feedback(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "load_feedback")
@property
@pulumi.getter(name="loadImbalancePercentage")
def load_imbalance_percentage(self) -> pulumi.Output[Optional[float]]:
return pulumi.get(self, "load_imbalance_percentage")
@property
@pulumi.getter(name="mapUpdateInterval")
def map_update_interval(self) -> pulumi.Output[int]:
return pulumi.get(self, "map_update_interval")
@property
@pulumi.getter(name="maxProperties")
def max_properties(self) -> pulumi.Output[int]:
return pulumi.get(self, "max_properties")
@property
@pulumi.getter(name="maxResources")
def max_resources(self) -> pulumi.Output[int]:
return pulumi.get(self, "max_resources")
@property
@pulumi.getter(name="maxTestTimeout")
def max_test_timeout(self) -> pulumi.Output[float]:
return pulumi.get(self, "max_test_timeout")
@property
@pulumi.getter(name="maxTtl")
def max_ttl(self) -> pulumi.Output[int]:
return pulumi.get(self, "max_ttl")
@property
@pulumi.getter(name="minPingableRegionFraction")
def min_pingable_region_fraction(self) -> pulumi.Output[float]:
return pulumi.get(self, "min_pingable_region_fraction")
@property
@pulumi.getter(name="minTestInterval")
def min_test_interval(self) -> pulumi.Output[int]:
return pulumi.get(self, "min_test_interval")
@property
@pulumi.getter(name="minTtl")
def min_ttl(self) -> pulumi.Output[int]:
return pulumi.get(self, "min_ttl")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pingInterval")
def ping_interval(self) -> pulumi.Output[int]:
return pulumi.get(self, "ping_interval")
@property
@pulumi.getter(name="pingPacketSize")
def ping_packet_size(self) -> pulumi.Output[int]:
return pulumi.get(self, "ping_packet_size")
@property
@pulumi.getter(name="roundRobinPrefix")
def round_robin_prefix(self) -> pulumi.Output[str]:
return pulumi.get(self, "round_robin_prefix")
@property
@pulumi.getter(name="servermonitorLivenessCount")
def servermonitor_liveness_count(self) -> pulumi.Output[int]:
return pulumi.get(self, "servermonitor_liveness_count")
@property
@pulumi.getter(name="servermonitorLoadCount")
def servermonitor_load_count(self) -> pulumi.Output[int]:
return pulumi.get(self, "servermonitor_load_count")
@property
@pulumi.getter(name="servermonitorPool")
def servermonitor_pool(self) -> pulumi.Output[str]:
return pulumi.get(self, "servermonitor_pool")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
return pulumi.get(self, "type")
@property
@pulumi.getter(name="waitOnComplete")
def wait_on_complete(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "wait_on_complete")
# === src/server/server.py from el-ideal-ideas/MocaUsersAPI (Python, MIT License) ===
# Ω*
# ■ ■■■■■
# ■ ■■ ■■
# ■ ■■ ■
# ■ ■■
# ■■■■■ ■ ■■■
# ■■ ■■ ■ ■■■
# ■■ ■■ ■ ■■■■
# ■■ ■■ ■ ■■■■
# ■■■■■■■■■ ■ ■■■
# ■■ ■ ■■
# ■■ ■ ■■
# ■■ ■ ■ ■■ ■■
# ■■ ■■ ■ ■■■ ■■■ ■■
# ■■■■■ ■ ■■■ ■■■■■
"""
Copyright (c) 2020.5.28 [el.ideal-ideas]
This software is released under the MIT License.
see LICENSE.txt or following URL.
https://www.el-ideal-ideas.com/MocaSystem/LICENSE/
"""
# -- Imports --------------------------------------------------------------------------
from ssl import SSLContext
from src.core import moca_config, LOG_DIR, VERSION
from src.moca_modules.moca_utils import *
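# NOTE: the wildcard import above is assumed to supply names used below that
# are not imported explicitly here (e.g. Path and Optional).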
from src.moca_modules import get_args
from sanic import Blueprint
from sanic.request import Request
from sanic.response import HTTPResponse, text, raw
from src.moca_modules import sanic_json as json
from sanic.exceptions import Forbidden, NotFound
from .MocaUsersAPIServer import MocaUsersAPIServer
from secrets import compare_digest
from base64 import b64encode
# -------------------------------------------------------------------------- Imports --
# -- Variables --------------------------------------------------------------------------
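# The second positional argument to Blueprint is Sanic's url_prefix, so the
# routes defined below are grouped under the 'users' prefix.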
users_api: Blueprint = Blueprint('users_api', 'users')
# -------------------------------------------------------------------------- Variables --
# -- Server --------------------------------------------------------------------------
ssl: Optional[SSLContext]
if isinstance(moca_config.get('certfile'), str) and \
isinstance(moca_config.get('keyfile'), str) and \
Path(moca_config.get('certfile')).is_file() and \
Path(moca_config.get('keyfile')).is_file():
ssl = MocaUsersAPIServer.create_ssl_context(moca_config.get('certfile'),
moca_config.get('keyfile'))
else:
ssl = None
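# TLS is enabled only when both certfile and keyfile are configured and exist
# on disk; otherwise the server falls back to plain HTTP.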
server: MocaUsersAPIServer = MocaUsersAPIServer(
'MocaUsersAPIServer',
moca_config.get('host', '0.0.0.0'),
moca_config.get('port', 5980),
ssl,
LOG_DIR,
None,
moca_config.get('access_log', True),
moca_config.get('use_ipv6', False),
moca_config.get('workers', 0),
moca_config.get('headers', {})
)
app = server.app
server.add_blueprint(users_api)
# -------------------------------------------------------------------------- Server --
# -- Middleware --------------------------------------------------------------------------
@app.middleware('request')
async def global_ip_rate_limit(request: Request):
    # Reject the request early when the client IP has exceeded its rate limit.
    if not request.app.moca_access.check_ip_rate_limit(request):
        raise Forbidden('too many requests.')
# -------------------------------------------------------------------------- Middleware --
# -- Routes --------------------------------------------------------------------------
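# Most handlers below share one shape: read parameters via get_args, validate
# the API key against a permission flag ('-NR-' and '-SM-', presumably normal
# and system-management scopes), raise Forbidden on failure, then delegate to
# request.app.moca_users and return {'status_code', 'msg'} as JSON.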
@users_api.route('/version', methods={'GET', 'POST', 'OPTIONS'})
async def version(request: Request) -> HTTPResponse:
return text(VERSION)
@users_api.route('/insert_dummy_data', methods={'GET', 'POST', 'OPTIONS'})
async def insert_dummy_data(request: Request) -> HTTPResponse:
root_pass, api_key = get_args(request, 'root_pass', 'api_key')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
if not (isinstance(root_pass, str) and root_pass.isascii() and (8 <= len(root_pass) <= 32)):
raise Forbidden('root password format error.')
if not compare_digest(root_pass, request.app.moca_config.get('root_pass', '')):
raise Forbidden('invalid root password.')
await request.app.moca_users.insert_dummy_data()
return text('success.')
@users_api.route('/create_user', methods={'GET', 'POST', 'OPTIONS'})
async def create_user(request: Request) -> HTTPResponse:
api_key, username, password, userid, email = get_args(request, 'api_key', 'username', 'password', 'userid', 'email')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.create_user(username, password, userid, email)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/check_userid', methods={'GET', 'POST', 'OPTIONS'})
async def check_userid(request: Request) -> HTTPResponse:
api_key, userid = get_args(request, 'api_key', 'userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.check_userid(userid)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/send_email_verify_message', methods={'GET', 'POST', 'OPTIONS'})
async def send_email_verify_message(request: Request) -> HTTPResponse:
api_key, userid = get_args(request, 'api_key', 'userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.send_email_verify_message(userid)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/add_email_address', methods={'GET', 'POST', 'OPTIONS'})
async def add_email_address(request: Request) -> HTTPResponse:
api_key, userid, email, access_token = get_args(request, 'api_key', 'userid', 'email', 'access_token')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.add_email_address(userid, email, access_token)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/verify_email', methods={'GET', 'POST', 'OPTIONS'})
async def verify_email(request: Request) -> HTTPResponse:
api_key, userid, token = get_args(request, 'api_key', 'userid', 'token')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.verify_email(userid, token)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/check_access_token', methods={'GET', 'POST', 'OPTIONS'})
async def check_access_token(request: Request) -> HTTPResponse:
api_key, userid, access_token = get_args(request, 'api_key', 'userid', 'access_token')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.check_access_token(userid, access_token)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/login', methods={'GET', 'POST', 'OPTIONS'})
async def login(request: Request) -> HTTPResponse:
api_key, userid, password = get_args(request, 'api_key', 'userid', 'password')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.login(userid, password, server.get_remote_address(request))
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/send_phone_login_code', methods={'GET', 'POST', 'OPTIONS'})
async def send_phone_login_code(request: Request) -> HTTPResponse:
api_key, userid, phone = get_args(request, 'api_key', 'userid', 'phone')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.send_phone_login_code(userid, phone)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/login_by_phone', methods={'GET', 'POST', 'OPTIONS'})
async def login_by_phone(request: Request) -> HTTPResponse:
api_key, userid, token, phone = get_args(request, 'api_key', 'userid', 'token', 'phone')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.login_by_phone(userid, token, phone, server.get_remote_address(request))
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/search_users_by_name', methods={'GET', 'POST', 'OPTIONS'})
async def search_users_by_name(request: Request) -> HTTPResponse:
api_key, username = get_args(request, 'api_key', 'username')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.search_users_by_name(username)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/search_user_by_id', methods={'GET', 'POST', 'OPTIONS'})
async def search_user_by_id(request: Request) -> HTTPResponse:
api_key, userid = get_args(request, 'api_key', 'userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.search_user_by_id(userid)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/search_users', methods={'GET', 'POST', 'OPTIONS'})
async def search_users(request: Request) -> HTTPResponse:
api_key, keywords = get_args(request, 'api_key', 'keywords')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
data = await request.app.moca_users.search_users(keywords)
return json(data)
@users_api.route('/save_profile', methods={'GET', 'POST', 'OPTIONS'})
async def save_profile(request: Request) -> HTTPResponse:
api_key, userid, profile, access_token = get_args(request, 'api_key', 'userid', 'profile', 'access_token')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.save_profile(userid, profile, access_token)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_profile', methods={'GET', 'POST', 'OPTIONS'})
async def get_profile(request: Request) -> HTTPResponse:
api_key, userid = get_args(request, 'api_key', 'userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_profile(userid)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_profiles', methods={'GET', 'POST', 'OPTIONS'})
async def get_profiles(request: Request) -> HTTPResponse:
api_key, userid_list = get_args(request, 'api_key', 'userid_list')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
data = await request.app.moca_users.get_profiles(userid_list)
return json(data)
@users_api.route('/add_phone_number', methods={'GET', 'POST', 'OPTIONS'})
async def add_phone_number(request: Request) -> HTTPResponse:
api_key, userid, phone, access_token = get_args(request, 'api_key', 'userid', 'phone', 'access_token')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.add_phone_number(userid, phone, access_token)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/verify_phone', methods={'GET', 'POST', 'OPTIONS'})
async def verify_phone(request: Request) -> HTTPResponse:
api_key, userid, code, access_token = get_args(request, 'api_key', 'userid', 'code', 'access_token')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.verify_phone(userid, code, access_token)
return json({
'status_code': status[0],
'msg': status[1],
})
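# The boolean endpoints below mirror the status-code convention of the JSON
# routes: '0' in the response body means true/success, '1' means false.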
@users_api.route('/has_verified_phone', methods={'GET', 'POST', 'OPTIONS'})
async def has_verified_phone(request: Request) -> HTTPResponse:
api_key, userid = get_args(request, 'api_key', 'userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.has_verified_phone(userid)
if status:
return text('0')
else:
return text('1')
@users_api.route('/check_password', methods={'GET', 'POST', 'OPTIONS'})
async def check_password(request: Request) -> HTTPResponse:
api_key, userid, password = get_args(request, 'api_key', 'userid', 'password')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.check_password(userid, password)
if status:
return text('0')
else:
return text('1')
@users_api.route('/start_two_step_verification', methods={'GET', 'POST', 'OPTIONS'})
async def start_two_step_verification(request: Request) -> HTTPResponse:
api_key, userid, password = get_args(request, 'api_key', 'userid', 'password')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.start_two_step_verification(userid, password)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/stop_two_step_verification', methods={'GET', 'POST', 'OPTIONS'})
async def stop_two_step_verification(request: Request) -> HTTPResponse:
api_key, userid, password = get_args(request, 'api_key', 'userid', 'password')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.stop_two_step_verification(userid, password)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/check_system_account_permission', methods={'GET', 'POST', 'OPTIONS'})
async def check_system_account_permission(request: Request) -> HTTPResponse:
api_key, userid, password, permission = get_args(request, 'api_key', 'userid', 'password', 'permission')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.check_system_account_permission(userid, password, permission)
if status:
return text('0')
else:
return text('1')
@users_api.route('/get_my_login_log', methods={'GET', 'POST', 'OPTIONS'})
async def get_my_login_log(request: Request) -> HTTPResponse:
api_key, userid, access_token = get_args(request, 'api_key', 'userid', 'access_token')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_my_login_log(userid, access_token)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_user_access_token', methods={'GET', 'POST', 'OPTIONS'})
async def get_user_access_token(request: Request) -> HTTPResponse:
api_key, userid, password, target_userid = get_args(request, 'api_key', 'userid', 'password', 'target_userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_user_access_token(userid, password, target_userid)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/lock_my_account', methods={'GET', 'POST', 'OPTIONS'})
async def lock_my_account(request: Request) -> HTTPResponse:
api_key, userid, password = get_args(request, 'api_key', 'userid', 'password')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.lock_my_account(userid, password)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/lock_user_account', methods={'GET', 'POST', 'OPTIONS'})
async def lock_user_account(request: Request) -> HTTPResponse:
api_key, userid, password, target_userid = get_args(request, 'api_key', 'userid', 'password', 'target_userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.lock_user_account(userid, password, target_userid)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/reset_user_database', methods={'GET', 'POST', 'OPTIONS'})
async def reset_user_database(request: Request) -> HTTPResponse:
root_pass, api_key = get_args(request, 'root_pass', 'api_key')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
if not (isinstance(root_pass, str) and root_pass.isascii() and (8 <= len(root_pass) <= 32)):
raise Forbidden('root password format error.')
if not compare_digest(root_pass, app.moca_config.get('root_pass', '')):
raise Forbidden('invalid root password.')
await request.app.moca_users.reset_user_database()
return text('success.')
@users_api.route('/save_user_image', methods={'GET', 'POST', 'OPTIONS'})
async def save_user_image(request: Request) -> HTTPResponse:
api_key, userid, access_token, key, image = get_args(request, 'api_key', 'userid', 'access_token', 'key', 'image')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.save_user_image(
userid, access_token, key, image if isinstance(image, str) else image.body
)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_user_image', methods={'GET', 'POST', 'OPTIONS'})
async def get_user_image(request: Request) -> HTTPResponse:
userid, key = get_args(request, 'userid', 'key')
status = await request.app.moca_users.get_user_image(userid, key)
return json({
'status_code': status[0],
'msg': status[1] if status[0] != 0 else 'data:image/jpeg;base64,' + b64encode(status[1]).decode('utf-8')
})
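# The JSON route above embeds the image as a base64 data URI; the variant
# below serves the raw JPEG bytes directly.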
@users_api.route('/get_user_image/<userid>/<key>', methods={'GET', 'POST', 'OPTIONS'})
async def get_user_image_raw(request: Request, userid, key) -> HTTPResponse:
status = await request.app.moca_users.get_user_image(userid, key)
if status[0] == 0:
return raw(status[1], content_type='image/jpeg')
else:
raise NotFound('Not Found.')
@users_api.route('/save_big_icon', methods={'GET', 'POST', 'OPTIONS'})
async def save_big_icon(request: Request) -> HTTPResponse:
api_key, userid, access_token, icon = get_args(request, 'api_key', 'userid', 'access_token', 'icon')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.save_big_icon(
userid, access_token, icon if isinstance(icon, str) else icon.body
)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/save_small_icon', methods={'GET', 'POST', 'OPTIONS'})
async def save_small_icon(request: Request) -> HTTPResponse:
api_key, userid, access_token, icon = get_args(request, 'api_key', 'userid', 'access_token', 'icon')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.save_small_icon(
userid, access_token, icon if isinstance(icon, str) else icon.body
)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_big_icon', methods={'GET', 'POST', 'OPTIONS'})
async def get_big_icon(request: Request) -> HTTPResponse:
userid = get_args(request, 'userid')[0]
status = await request.app.moca_users.get_big_icon(userid)
return json({
'status_code': status[0],
'msg': status[1] if status[0] != 0 else 'data:image/jpeg;base64,' + b64encode(status[1]).decode('utf-8')
})
@users_api.route('/get_small_icon', methods={'GET', 'POST', 'OPTIONS'})
async def get_small_icon(request: Request) -> HTTPResponse:
userid = get_args(request, 'userid')[0]
status = await request.app.moca_users.get_small_icon(userid)
return json({
'status_code': status[0],
'msg': status[1] if status[0] != 0 else 'data:image/jpeg;base64,' + b64encode(status[1]).decode('utf-8')
})
@users_api.route('/get_big_icon/<userid>', methods={'GET', 'POST', 'OPTIONS'})
async def get_big_icon_raw(request: Request, userid) -> HTTPResponse:
status = await request.app.moca_users.get_big_icon(userid)
if status[0] == 0:
return raw(status[1], content_type='image/jpeg')
else:
raise NotFound('Not Found.')
@users_api.route('/get_small_icon/<userid>', methods={'GET', 'POST', 'OPTIONS'})
async def get_small_icon_raw(request: Request, userid) -> HTTPResponse:
status = await request.app.moca_users.get_small_icon(userid)
if status[0] == 0:
return raw(status[1], content_type='image/jpeg')
else:
raise NotFound('Not Found.')
@users_api.route('/save_user_file', methods={'GET', 'POST', 'OPTIONS'})
async def save_user_file(request: Request) -> HTTPResponse:
api_key, userid, access_token, key, data = get_args(request, 'api_key', 'userid', 'access_token', 'key', 'data')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.save_user_file(
userid, access_token, key, data if isinstance(data, str) else data.body
)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_user_file', methods={'GET', 'POST', 'OPTIONS'})
async def get_user_file(request: Request) -> HTTPResponse:
api_key, userid, access_token, key = get_args(request, 'api_key', 'userid', 'access_token', 'key')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_user_file(
userid, access_token, key
)
return json({
'status_code': status[0],
'msg': status[1] if status[0] != 0 else b64encode(status[1]).decode('utf-8'),
})
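# Note that, unlike the image routes, the '/raw' variant below still responds
# with the base64-encoded file as plain text rather than the original bytes.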
@users_api.route('/get_user_file/raw', methods={'GET', 'POST', 'OPTIONS'})
async def get_user_file_raw(request: Request) -> HTTPResponse:
api_key, userid, access_token, key = get_args(request, 'api_key', 'userid', 'access_token', 'key')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_user_file(
userid, access_token, key
)
if status[0] == 0:
return raw(b64encode(status[1]).decode('utf-8'))
else:
raise NotFound('Not Found.')
@users_api.route('/send_message', methods={'GET', 'POST', 'OPTIONS'})
async def send_message(request: Request) -> HTTPResponse:
api_key, from_, to_, access_token, message = get_args(request, 'api_key', 'from', 'to', 'access_token', 'message')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.send_message(from_, to_, access_token, message)
return json({
'status_code': status[0],
'msg': status[1],
})
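# get_args also accepts tuple specs; judging from the calls below, the tuple
# form reads (name, cast[, default]), e.g. ('start', int, 0) casts 'start' to
# int and falls back to 0 when the parameter is missing.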
@users_api.route('/get_messages', methods={'GET', 'POST', 'OPTIONS'})
async def get_messages(request: Request) -> HTTPResponse:
api_key, userid, access_token, start, limit = get_args(
request, 'api_key', 'userid', 'access_token', ('start', int, 0), ('limit', int, 1024)
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_messages(userid, access_token, start, limit)
return json({
'status_code': status[0],
'msg': status[1] if status[0] != 0 else [(item[0], item[1], item[2], str(item[3])) for item in status[1]]
})
@users_api.route('/change_password', methods={'GET', 'POST', 'OPTIONS'})
async def change_password(request: Request) -> HTTPResponse:
api_key, userid, old, new = get_args(request, 'api_key', 'userid', 'old', 'new')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.change_password(userid, old, new)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/has_verified_email', methods={'GET', 'POST', 'OPTIONS'})
async def has_verified_email(request: Request) -> HTTPResponse:
api_key, userid = get_args(request, 'api_key', 'userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.has_verified_email(userid)
if status:
return text('0')
else:
return text('1')
@users_api.route('/get_user_email_list', methods={'GET', 'POST', 'OPTIONS'})
async def get_user_email_list(request: Request) -> HTTPResponse:
api_key, userid_list = get_args(request, 'api_key', 'userid_list')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
data = await request.app.moca_users.get_user_email_list(userid_list)
return json(data)
@users_api.route('/send_email_to_users', methods={'GET', 'POST', 'OPTIONS'})
async def send_email_to_users(request: Request) -> HTTPResponse:
api_key, userid_list, title, body = get_args(request, 'api_key', 'userid_list', 'title', 'body')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
data = await request.app.moca_users.send_email_to_users(userid_list, title, body)
return json(data)
@users_api.route('/get_user_phone_number', methods={'GET', 'POST', 'OPTIONS'})
async def get_user_phone_number(request: Request) -> HTTPResponse:
api_key, userid_list = get_args(request, 'api_key', 'userid_list')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
data = await request.app.moca_users.get_user_phone_number(userid_list)
return json(data)
@users_api.route('/send_sms_to_users', methods={'GET', 'POST', 'OPTIONS'})
async def send_sms_to_users(request: Request) -> HTTPResponse:
api_key, userid_list, body = get_args(request, 'api_key', 'userid_list', 'body')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
data = await request.app.moca_users.send_sms_to_users(userid_list, body)
return json(data)
@users_api.route('/forgot_password', methods={'GET', 'POST', 'OPTIONS'})
async def forgot_password(request: Request) -> HTTPResponse:
api_key, userid = get_args(request, 'api_key', 'userid')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.forgot_password(userid)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/reset_password', methods={'GET', 'POST', 'OPTIONS'})
async def reset_password(request: Request) -> HTTPResponse:
api_key, userid, password, token = get_args(request, 'api_key', 'userid', 'password', 'token')
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.reset_password(userid, password, token)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/save_my_user_data', methods={'GET', 'POST', 'OPTIONS'})
async def save_my_user_data(request: Request) -> HTTPResponse:
api_key, userid, access_token, storage, data = get_args(
request, 'api_key', 'userid', 'access_token', ('storage', int), 'data'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.save_my_user_data(userid, access_token, storage, data)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_my_user_data', methods={'GET', 'POST', 'OPTIONS'})
async def get_my_user_data(request: Request) -> HTTPResponse:
api_key, userid, access_token, storage = get_args(
request, 'api_key', 'userid', 'access_token', ('storage', int)
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_my_user_data(userid, access_token, storage)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_other_user_data', methods={'GET', 'POST', 'OPTIONS'})
async def get_other_user_data(request: Request) -> HTTPResponse:
api_key, userid, target_userid, access_token, storage = get_args(
request, 'api_key', 'userid', 'target_userid', 'access_token', ('storage', int)
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_other_user_data(userid, access_token, target_userid, storage)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/save_other_user_data', methods={'GET', 'POST', 'OPTIONS'})
async def save_other_user_data(request: Request) -> HTTPResponse:
api_key, userid, target_userid, access_token, storage, data = get_args(
request, 'api_key', 'userid', 'target_userid', 'access_token', ('storage', int), 'data'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.save_other_user_data(userid, access_token, target_userid, storage, data)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_user_count', methods={'GET', 'POST', 'OPTIONS'})
async def get_user_count(request: Request) -> HTTPResponse:
api_key = get_args(request, 'api_key')[0]
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
count = await request.app.moca_users.get_user_count()
return text(str(count))
@users_api.route('/get_locked_user_number', methods={'GET', 'POST', 'OPTIONS'})
async def get_locked_user_number(request: Request) -> HTTPResponse:
api_key = get_args(request, 'api_key')[0]
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
count = await request.app.moca_users.get_locked_user_number()
return text(str(count))
@users_api.route('/get_users_list', methods={'GET', 'POST', 'OPTIONS'})
async def get_users_list(request: Request) -> HTTPResponse:
api_key, userid, password, start, limit = get_args(
request, 'api_key', 'userid', 'password', ('start', int), ('limit', int)
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-SM-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
data = await request.app.moca_users.get_users_list(userid, password, start, limit)
return json(data)
@users_api.route('/insert_data_to_storage', methods={'GET', 'POST', 'OPTIONS'})
async def insert_data_to_storage(request: Request) -> HTTPResponse:
api_key, userid, access_token, key, data = get_args(
request, 'api_key', 'userid', 'access_token', 'key', 'data'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.insert_data_to_storage(userid, access_token, key, data)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/select_data_from_storage', methods={'GET', 'POST', 'OPTIONS'})
async def select_data_from_storage(request: Request) -> HTTPResponse:
api_key, userid, access_token, key = get_args(
request, 'api_key', 'userid', 'access_token', 'key'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.select_data_from_storage(userid, access_token, key)
return json({
'status_code': status[0],
'msg': status[1] if status[0] != 0 else [(item[0], item[1], str(item[2])) for item in status[1]],
})
@users_api.route('/delete_data_from_storage_by_id', methods={'GET', 'POST', 'OPTIONS'})
async def delete_data_from_storage_by_id(request: Request) -> HTTPResponse:
api_key, userid, access_token, content_id = get_args(
request, 'api_key', 'userid', 'access_token', ('content_id', int)
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.delete_data_from_storage_by_id(userid, access_token, content_id)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/delete_data_from_storage_by_key', methods={'GET', 'POST', 'OPTIONS'})
async def delete_data_from_storage_by_key(request: Request) -> HTTPResponse:
api_key, userid, access_token, key = get_args(
request, 'api_key', 'userid', 'access_token', 'key'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.delete_data_from_storage_by_key(userid, access_token, key)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/update_data_in_storage_by_id', methods={'GET', 'POST', 'OPTIONS'})
async def update_data_in_storage_by_id(request: Request) -> HTTPResponse:
api_key, userid, access_token, content_id, data = get_args(
request, 'api_key', 'userid', 'access_token', ('content_id', int), 'data'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.update_data_in_storage_by_id(userid, access_token, content_id, data)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/share_file', methods={'GET', 'POST', 'OPTIONS'})
async def share_file(request: Request) -> HTTPResponse:
api_key, userid, access_token, filename, data, protection, time_limit, info = get_args(
request, 'api_key', 'userid', 'access_token', 'filename', 'data', 'protection', ('time_limit', int), 'info'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.share_file(
userid, access_token, filename, data if isinstance(data, str) else data.body, protection, time_limit, info
)
return json({
'status_code': status[0],
'msg': status[1],
})
@users_api.route('/get_shared_file', methods={'GET', 'POST', 'OPTIONS'})
async def get_shared_file(request: Request) -> HTTPResponse:
api_key, key, protection = get_args(
request, 'api_key', 'key', 'protection'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_shared_file(key, protection)
return json({
'status_code': status[0],
'msg': status[1] if status[0] != 0 else [b64encode(status[1][0]).decode('utf-8'), status[1][1], status[1][2]],
})
@users_api.route('/get_shared_file/raw', methods={'GET', 'POST', 'OPTIONS'})
async def get_shared_file_raw(request: Request) -> HTTPResponse:
api_key, key, protection = get_args(
request, 'api_key', 'key', 'protection'
)
api_key_status = await request.app.moca_access.check_api_key(api_key, '-NR-', request)
if api_key_status[0] != 0:
raise Forbidden(api_key_status[1])
status = await request.app.moca_users.get_shared_file(key, protection)
if status[0] == 0:
return raw(status[1][0])
else:
raise NotFound('Not Found.')
# -------------------------------------------------------------------------- Routes --
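# A minimal client-side sketch (not part of this module) of how the routes above are called:
# each endpoint accepts GET or POST form arguments, and most return JSON with 'status_code'
# and 'msg'. The base URL and api_key below are placeholder assumptions, and any blueprint
# URL prefix is unknown here.
if __name__ == '__main__':
    import requests

    res = requests.post(
        'https://example.com/forgot_password',  # hypothetical deployment URL
        data={'api_key': 'YOUR-API-KEY', 'userid': 'alice'},
    )
    print(res.json())  # expected shape: {'status_code': ..., 'msg': ...}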
| 42.544196
| 120
| 0.663271
| 5,507
| 39,949
| 4.549664
| 0.038678
| 0.098902
| 0.084774
| 0.094033
| 0.895111
| 0.877031
| 0.849012
| 0.784235
| 0.712672
| 0.664777
| 0
| 0.01159
| 0.170668
| 39,949
| 938
| 121
| 42.589552
| 0.7413
| 0.037648
| 0
| 0.604497
| 0
| 0
| 0.129331
| 0.013771
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.058201
| 0.015873
| 0
| 0.108466
| 0.003968
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
31c6dfccc80a10cc090fb55088f7e7415e98c892
| 27,415
|
py
|
Python
|
sdk/python/pulumi_google_native/cloudscheduler/v1/job.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/cloudscheduler/v1/job.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/cloudscheduler/v1/job.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['JobArgs', 'Job']
@pulumi.input_type
class JobArgs:
def __init__(__self__, *,
app_engine_http_target: Optional[pulumi.Input['AppEngineHttpTargetArgs']] = None,
attempt_deadline: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
http_target: Optional[pulumi.Input['HttpTargetArgs']] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_target: Optional[pulumi.Input['PubsubTargetArgs']] = None,
retry_config: Optional[pulumi.Input['RetryConfigArgs']] = None,
schedule: Optional[pulumi.Input[str]] = None,
time_zone: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Job resource.
:param pulumi.Input['AppEngineHttpTargetArgs'] app_engine_http_target: App Engine HTTP target.
:param pulumi.Input[str] attempt_deadline: The deadline for job attempts. If the request handler does not respond by this deadline then the request is cancelled and the attempt is marked as a `DEADLINE_EXCEEDED` failure. The failed attempt can be viewed in execution logs. Cloud Scheduler will retry the job according to the RetryConfig. The allowed duration for this deadline is: * For HTTP targets, between 15 seconds and 30 minutes. * For App Engine HTTP targets, between 15 seconds and 24 hours.
:param pulumi.Input[str] description: Optionally caller-specified in CreateJob or UpdateJob. A human-readable description for the job. This string must not contain more than 500 characters.
:param pulumi.Input['HttpTargetArgs'] http_target: HTTP target.
:param pulumi.Input[str] name: Optionally caller-specified in CreateJob, after which it becomes output only. The job name. For example: `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or periods (.). For more information, see [Identifying projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) * `LOCATION_ID` is the canonical ID for the job's location. The list of available locations can be obtained by calling ListLocations. For more information, see https://cloud.google.com/about/locations/. * `JOB_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), or underscores (_). The maximum length is 500 characters.
:param pulumi.Input['PubsubTargetArgs'] pubsub_target: Pub/Sub target.
:param pulumi.Input['RetryConfigArgs'] retry_config: Settings that determine the retry behavior.
:param pulumi.Input[str] schedule: Required, except when used with UpdateJob. Describes the schedule on which the job will be executed. The schedule can be either of the following types: * [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) * English-like [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules) As a general rule, execution `n + 1` of a job will not begin until execution `n` has finished. Cloud Scheduler will never allow two simultaneously outstanding executions. For example, this implies that if the `n+1`th execution is scheduled to run at 16:00 but the `n`th execution takes until 16:15, the `n+1`th execution will not start until `16:15`. A scheduled start time will be delayed if the previous execution has not ended when its scheduled time occurs. If retry_count > 0 and a job attempt fails, the job will be tried a total of retry_count times, with exponential backoff, until the next scheduled start time.
        :param pulumi.Input[str] time_zone: Specifies the time zone to be used in interpreting schedule. The value of this field must be a time zone name from the [tz database](http://en.wikipedia.org/wiki/Tz_database). Note that some time zones include a provision for daylight saving time. The rules for daylight saving time are determined by the chosen tz. For UTC use the string "utc". If a time zone is not specified, the default will be in UTC (also known as GMT).
"""
if app_engine_http_target is not None:
pulumi.set(__self__, "app_engine_http_target", app_engine_http_target)
if attempt_deadline is not None:
pulumi.set(__self__, "attempt_deadline", attempt_deadline)
if description is not None:
pulumi.set(__self__, "description", description)
if http_target is not None:
pulumi.set(__self__, "http_target", http_target)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if pubsub_target is not None:
pulumi.set(__self__, "pubsub_target", pubsub_target)
if retry_config is not None:
pulumi.set(__self__, "retry_config", retry_config)
if schedule is not None:
pulumi.set(__self__, "schedule", schedule)
if time_zone is not None:
pulumi.set(__self__, "time_zone", time_zone)
@property
@pulumi.getter(name="appEngineHttpTarget")
def app_engine_http_target(self) -> Optional[pulumi.Input['AppEngineHttpTargetArgs']]:
"""
App Engine HTTP target.
"""
return pulumi.get(self, "app_engine_http_target")
@app_engine_http_target.setter
def app_engine_http_target(self, value: Optional[pulumi.Input['AppEngineHttpTargetArgs']]):
pulumi.set(self, "app_engine_http_target", value)
@property
@pulumi.getter(name="attemptDeadline")
def attempt_deadline(self) -> Optional[pulumi.Input[str]]:
"""
The deadline for job attempts. If the request handler does not respond by this deadline then the request is cancelled and the attempt is marked as a `DEADLINE_EXCEEDED` failure. The failed attempt can be viewed in execution logs. Cloud Scheduler will retry the job according to the RetryConfig. The allowed duration for this deadline is: * For HTTP targets, between 15 seconds and 30 minutes. * For App Engine HTTP targets, between 15 seconds and 24 hours.
"""
return pulumi.get(self, "attempt_deadline")
@attempt_deadline.setter
def attempt_deadline(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "attempt_deadline", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Optionally caller-specified in CreateJob or UpdateJob. A human-readable description for the job. This string must not contain more than 500 characters.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="httpTarget")
def http_target(self) -> Optional[pulumi.Input['HttpTargetArgs']]:
"""
HTTP target.
"""
return pulumi.get(self, "http_target")
@http_target.setter
def http_target(self, value: Optional[pulumi.Input['HttpTargetArgs']]):
pulumi.set(self, "http_target", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Optionally caller-specified in CreateJob, after which it becomes output only. The job name. For example: `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or periods (.). For more information, see [Identifying projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) * `LOCATION_ID` is the canonical ID for the job's location. The list of available locations can be obtained by calling ListLocations. For more information, see https://cloud.google.com/about/locations/. * `JOB_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), or underscores (_). The maximum length is 500 characters.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="pubsubTarget")
def pubsub_target(self) -> Optional[pulumi.Input['PubsubTargetArgs']]:
"""
Pub/Sub target.
"""
return pulumi.get(self, "pubsub_target")
@pubsub_target.setter
def pubsub_target(self, value: Optional[pulumi.Input['PubsubTargetArgs']]):
pulumi.set(self, "pubsub_target", value)
@property
@pulumi.getter(name="retryConfig")
def retry_config(self) -> Optional[pulumi.Input['RetryConfigArgs']]:
"""
Settings that determine the retry behavior.
"""
return pulumi.get(self, "retry_config")
@retry_config.setter
def retry_config(self, value: Optional[pulumi.Input['RetryConfigArgs']]):
pulumi.set(self, "retry_config", value)
@property
@pulumi.getter
def schedule(self) -> Optional[pulumi.Input[str]]:
"""
Required, except when used with UpdateJob. Describes the schedule on which the job will be executed. The schedule can be either of the following types: * [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) * English-like [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules) As a general rule, execution `n + 1` of a job will not begin until execution `n` has finished. Cloud Scheduler will never allow two simultaneously outstanding executions. For example, this implies that if the `n+1`th execution is scheduled to run at 16:00 but the `n`th execution takes until 16:15, the `n+1`th execution will not start until `16:15`. A scheduled start time will be delayed if the previous execution has not ended when its scheduled time occurs. If retry_count > 0 and a job attempt fails, the job will be tried a total of retry_count times, with exponential backoff, until the next scheduled start time.
"""
return pulumi.get(self, "schedule")
@schedule.setter
def schedule(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "schedule", value)
@property
@pulumi.getter(name="timeZone")
def time_zone(self) -> Optional[pulumi.Input[str]]:
"""
        Specifies the time zone to be used in interpreting schedule. The value of this field must be a time zone name from the [tz database](http://en.wikipedia.org/wiki/Tz_database). Note that some time zones include a provision for daylight saving time. The rules for daylight saving time are determined by the chosen tz. For UTC use the string "utc". If a time zone is not specified, the default will be in UTC (also known as GMT).
"""
return pulumi.get(self, "time_zone")
@time_zone.setter
def time_zone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_zone", value)
class Job(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
app_engine_http_target: Optional[pulumi.Input[pulumi.InputType['AppEngineHttpTargetArgs']]] = None,
attempt_deadline: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
http_target: Optional[pulumi.Input[pulumi.InputType['HttpTargetArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_target: Optional[pulumi.Input[pulumi.InputType['PubsubTargetArgs']]] = None,
retry_config: Optional[pulumi.Input[pulumi.InputType['RetryConfigArgs']]] = None,
schedule: Optional[pulumi.Input[str]] = None,
time_zone: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates a job.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['AppEngineHttpTargetArgs']] app_engine_http_target: App Engine HTTP target.
:param pulumi.Input[str] attempt_deadline: The deadline for job attempts. If the request handler does not respond by this deadline then the request is cancelled and the attempt is marked as a `DEADLINE_EXCEEDED` failure. The failed attempt can be viewed in execution logs. Cloud Scheduler will retry the job according to the RetryConfig. The allowed duration for this deadline is: * For HTTP targets, between 15 seconds and 30 minutes. * For App Engine HTTP targets, between 15 seconds and 24 hours.
:param pulumi.Input[str] description: Optionally caller-specified in CreateJob or UpdateJob. A human-readable description for the job. This string must not contain more than 500 characters.
:param pulumi.Input[pulumi.InputType['HttpTargetArgs']] http_target: HTTP target.
:param pulumi.Input[str] name: Optionally caller-specified in CreateJob, after which it becomes output only. The job name. For example: `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or periods (.). For more information, see [Identifying projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) * `LOCATION_ID` is the canonical ID for the job's location. The list of available locations can be obtained by calling ListLocations. For more information, see https://cloud.google.com/about/locations/. * `JOB_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), or underscores (_). The maximum length is 500 characters.
:param pulumi.Input[pulumi.InputType['PubsubTargetArgs']] pubsub_target: Pub/Sub target.
:param pulumi.Input[pulumi.InputType['RetryConfigArgs']] retry_config: Settings that determine the retry behavior.
:param pulumi.Input[str] schedule: Required, except when used with UpdateJob. Describes the schedule on which the job will be executed. The schedule can be either of the following types: * [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) * English-like [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules) As a general rule, execution `n + 1` of a job will not begin until execution `n` has finished. Cloud Scheduler will never allow two simultaneously outstanding executions. For example, this implies that if the `n+1`th execution is scheduled to run at 16:00 but the `n`th execution takes until 16:15, the `n+1`th execution will not start until `16:15`. A scheduled start time will be delayed if the previous execution has not ended when its scheduled time occurs. If retry_count > 0 and a job attempt fails, the job will be tried a total of retry_count times, with exponential backoff, until the next scheduled start time.
        :param pulumi.Input[str] time_zone: Specifies the time zone to be used in interpreting schedule. The value of this field must be a time zone name from the [tz database](http://en.wikipedia.org/wiki/Tz_database). Note that some time zones include a provision for daylight saving time. The rules for daylight saving time are determined by the chosen tz. For UTC use the string "utc". If a time zone is not specified, the default will be in UTC (also known as GMT).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[JobArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates a job.
:param str resource_name: The name of the resource.
:param JobArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(JobArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
app_engine_http_target: Optional[pulumi.Input[pulumi.InputType['AppEngineHttpTargetArgs']]] = None,
attempt_deadline: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
http_target: Optional[pulumi.Input[pulumi.InputType['HttpTargetArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_target: Optional[pulumi.Input[pulumi.InputType['PubsubTargetArgs']]] = None,
retry_config: Optional[pulumi.Input[pulumi.InputType['RetryConfigArgs']]] = None,
schedule: Optional[pulumi.Input[str]] = None,
time_zone: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = JobArgs.__new__(JobArgs)
__props__.__dict__["app_engine_http_target"] = app_engine_http_target
__props__.__dict__["attempt_deadline"] = attempt_deadline
__props__.__dict__["description"] = description
__props__.__dict__["http_target"] = http_target
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["pubsub_target"] = pubsub_target
__props__.__dict__["retry_config"] = retry_config
__props__.__dict__["schedule"] = schedule
__props__.__dict__["time_zone"] = time_zone
__props__.__dict__["last_attempt_time"] = None
__props__.__dict__["schedule_time"] = None
__props__.__dict__["state"] = None
__props__.__dict__["status"] = None
__props__.__dict__["user_update_time"] = None
super(Job, __self__).__init__(
'google-native:cloudscheduler/v1:Job',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Job':
"""
Get an existing Job resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = JobArgs.__new__(JobArgs)
__props__.__dict__["app_engine_http_target"] = None
__props__.__dict__["attempt_deadline"] = None
__props__.__dict__["description"] = None
__props__.__dict__["http_target"] = None
__props__.__dict__["last_attempt_time"] = None
__props__.__dict__["name"] = None
__props__.__dict__["pubsub_target"] = None
__props__.__dict__["retry_config"] = None
__props__.__dict__["schedule"] = None
__props__.__dict__["schedule_time"] = None
__props__.__dict__["state"] = None
__props__.__dict__["status"] = None
__props__.__dict__["time_zone"] = None
__props__.__dict__["user_update_time"] = None
return Job(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="appEngineHttpTarget")
def app_engine_http_target(self) -> pulumi.Output['outputs.AppEngineHttpTargetResponse']:
"""
App Engine HTTP target.
"""
return pulumi.get(self, "app_engine_http_target")
@property
@pulumi.getter(name="attemptDeadline")
def attempt_deadline(self) -> pulumi.Output[str]:
"""
The deadline for job attempts. If the request handler does not respond by this deadline then the request is cancelled and the attempt is marked as a `DEADLINE_EXCEEDED` failure. The failed attempt can be viewed in execution logs. Cloud Scheduler will retry the job according to the RetryConfig. The allowed duration for this deadline is: * For HTTP targets, between 15 seconds and 30 minutes. * For App Engine HTTP targets, between 15 seconds and 24 hours.
"""
return pulumi.get(self, "attempt_deadline")
@property
@pulumi.getter
def description(self) -> pulumi.Output[str]:
"""
Optionally caller-specified in CreateJob or UpdateJob. A human-readable description for the job. This string must not contain more than 500 characters.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="httpTarget")
def http_target(self) -> pulumi.Output['outputs.HttpTargetResponse']:
"""
HTTP target.
"""
return pulumi.get(self, "http_target")
@property
@pulumi.getter(name="lastAttemptTime")
def last_attempt_time(self) -> pulumi.Output[str]:
"""
The time the last job attempt started.
"""
return pulumi.get(self, "last_attempt_time")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Optionally caller-specified in CreateJob, after which it becomes output only. The job name. For example: `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or periods (.). For more information, see [Identifying projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) * `LOCATION_ID` is the canonical ID for the job's location. The list of available locations can be obtained by calling ListLocations. For more information, see https://cloud.google.com/about/locations/. * `JOB_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), or underscores (_). The maximum length is 500 characters.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pubsubTarget")
def pubsub_target(self) -> pulumi.Output['outputs.PubsubTargetResponse']:
"""
Pub/Sub target.
"""
return pulumi.get(self, "pubsub_target")
@property
@pulumi.getter(name="retryConfig")
def retry_config(self) -> pulumi.Output['outputs.RetryConfigResponse']:
"""
Settings that determine the retry behavior.
"""
return pulumi.get(self, "retry_config")
@property
@pulumi.getter
def schedule(self) -> pulumi.Output[str]:
"""
Required, except when used with UpdateJob. Describes the schedule on which the job will be executed. The schedule can be either of the following types: * [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) * English-like [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules) As a general rule, execution `n + 1` of a job will not begin until execution `n` has finished. Cloud Scheduler will never allow two simultaneously outstanding executions. For example, this implies that if the `n+1`th execution is scheduled to run at 16:00 but the `n`th execution takes until 16:15, the `n+1`th execution will not start until `16:15`. A scheduled start time will be delayed if the previous execution has not ended when its scheduled time occurs. If retry_count > 0 and a job attempt fails, the job will be tried a total of retry_count times, with exponential backoff, until the next scheduled start time.
"""
return pulumi.get(self, "schedule")
@property
@pulumi.getter(name="scheduleTime")
def schedule_time(self) -> pulumi.Output[str]:
"""
The next time the job is scheduled. Note that this may be a retry of a previously failed attempt or the next execution time according to the schedule.
"""
return pulumi.get(self, "schedule_time")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
State of the job.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def status(self) -> pulumi.Output['outputs.StatusResponse']:
"""
The response from the target for the last attempted execution.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="timeZone")
def time_zone(self) -> pulumi.Output[str]:
"""
        Specifies the time zone to be used in interpreting schedule. The value of this field must be a time zone name from the [tz database](http://en.wikipedia.org/wiki/Tz_database). Note that some time zones include a provision for daylight saving time. The rules for daylight saving time are determined by the chosen tz. For UTC use the string "utc". If a time zone is not specified, the default will be in UTC (also known as GMT).
"""
return pulumi.get(self, "time_zone")
@property
@pulumi.getter(name="userUpdateTime")
def user_update_time(self) -> pulumi.Output[str]:
"""
The creation time of the job.
"""
return pulumi.get(self, "user_update_time")
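# A short usage sketch for the Job resource above, assuming a configured Pulumi stack for the
# google-native provider. The resource name, schedule, and target URI are illustrative; kept
# as a comment because Pulumi resources are only created inside an engine run.
#
#     import pulumi
#     from pulumi_google_native.cloudscheduler.v1 import Job, HttpTargetArgs
#
#     job = Job(
#         "nightly-ping",
#         location="us-central1",
#         schedule="0 3 * * *",      # daily at 03:00 in the given time zone
#         time_zone="utc",
#         http_target=HttpTargetArgs(uri="https://example.com/ping"),
#     )
#     pulumi.export("job_name", job.name)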
| 62.734554
| 969
| 0.686777
| 3,566
| 27,415
| 5.102916
| 0.09198
| 0.045942
| 0.057427
| 0.042315
| 0.836896
| 0.798648
| 0.75996
| 0.733528
| 0.713414
| 0.646205
| 0
| 0.006402
| 0.213752
| 27,415
| 436
| 970
| 62.87844
| 0.83781
| 0.486449
| 0
| 0.429091
| 1
| 0
| 0.131515
| 0.031727
| 0
| 0
| 0
| 0
| 0
| 1
| 0.152727
| false
| 0.003636
| 0.029091
| 0.007273
| 0.283636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ec3f2634a7dd042e7d4e8f41649febe639597b8
| 65
|
py
|
Python
|
oswald/tools/generatePlainAPI.py
|
ms7m/Oswald
|
7b84f7d97a02050ec266e77071a3214f69d8f4ac
|
[
"MIT"
] | 2
|
2019-11-18T18:45:26.000Z
|
2019-11-18T18:49:34.000Z
|
oswald/tools/generatePlainAPI.py
|
ms7m/Oswald
|
7b84f7d97a02050ec266e77071a3214f69d8f4ac
|
[
"MIT"
] | null | null | null |
oswald/tools/generatePlainAPI.py
|
ms7m/Oswald
|
7b84f7d97a02050ec266e77071a3214f69d8f4ac
|
[
"MIT"
] | null | null | null |
import falcon
def generate_plain_api():
return falcon.API()
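# Minimal usage sketch for the helper above. falcon.API() is the legacy WSGI constructor
# (newer falcon releases call it falcon.App); the resource and route here are illustrative.
class _PingResource(object):
    def on_get(self, req, resp):
        resp.media = {'status': 'ok'}  # falcon serializes .media to JSON

api = generate_plain_api()
api.add_route('/ping', _PingResource())
# Serve with any WSGI server, e.g.: gunicorn thismodule:api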
| 13
| 25
| 0.738462
| 9
| 65
| 5.111111
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169231
| 65
| 5
| 26
| 13
| 0.851852
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
9ed02c92bbd8f6b71774d1d28efc82a1724f76f2
| 3,185
|
py
|
Python
|
simpletodolist/views.py
|
ericchan2012/todolist
|
fb77504f6545de16ae63f0cc4fc9b68dc2dce4b6
|
[
"Apache-2.0"
] | null | null | null |
simpletodolist/views.py
|
ericchan2012/todolist
|
fb77504f6545de16ae63f0cc4fc9b68dc2dce4b6
|
[
"Apache-2.0"
] | null | null | null |
simpletodolist/views.py
|
ericchan2012/todolist
|
fb77504f6545de16ae63f0cc4fc9b68dc2dce4b6
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.contrib.auth.models import User
from django.http import Http404
from simpletodolist.models import Todo
def todolist(request):
todolist = Todo.objects.filter(flag=1)
finishtodos = Todo.objects.filter(flag=0)
return render_to_response('simpleTodo.html',
{'todolist': todolist, 'finishtodos': finishtodos},
context_instance=RequestContext(request))
def todofinish(request, id=''):
todo = Todo.objects.get(id=id)
if todo.flag == '1':
todo.flag = '0'
todo.save()
return HttpResponseRedirect('/simpleTodo/')
todolist = Todo.objects.filter(flag=1)
return render_to_response('simpleTodo.html', {'todolist': todolist},
context_instance=RequestContext(request))
def todoback(request, id=''):
todo = Todo.objects.get(id=id)
if todo.flag == '0':
todo.flag = '1'
todo.save()
return HttpResponseRedirect('/simpleTodo/')
todolist = Todo.objects.filter(flag=1)
return render_to_response('simpleTodo.html', {'todolist': todolist},
context_instance=RequestContext(request))
def tododelete(request, id=''):
try:
todo = Todo.objects.get(id=id)
except Exception:
raise Http404
if todo:
todo.delete()
return HttpResponseRedirect('/simpleTodo/')
todolist = Todo.objects.filter(flag=1)
return render_to_response('simpleTodo.html', {'todolist': todolist},
context_instance=RequestContext(request))
def addTodo(request):
if request.method == 'POST':
atodo = request.POST['todo']
priority = request.POST['priority']
user = User.objects.get(id='1')
todo = Todo(user=user, todo=atodo, priority=priority, flag='1')
todo.save()
todolist = Todo.objects.filter(flag='1')
finishtodos = Todo.objects.filter(flag=0)
return render_to_response('showtodo.html',
{'todolist': todolist, 'finishtodos': finishtodos},
context_instance=RequestContext(request))
else:
todolist = Todo.objects.filter(flag=1)
finishtodos = Todo.objects.filter(flag=0)
return render_to_response('simpleTodo.html',
{'todolist': todolist, 'finishtodos': finishtodos})
def updatetodo(request, id=''):
if request.method == 'POST':
        atodo = request.POST['todo']
        priority = request.POST['priority']
        # update the existing todo in place rather than creating a new row
        try:
            todo = Todo.objects.get(id=id)
        except Exception:
            raise Http404
        todo.todo = atodo
        todo.priority = priority
        todo.flag = '1'
        todo.save()
todolist = Todo.objects.filter(flag='1')
finishtodos = Todo.objects.filter(flag=0)
return render_to_response('simpleTodo.html',
{'todolist': todolist, 'finishtodos': finishtodos},
context_instance=RequestContext(request))
else:
try:
todo = Todo.objects.get(id=id)
except Exception:
raise Http404
return render_to_response('updatatodo.html', {'todo': todo},
context_instance=RequestContext(request))
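# A sketch of the URLconf these views expect (belongs in a separate urls.py; old-style Django
# to match the Python 2 idioms above). The patterns and the '/simpleTodo/' prefix are
# assumptions inferred from the HttpResponseRedirect('/simpleTodo/') calls:
#
#     from django.conf.urls import url
#     from simpletodolist import views
#
#     urlpatterns = [
#         url(r'^simpleTodo/$', views.todolist),
#         url(r'^simpleTodo/add/$', views.addTodo),
#         url(r'^simpleTodo/finish/(?P<id>\d+)/$', views.todofinish),
#         url(r'^simpleTodo/back/(?P<id>\d+)/$', views.todoback),
#         url(r'^simpleTodo/delete/(?P<id>\d+)/$', views.tododelete),
#         url(r'^simpleTodo/update/(?P<id>\d+)/$', views.updatetodo),
#     ]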
| 35.388889
| 72
| 0.652119
| 351
| 3,185
| 5.846154
| 0.156695
| 0.080409
| 0.091131
| 0.112573
| 0.767057
| 0.765595
| 0.765595
| 0.765595
| 0.765595
| 0.765595
| 0
| 0.011277
| 0.220408
| 3,185
| 90
| 73
| 35.388889
| 0.815143
| 0
| 0
| 0.717949
| 0
| 0
| 0.095104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.076923
| null | null | 0.012821
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ee58eb9897b171a674cf5775d948909203cda6b
| 7,554
|
py
|
Python
|
tests/test_command_json_to_graph.py
|
matusmarhefka/oval-graph
|
c63886c41f3e9655befe633b900861dcde7665cb
|
[
"Apache-2.0"
] | null | null | null |
tests/test_command_json_to_graph.py
|
matusmarhefka/oval-graph
|
c63886c41f3e9655befe633b900861dcde7665cb
|
[
"Apache-2.0"
] | null | null | null |
tests/test_command_json_to_graph.py
|
matusmarhefka/oval-graph
|
c63886c41f3e9655befe633b900861dcde7665cb
|
[
"Apache-2.0"
] | null | null | null |
import json
import os
import re
import subprocess
import pexpect
import pytest
from readchar import key
import tests.any_test_help
@pytest.mark.usefixtures("remove_generated_reports_in_root")
def test_command_json_to_graph():
src = tests.any_test_help.get_random_dir_in_tmp() + '.json'
out = subprocess.check_output(['python3',
'-m',
'oval_graph.command_line',
'arf-to-json',
'tests/test_data/ssg-fedora-ds-arf.xml',
'xccdf_org.ssgproject.content_rule_package_abrt_removed'
])
with open(src, "w+") as f:
f.writelines(out.decode('utf-8'))
subprocess.check_call(['python3',
'-m',
'oval_graph.command_line',
'json-to-graph',
'-o', '.',
src,
'xccdf_org.ssgproject.content_rule_package_abrt_removed'
], cwd='./')
file_src = tests.any_test_help.find_files(
"graph-of-xccdf_org.ssgproject.content_rule_package_abrt_removed",
'../')
tests.any_test_help.compare_results_html(file_src[0])
@pytest.mark.usefixtures("remove_generated_reports_in_root")
def test_command_json_to_graph_with_verbose():
src = tests.any_test_help.get_random_dir_in_tmp() + '.json'
out = subprocess.check_output(['python3',
'-m',
'oval_graph.command_line',
'arf-to-json',
'tests/test_data/ssg-fedora-ds-arf.xml',
'xccdf_org.ssgproject.content_rule_package_abrt_removed'
])
with open(src, "w+") as f:
f.writelines(out.decode('utf-8'))
out = subprocess.check_output(['python3',
'-m',
'oval_graph.command_line',
'json-to-graph',
'-o', '.',
'--verbose',
src,
'xccdf_org.ssgproject.content_rule_package_abrt_removed'
],
cwd='./',
stderr=subprocess.STDOUT)
src_regex = r"\"(\.\/.*?)\""
src = re.search(src_regex, out.decode('utf-8')).group(1)
tests.any_test_help.compare_results_html('.' + src)
def test_command_json_to_graph_is_tty():
src = tests.any_test_help.get_random_dir_in_tmp() + '.json'
    with open(src, 'w+') as output:
subprocess.check_call(['python3',
'-m',
'oval_graph.command_line',
'arf-to-json',
'tests/test_data/ssg-fedora-ds-arf.xml',
'xccdf_org.ssgproject.content_rule_package_abrt_removed'
],
stdout=output)
out_dir = tests.any_test_help.get_random_dir_in_tmp()
out = subprocess.check_output(['python3',
'-m',
'oval_graph.command_line',
'json-to-graph',
'--out',
out_dir,
src,
'xccdf_org.ssgproject.content_rule_package_abrt_removed'
])
tests.any_test_help.compare_results_html(
os.path.join(out_dir, os.listdir(out_dir)[0]))
def test_inquirer_choice_rule():
src = tests.any_test_help.get_random_dir_in_tmp() + '.json'
sut = pexpect.spawn('python3',
['-m',
'oval_graph.command_line',
'arf-to-json',
'tests/test_data/ssg-fedora-ds-arf.xml',
r'_package_\w+_removed'
])
sut.expect(r'\w+')
sut.send(key.DOWN)
sut.send(key.SPACE)
sut.send(key.UP)
sut.send(key.SPACE)
sut.send(key.ENTER)
sut.wait()
out = sut.readlines()
with open(src, "w+") as f:
f.writelines(row.decode("utf-8") for row in out[20:])
tests.any_test_help.compare_results_json(src)
out_dir = tests.any_test_help.get_random_dir_in_tmp()
sut = pexpect.spawn('python3',
['-m',
'oval_graph.command_line',
'json-to-graph',
'-o',
out_dir,
src,
'.'
])
sut.expect(r'\w+')
sut.send(key.DOWN)
sut.send(key.SPACE)
sut.send(key.ENTER)
sut.wait()
assert len(os.listdir(out_dir)) == 1
assert ("xccdf_org.ssgproject.content_rule_package_abrt_removed"
in os.listdir(out_dir)[0])
def test_command_parameter_all():
src = tests.any_test_help.get_random_dir_in_tmp() + '.json'
    with open(src, 'w+') as output:
subprocess.check_call(['python3',
'-m',
'oval_graph.command_line',
'arf-to-json',
'--all',
'tests/test_data/ssg-fedora-ds-arf.xml',
'.'
],
stdout=output)
with open(src, "r") as f:
rules = json.load(f)
assert len(rules.keys()) == 184
out_dir = tests.any_test_help.get_random_dir_in_tmp()
subprocess.check_call(['python3',
'-m',
'oval_graph.command_line',
'json-to-graph',
src,
'.',
'--all',
'-o',
out_dir
])
assert len(os.listdir(out_dir)) == 184
def test_bad_command_json_to_graph_with_verbose():
out = subprocess.check_output(['python3',
'-m',
'oval_graph.command_line',
'json-to-graph',
'-v',
'tests/test_data/ssg-fedora-ds-arf.xml',
'.'
],
stderr=subprocess.STDOUT)
out_string = out.decode('utf-8')
assert out_string.find("Traceback") > -1
assert out_string.find("Error:") > -1
def test_bad_command_json_to_graph():
out = subprocess.check_output(['python3',
'-m',
'oval_graph.command_line',
'json-to-graph',
'tests/test_data/ssg-fedora-ds-arf.xml',
'.'
],
stderr=subprocess.STDOUT)
out_string = out.decode('utf-8')
assert out_string.find("Traceback") == -1
assert out_string.find("Error:") > -1
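# The tests above rely on a "remove_generated_reports_in_root" fixture defined elsewhere
# (conftest.py, not shown here). A minimal sketch of what such a cleanup fixture could look
# like, assuming reports land in the repository root as graph-of-*.html; kept as a comment so
# it does not shadow the real fixture:
#
#     import glob
#
#     @pytest.fixture
#     def remove_generated_reports_in_root():
#         yield  # let the test run, then clean up
#         for report in glob.glob('graph-of-*.html'):
#             os.remove(report)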
| 39.757895
| 91
| 0.435796
| 721
| 7,554
| 4.266297
| 0.15534
| 0.036411
| 0.054616
| 0.072822
| 0.849805
| 0.843628
| 0.802991
| 0.758778
| 0.725293
| 0.714564
| 0
| 0.008491
| 0.454329
| 7,554
| 189
| 92
| 39.968254
| 0.737749
| 0
| 0
| 0.736842
| 1
| 0
| 0.194069
| 0.137675
| 0
| 0
| 0
| 0
| 0.046784
| 1
| 0.040936
| false
| 0
| 0.046784
| 0
| 0.087719
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7354b8201a482a561ae0ad7541c4d436f9b2ba70
| 5,267
|
py
|
Python
|
parsifal/reviews/decorators.py
|
glauberferreira/parsifal-mec
|
66f85e0d48a270bddd1170caa2131bc74872462d
|
[
"MIT"
] | 1
|
2019-06-13T16:09:26.000Z
|
2019-06-13T16:09:26.000Z
|
parsifal/reviews/decorators.py
|
glauberferreira/parsifal-mec
|
66f85e0d48a270bddd1170caa2131bc74872462d
|
[
"MIT"
] | null | null | null |
parsifal/reviews/decorators.py
|
glauberferreira/parsifal-mec
|
66f85e0d48a270bddd1170caa2131bc74872462d
|
[
"MIT"
] | 3
|
2019-10-05T04:16:59.000Z
|
2021-04-20T05:00:50.000Z
|
from functools import wraps
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseForbidden, HttpResponseBadRequest, Http404
from parsifal.reviews.models import Review
def main_author_required(f):
def wrap(request, *args, **kwargs):
if 'review_name' in kwargs and 'username' in kwargs:
try:
review = Review.objects.get(name=kwargs['review_name'], author__username__iexact=kwargs['username'])
if review.author.id == request.user.id:
return f(request, *args, **kwargs)
else:
raise Http404
except Review.DoesNotExist:
raise Http404
else:
try:
review_id = request.POST['review-id']
except:
try:
review_id = request.GET['review-id']
except:
return HttpResponseBadRequest()
review = Review.objects.get(pk=review_id)
if review.author.id == request.user.id:
return f(request, *args, **kwargs)
else:
return HttpResponseForbidden()
    wrap.__doc__ = f.__doc__
    wrap.__name__ = f.__name__
return wrap
def author_required(f):
def wrap(request, *args, **kwargs):
if 'review_name' in kwargs and 'username' in kwargs:
try:
review = Review.objects.get(name=kwargs['review_name'], author__username__iexact=kwargs['username'])
if review.is_author_or_coauthor(request.user):
return f(request, *args, **kwargs)
else:
raise Http404
except Review.DoesNotExist:
raise Http404
else:
try:
review_id = request.POST['review-id']
except:
try:
review_id = request.GET['review-id']
except:
return HttpResponseBadRequest()
review = Review.objects.get(pk=review_id)
if review.is_author_or_coauthor(request.user):
return f(request, *args, **kwargs)
else:
return HttpResponseForbidden()
    wrap.__doc__ = f.__doc__
    wrap.__name__ = f.__name__
return wrap
def visitor_required(f):
def wrap(request, *args, **kwargs):
if 'review_name' in kwargs and 'username' in kwargs:
try:
review = Review.objects.get(name=kwargs['review_name'], author__username__iexact=kwargs['username'])
if review.is_visitor(request.user):
return f(request, *args, **kwargs)
else:
raise Http404
except Review.DoesNotExist:
raise Http404
else:
try:
review_id = request.POST['review-id']
except:
try:
review_id = request.GET['review-id']
except:
return HttpResponseBadRequest()
review = Review.objects.get(pk=review_id)
if review.is_visitor(request.user):
return f(request, *args, **kwargs)
else:
return HttpResponseForbidden()
    wrap.__doc__ = f.__doc__
    wrap.__name__ = f.__name__
return wrap
def author_or_visitor_required(f):
def wrap(request, *args, **kwargs):
if 'review_name' in kwargs and 'username' in kwargs:
try:
review = Review.objects.get(name=kwargs['review_name'], author__username__iexact=kwargs['username'])
                if request.method == 'POST':
                    if review.is_author_or_coauthor(request.user):
                        return f(request, *args, **kwargs)
                    else:
                        raise Http404
                else:
                    # is_visitor matches the method name used by visitor_required above
                    if review.is_author_or_coauthor(request.user) or review.is_visitor(request.user) or review.export_protocol:
                        return f(request, *args, **kwargs)
                    else:
                        raise Http404
            except Review.DoesNotExist:
                raise Http404
        else:
            try:
                review_id = request.POST['review-id']
            except:
                try:
                    review_id = request.GET['review-id']
                except:
                    return HttpResponseBadRequest()
            review = Review.objects.get(pk=review_id)
            if request.method == 'POST':
                if review.is_author_or_coauthor(request.user):
                    return f(request, *args, **kwargs)
                else:
                    return HttpResponseForbidden()
            else:
                if review.is_author_or_coauthor(request.user) or review.is_visitor(request.user) or review.export_protocol:
                    return f(request, *args, **kwargs)
                else:
                    return HttpResponseForbidden()
    wrap.__doc__ = f.__doc__
    wrap.__name__ = f.__name__
return wrap
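# Usage sketch for the decorators above; the view itself is hypothetical. Each decorator
# resolves the Review from URL kwargs ('username', 'review_name') or from a 'review-id'
# request field, and short-circuits with Http404 / HttpResponseForbidden on failure.
@main_author_required
def edit_review(request, username='', review_name=''):
    # Only reached when request.user is the review's main author.
    return HttpResponse('editing %s' % review_name)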
| 37.892086
| 128
| 0.529334
| 514
| 5,267
| 5.169261
| 0.11284
| 0.060218
| 0.089575
| 0.067746
| 0.895747
| 0.895747
| 0.895747
| 0.895747
| 0.866767
| 0.866767
| 0
| 0.010478
| 0.3839
| 5,267
| 138
| 129
| 38.166667
| 0.80832
| 0
| 0
| 0.899225
| 0
| 0
| 0.044807
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.031008
| null | null | 0.03876
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b449a198bf2ac5aa452f5f06ebc1d96ec1e74bed
| 623
|
py
|
Python
|
cells/hht.py
|
NathanKlineInstitute/SMARTAgent
|
751c880c43d73eca395b5533f6f7fe56bf5816d4
|
[
"MIT"
] | null | null | null |
cells/hht.py
|
NathanKlineInstitute/SMARTAgent
|
751c880c43d73eca395b5533f6f7fe56bf5816d4
|
[
"MIT"
] | null | null | null |
cells/hht.py
|
NathanKlineInstitute/SMARTAgent
|
751c880c43d73eca395b5533f6f7fe56bf5816d4
|
[
"MIT"
] | null | null | null |
from neuron import h
class HHE:
def __init__ (self):
self.soma = soma = h.Section(name='soma',cell=self)
soma.diam=soma.L=18.8
soma.Ra=123
soma.insert('hh')
soma(.5).hh.gnabar=.12
soma(.5).hh.gkbar=0.036
soma(.5).hh.gl=0.003
soma(.5).hh.el=-70
class HHI:
def __init__ (self):
self.soma = soma = h.Section(name='soma',cell=self)
soma.diam=soma.L=18.8
soma.Ra=123
soma.insert('hh')
soma(.5).hh.gnabar=.12
soma(.5).hh.gkbar=0.036
soma(.5).hh.gl=0.003
soma(.5).hh.el=-70
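# A minimal simulation sketch using the cell classes above; the stimulus amplitude and run
# time are arbitrary illustrative values.
if __name__ == '__main__':
    h.load_file('stdrun.hoc')          # provides the standard run system (h.continuerun)
    cell = HHE()
    stim = h.IClamp(cell.soma(0.5))    # current clamp at mid-soma
    stim.delay, stim.dur, stim.amp = 5, 50, 0.2
    v = h.Vector().record(cell.soma(0.5)._ref_v)
    h.finitialize(-65)
    h.continuerun(100)
    print(v.max())                     # peak membrane potential (mV)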
| 23.961538
| 59
| 0.512039
| 100
| 623
| 3.11
| 0.32
| 0.128617
| 0.180064
| 0.096463
| 0.893891
| 0.893891
| 0.893891
| 0.893891
| 0.893891
| 0.893891
| 0
| 0.102564
| 0.311396
| 623
| 25
| 60
| 24.92
| 0.622378
| 0
| 0
| 0.857143
| 0
| 0
| 0.019262
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.047619
| 0
| 0.238095
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b45e76f12221d7e7a79873e25de7d1262b2baffa
| 20,557
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sysadmin_time_of_day_timezone.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sysadmin_time_of_day_timezone.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_sysadmin_time_of_day_timezone.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_sysadmin_time_of_day_timezone
This module contains a collection of YANG definitions
for Cisco IOS\-XR sysadmin TOD configuration and cli.
This module contains definitions
for the following management objects\:
Time of the Day(TOD) Cli and configuration data
Copyright (c) 2013\-2017 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Clock(Entity):
"""
.. attribute:: timezone
**type**\: :py:class:`Timezone <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Clock.Timezone>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Clock, self).__init__()
self._top_entity = None
self.yang_name = "clock"
self.yang_parent_name = "Cisco-IOS-XR-sysadmin-time-of-day-timezone"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("timezone", ("timezone", Clock.Timezone))])
self._leafs = OrderedDict()
self.timezone = Clock.Timezone()
self.timezone.parent = self
self._children_name_map["timezone"] = "timezone"
self._segment_path = lambda: "Cisco-IOS-XR-sysadmin-time-of-day-timezone:clock"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Clock, [], name, value)
class Timezone(Entity):
"""
.. attribute:: tzname
**type**\: str
.. attribute:: area
**type**\: str
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Clock.Timezone, self).__init__()
self.yang_name = "timezone"
self.yang_parent_name = "clock"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('tzname', (YLeaf(YType.str, 'tzname'), ['str'])),
('area', (YLeaf(YType.str, 'area'), ['str'])),
])
self.tzname = None
self.area = None
self._segment_path = lambda: "timezone"
self._absolute_path = lambda: "Cisco-IOS-XR-sysadmin-time-of-day-timezone:clock/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Clock.Timezone, ['tzname', 'area'], name, value)
def clone_ptr(self):
self._top_entity = Clock()
return self._top_entity
class Trace(Entity):
"""
.. attribute:: timezone_config
**type**\: :py:class:`TimezoneConfig <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneConfig>`
.. attribute:: timezone_notify
**type**\: :py:class:`TimezoneNotify <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneNotify>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace, self).__init__()
self._top_entity = None
self.yang_name = "trace"
self.yang_parent_name = "Cisco-IOS-XR-sysadmin-time-of-day-timezone"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("timezone_config", ("timezone_config", Trace.TimezoneConfig)), ("timezone_notify", ("timezone_notify", Trace.TimezoneNotify))])
self._leafs = OrderedDict()
self.timezone_config = Trace.TimezoneConfig()
self.timezone_config.parent = self
self._children_name_map["timezone_config"] = "timezone_config"
self.timezone_notify = Trace.TimezoneNotify()
self.timezone_notify.parent = self
self._children_name_map["timezone_notify"] = "timezone_notify"
self._segment_path = lambda: "Cisco-IOS-XR-sysadmin-time-of-day-timezone:trace"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace, [], name, value)
class TimezoneConfig(Entity):
"""
.. attribute:: trace
show traceable processes
**type**\: list of :py:class:`Trace_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneConfig.Trace_>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneConfig, self).__init__()
self.yang_name = "timezone_config"
self.yang_parent_name = "trace"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("trace", ("trace", Trace.TimezoneConfig.Trace_))])
self._leafs = OrderedDict()
self.trace = YList(self)
self._segment_path = lambda: "timezone_config"
self._absolute_path = lambda: "Cisco-IOS-XR-sysadmin-time-of-day-timezone:trace/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneConfig, [], name, value)
class Trace_(Entity):
"""
show traceable processes
.. attribute:: buffer (key)
**type**\: str
.. attribute:: location
**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneConfig.Trace_.Location>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneConfig.Trace_, self).__init__()
self.yang_name = "trace"
self.yang_parent_name = "timezone_config"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['buffer']
self._child_classes = OrderedDict([("location", ("location", Trace.TimezoneConfig.Trace_.Location))])
self._leafs = OrderedDict([
('buffer', (YLeaf(YType.str, 'buffer'), ['str'])),
])
self.buffer = None
self.location = YList(self)
self._segment_path = lambda: "trace" + "[buffer='" + str(self.buffer) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-sysadmin-time-of-day-timezone:trace/timezone_config/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneConfig.Trace_, [u'buffer'], name, value)
class Location(Entity):
"""
.. attribute:: location_name (key)
**type**\: str
.. attribute:: all_options
**type**\: list of :py:class:`AllOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneConfig.Trace_.Location.AllOptions>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneConfig.Trace_.Location, self).__init__()
self.yang_name = "location"
self.yang_parent_name = "trace"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['location_name']
self._child_classes = OrderedDict([("all-options", ("all_options", Trace.TimezoneConfig.Trace_.Location.AllOptions))])
self._leafs = OrderedDict([
('location_name', (YLeaf(YType.str, 'location_name'), ['str'])),
])
self.location_name = None
self.all_options = YList(self)
self._segment_path = lambda: "location" + "[location_name='" + str(self.location_name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneConfig.Trace_.Location, [u'location_name'], name, value)
class AllOptions(Entity):
"""
.. attribute:: option (key)
**type**\: str
.. attribute:: trace_blocks
**type**\: list of :py:class:`TraceBlocks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneConfig.Trace_.Location.AllOptions.TraceBlocks>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneConfig.Trace_.Location.AllOptions, self).__init__()
self.yang_name = "all-options"
self.yang_parent_name = "location"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['option']
self._child_classes = OrderedDict([("trace-blocks", ("trace_blocks", Trace.TimezoneConfig.Trace_.Location.AllOptions.TraceBlocks))])
self._leafs = OrderedDict([
('option', (YLeaf(YType.str, 'option'), ['str'])),
])
self.option = None
self.trace_blocks = YList(self)
self._segment_path = lambda: "all-options" + "[option='" + str(self.option) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneConfig.Trace_.Location.AllOptions, [u'option'], name, value)
class TraceBlocks(Entity):
"""
.. attribute:: data
Trace output block
**type**\: str
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneConfig.Trace_.Location.AllOptions.TraceBlocks, self).__init__()
self.yang_name = "trace-blocks"
self.yang_parent_name = "all-options"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('data', (YLeaf(YType.str, 'data'), ['str'])),
])
self.data = None
self._segment_path = lambda: "trace-blocks"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneConfig.Trace_.Location.AllOptions.TraceBlocks, [u'data'], name, value)
class TimezoneNotify(Entity):
"""
.. attribute:: trace
show traceable processes
**type**\: list of :py:class:`Trace_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneNotify.Trace_>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneNotify, self).__init__()
self.yang_name = "timezone_notify"
self.yang_parent_name = "trace"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("trace", ("trace", Trace.TimezoneNotify.Trace_))])
self._leafs = OrderedDict()
self.trace = YList(self)
self._segment_path = lambda: "timezone_notify"
self._absolute_path = lambda: "Cisco-IOS-XR-sysadmin-time-of-day-timezone:trace/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneNotify, [], name, value)
class Trace_(Entity):
"""
show traceable processes
.. attribute:: buffer (key)
**type**\: str
.. attribute:: location
**type**\: list of :py:class:`Location <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneNotify.Trace_.Location>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneNotify.Trace_, self).__init__()
self.yang_name = "trace"
self.yang_parent_name = "timezone_notify"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['buffer']
self._child_classes = OrderedDict([("location", ("location", Trace.TimezoneNotify.Trace_.Location))])
self._leafs = OrderedDict([
('buffer', (YLeaf(YType.str, 'buffer'), ['str'])),
])
self.buffer = None
self.location = YList(self)
self._segment_path = lambda: "trace" + "[buffer='" + str(self.buffer) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-sysadmin-time-of-day-timezone:trace/timezone_notify/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneNotify.Trace_, [u'buffer'], name, value)
class Location(Entity):
"""
.. attribute:: location_name (key)
**type**\: str
.. attribute:: all_options
**type**\: list of :py:class:`AllOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneNotify.Trace_.Location.AllOptions>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneNotify.Trace_.Location, self).__init__()
self.yang_name = "location"
self.yang_parent_name = "trace"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['location_name']
self._child_classes = OrderedDict([("all-options", ("all_options", Trace.TimezoneNotify.Trace_.Location.AllOptions))])
self._leafs = OrderedDict([
('location_name', (YLeaf(YType.str, 'location_name'), ['str'])),
])
self.location_name = None
self.all_options = YList(self)
self._segment_path = lambda: "location" + "[location_name='" + str(self.location_name) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneNotify.Trace_.Location, [u'location_name'], name, value)
class AllOptions(Entity):
"""
.. attribute:: option (key)
**type**\: str
.. attribute:: trace_blocks
**type**\: list of :py:class:`TraceBlocks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_sysadmin_time_of_day_timezone.Trace.TimezoneNotify.Trace_.Location.AllOptions.TraceBlocks>`
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneNotify.Trace_.Location.AllOptions, self).__init__()
self.yang_name = "all-options"
self.yang_parent_name = "location"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['option']
self._child_classes = OrderedDict([("trace-blocks", ("trace_blocks", Trace.TimezoneNotify.Trace_.Location.AllOptions.TraceBlocks))])
self._leafs = OrderedDict([
('option', (YLeaf(YType.str, 'option'), ['str'])),
])
self.option = None
self.trace_blocks = YList(self)
self._segment_path = lambda: "all-options" + "[option='" + str(self.option) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneNotify.Trace_.Location.AllOptions, [u'option'], name, value)
class TraceBlocks(Entity):
"""
.. attribute:: data
Trace output block
**type**\: str
"""
_prefix = 'timezone'
_revision = '2016-07-04'
def __init__(self):
super(Trace.TimezoneNotify.Trace_.Location.AllOptions.TraceBlocks, self).__init__()
self.yang_name = "trace-blocks"
self.yang_parent_name = "all-options"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('data', (YLeaf(YType.str, 'data'), ['str'])),
])
self.data = None
self._segment_path = lambda: "trace-blocks"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Trace.TimezoneNotify.Trace_.Location.AllOptions.TraceBlocks, [u'data'], name, value)
def clone_ptr(self):
self._top_entity = Trace()
return self._top_entity
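# --- Illustrative usage sketch (added commentary; not part of the generated
# bindings). A minimal example, assuming a reachable IOS-XR admin-plane
# NETCONF endpoint and the ydk-py runtime; the address and credentials below
# are placeholders.
if __name__ == "__main__":
    from ydk.services import CRUDService
    from ydk.providers import NetconfServiceProvider

    provider = NetconfServiceProvider(address="192.0.2.1",
                                      username="admin",
                                      password="admin")
    crud = CRUDService()
    # Read the whole trace container; YDK populates the nested YLists
    # (timezone_config.trace, timezone_notify.trace) declared above.
    trace = crud.read(provider, Trace())
    for entry in trace.timezone_config.trace:
        print(entry.buffer)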
| 36.513321
| 196
| 0.50557
| 1,851
| 20,557
| 5.252836
| 0.069692
| 0.027152
| 0.03394
| 0.038877
| 0.87257
| 0.852515
| 0.840996
| 0.8267
| 0.818369
| 0.802633
| 0
| 0.009094
| 0.390183
| 20,557
| 562
| 197
| 36.578292
| 0.766512
| 0.155324
| 0
| 0.720588
| 0
| 0.007353
| 0.109607
| 0.030396
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102941
| false
| 0
| 0.018382
| 0
| 0.191176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c313d859cbab3035d665686133c3c90a997f9aca
| 6,422
|
py
|
Python
|
code/utils/segmentation/IID_losses.py
|
haasj22/Recycling_Segmentation
|
0420baf4c2ac2bd18b03791e123fb4b0ad7869b1
|
[
"MIT"
] | 1
|
2021-03-26T00:10:17.000Z
|
2021-03-26T00:10:17.000Z
|
code/utils/segmentation/IID_losses.py
|
haasj22/Recycling_Segmentation
|
0420baf4c2ac2bd18b03791e123fb4b0ad7869b1
|
[
"MIT"
] | null | null | null |
code/utils/segmentation/IID_losses.py
|
haasj22/Recycling_Segmentation
|
0420baf4c2ac2bd18b03791e123fb4b0ad7869b1
|
[
"MIT"
] | null | null | null |
from sys import float_info
import torch
import torch.nn.functional as F
from IIC.code.utils.segmentation.render import render
from IIC.code.utils.segmentation.transforms import perform_affine_tf, random_translation_multiple
EPS = float_info.epsilon
RENDER = False
def IID_segmentation_loss(x1_outs, x2_outs, all_affine2_to_1=None,
all_mask_img1=None, lamb=1.0,
half_T_side_dense=None,
half_T_side_sparse_min=None,
half_T_side_sparse_max=None):
assert (x1_outs.requires_grad)
assert (x2_outs.requires_grad)
assert (not all_affine2_to_1.requires_grad)
assert (not all_mask_img1.requires_grad)
assert (x1_outs.shape == x2_outs.shape)
# bring x2 back into x1's spatial frame
x2_outs_inv = perform_affine_tf(x2_outs, all_affine2_to_1)
if (half_T_side_sparse_min != 0) or (half_T_side_sparse_max != 0):
x2_outs_inv = random_translation_multiple(x2_outs_inv,
half_side_min=half_T_side_sparse_min,
half_side_max=half_T_side_sparse_max)
if RENDER:
# indices added to each name by render()
render(x1_outs, mode="image_as_feat", name="invert_img1_")
render(x2_outs, mode="image_as_feat", name="invert_img2_pre_")
render(x2_outs_inv, mode="image_as_feat", name="invert_img2_post_")
render(all_mask_img1, mode="mask", name="invert_mask_")
# zero out all irrelevant patches
bn, k, h, w = x1_outs.shape
all_mask_img1 = all_mask_img1.view(bn, 1, h, w) # mult, already float32
x1_outs = x1_outs * all_mask_img1 # broadcasts
x2_outs_inv = x2_outs_inv * all_mask_img1
# sum over everything except classes, by convolving x1_outs with x2_outs_inv
# which is symmetric, so doesn't matter which one is the filter
x1_outs = x1_outs.permute(1, 0, 2, 3).contiguous() # k, ni, h, w
x2_outs_inv = x2_outs_inv.permute(1, 0, 2, 3).contiguous() # k, ni, h, w
# k, k, 2 * half_T_side_dense + 1, 2 * half_T_side_dense + 1
p_i_j = F.conv2d(x1_outs, weight=x2_outs_inv, padding=(half_T_side_dense,
half_T_side_dense))
p_i_j = p_i_j.sum(dim=2, keepdim=False).sum(dim=2, keepdim=False) # k, k
# normalise using the sum, not bn * h * w * T_side * T_side, because we use a mask
# also, some pixels did not have a completely unmasked box neighbourhood,
# but that's fine - it just means fewer samples from that pixel
current_norm = float(p_i_j.sum())
p_i_j = p_i_j / current_norm
# symmetrise
p_i_j = (p_i_j + p_i_j.t()) / 2.
# compute marginals
p_i_mat = p_i_j.sum(dim=1).unsqueeze(1) # k, 1
p_j_mat = p_i_j.sum(dim=0).unsqueeze(0) # 1, k
# for log stability; tiny values cancelled out by mult with p_i_j anyway
p_i_j[(p_i_j < EPS).data] = EPS
p_i_mat[(p_i_mat < EPS).data] = EPS
p_j_mat[(p_j_mat < EPS).data] = EPS
# maximise information
loss = (-p_i_j * (torch.log(p_i_j) - lamb * torch.log(p_i_mat) -
lamb * torch.log(p_j_mat))).sum()
# for analysis only
loss_no_lamb = (-p_i_j * (torch.log(p_i_j) - torch.log(p_i_mat) -
torch.log(p_j_mat))).sum()
return loss, loss_no_lamb
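# --- Added illustration: a minimal sketch (not part of the original repo) of
# the joint-distribution construction used above, run on random inputs. It
# checks the properties the loss relies on: p_i_j is non-negative, symmetric
# after symmetrisation, and sums to 1.
def _demo_joint_distribution(k=3, bn=2, h=8, w=8, half_T_side_dense=1):
    x1 = F.softmax(torch.randn(bn, k, h, w), dim=1)  # per-pixel class probs
    x2 = F.softmax(torch.randn(bn, k, h, w), dim=1)
    x1 = x1.permute(1, 0, 2, 3).contiguous()  # k, bn, h, w
    x2 = x2.permute(1, 0, 2, 3).contiguous()
    p_i_j = F.conv2d(x1, weight=x2,
                     padding=(half_T_side_dense, half_T_side_dense))
    p_i_j = p_i_j.sum(dim=2).sum(dim=2)  # collapse shift locations -> k, k
    p_i_j = p_i_j / p_i_j.sum()
    p_i_j = (p_i_j + p_i_j.t()) / 2.
    assert torch.allclose(p_i_j.sum(), torch.tensor(1.))
    return p_i_j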
def IID_segmentation_loss_uncollapsed(x1_outs, x2_outs, all_affine2_to_1=None,
all_mask_img1=None, lamb=1.0,
half_T_side_dense=None,
half_T_side_sparse_min=None,
half_T_side_sparse_max=None):
assert (x1_outs.requires_grad)
assert (x2_outs.requires_grad)
assert (not all_affine2_to_1.requires_grad)
assert (not all_mask_img1.requires_grad)
assert (x1_outs.shape == x2_outs.shape)
# bring x2 back into x1's spatial frame
x2_outs_inv = perform_affine_tf(x2_outs, all_affine2_to_1)
if (half_T_side_sparse_min != 0) or (half_T_side_sparse_max != 0):
x2_outs_inv = random_translation_multiple(x2_outs_inv,
half_side_min=half_T_side_sparse_min,
half_side_max=half_T_side_sparse_max)
if RENDER:
# indices added to each name by render()
render(x1_outs, mode="image_as_feat", name="invert_img1_")
render(x2_outs, mode="image_as_feat", name="invert_img2_pre_")
render(x2_outs_inv, mode="image_as_feat", name="invert_img2_post_")
render(all_mask_img1, mode="mask", name="invert_mask_")
# zero out all irrelevant patches
bn, k, h, w = x1_outs.shape
all_mask_img1 = all_mask_img1.view(bn, 1, h, w) # mult, already float32
x1_outs = x1_outs * all_mask_img1 # broadcasts
x2_outs_inv = x2_outs_inv * all_mask_img1
# sum over everything except classes, by convolving x1_outs with x2_outs_inv
# which is symmetric, so doesn't matter which one is the filter
x1_outs = x1_outs.permute(1, 0, 2, 3).contiguous() # k, ni, h, w
x2_outs_inv = x2_outs_inv.permute(1, 0, 2, 3).contiguous() # k, ni, h, w
# k, k, 2 * half_T_side_dense + 1, 2 * half_T_side_dense + 1
p_i_j = F.conv2d(x1_outs, weight=x2_outs_inv, padding=(half_T_side_dense,
half_T_side_dense))
# do expectation over each shift location in the T_side_dense *
# T_side_dense box
T_side_dense = half_T_side_dense * 2 + 1
# T x T x k x k
p_i_j = p_i_j.permute(2, 3, 0, 1)
p_i_j = p_i_j / p_i_j.sum(dim=3, keepdim=True).sum(dim=2,
keepdim=True) # norm
# symmetrise, transpose the k x k part
p_i_j = (p_i_j + p_i_j.permute(0, 1, 3, 2)) / 2.0
# T x T x k x k
p_i_mat = p_i_j.sum(dim=2, keepdim=True).repeat(1, 1, k, 1)
p_j_mat = p_i_j.sum(dim=3, keepdim=True).repeat(1, 1, 1, k)
# for log stability; tiny values cancelled out by mult with p_i_j anyway
p_i_j[(p_i_j < EPS).data] = EPS
p_i_mat[(p_i_mat < EPS).data] = EPS
p_j_mat[(p_j_mat < EPS).data] = EPS
# maximise information
loss = (-p_i_j * (torch.log(p_i_j) - lamb * torch.log(p_i_mat) -
lamb * torch.log(p_j_mat))).sum() / (
T_side_dense * T_side_dense)
# for analysis only
loss_no_lamb = (-p_i_j * (torch.log(p_i_j) - torch.log(p_i_mat) -
torch.log(p_j_mat))).sum() / (
T_side_dense * T_side_dense)
return loss, loss_no_lamb
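# --- Added illustration (sketch only): the mutual-information objective that
# both losses above minimise the negative of, given a k x k joint such as the
# one returned by _demo_joint_distribution. clamp(min=EPS) plays the role of
# the in-place EPS masking used in the loss functions.
def _demo_information(p_i_j, lamb=1.0):
    p_i = p_i_j.sum(dim=1).unsqueeze(1)  # k, 1 marginal
    p_j = p_i_j.sum(dim=0).unsqueeze(0)  # 1, k marginal
    p_i_j = p_i_j.clamp(min=EPS)
    p_i = p_i.clamp(min=EPS)
    p_j = p_j.clamp(min=EPS)
    return (p_i_j * (torch.log(p_i_j)
                     - lamb * torch.log(p_i)
                     - lamb * torch.log(p_j))).sum()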
| 40.1375
| 97
| 0.643258
| 1,099
| 6,422
| 3.391265
| 0.153776
| 0.024685
| 0.028978
| 0.048296
| 0.856453
| 0.814328
| 0.807352
| 0.79179
| 0.773544
| 0.76147
| 0
| 0.033284
| 0.256151
| 6,422
| 159
| 98
| 40.389937
| 0.746912
| 0.207412
| 0
| 0.723404
| 0
| 0
| 0.039612
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 1
| 0.021277
| false
| 0
| 0.053191
| 0
| 0.095745
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c318a3af764abfa557dbd07c3c3187fdc7edd43b
| 125,723
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/network/v20150501preview/outputs.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/network/v20150501preview/outputs.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_nextgen/network/v20150501preview/outputs.py
|
test-wiz-sec/pulumi-azure-nextgen
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'AddressSpaceResponse',
'ApplicationGatewayBackendAddressPoolResponse',
'ApplicationGatewayBackendAddressResponse',
'ApplicationGatewayBackendHttpSettingsResponse',
'ApplicationGatewayFrontendIPConfigurationResponse',
'ApplicationGatewayFrontendPortResponse',
'ApplicationGatewayHttpListenerResponse',
'ApplicationGatewayIPConfigurationResponse',
'ApplicationGatewayRequestRoutingRuleResponse',
'ApplicationGatewaySkuResponse',
'ApplicationGatewaySslCertificateResponse',
'BackendAddressPoolResponse',
'DhcpOptionsResponse',
'ExpressRouteCircuitAuthorizationResponse',
'ExpressRouteCircuitPeeringConfigResponse',
'ExpressRouteCircuitPeeringResponse',
'ExpressRouteCircuitServiceProviderPropertiesResponse',
'ExpressRouteCircuitSkuResponse',
'ExpressRouteCircuitStatsResponse',
'FrontendIpConfigurationResponse',
'InboundNatPoolResponse',
'InboundNatRuleResponse',
'LoadBalancingRuleResponse',
'NetworkInterfaceDnsSettingsResponse',
'NetworkInterfaceIpConfigurationResponse',
'OutboundNatRuleResponse',
'ProbeResponse',
'PublicIpAddressDnsSettingsResponse',
'RouteResponse',
'SecurityRuleResponse',
'SubResourceResponse',
'SubnetResponse',
]
@pulumi.output_type
class AddressSpaceResponse(dict):
"""
AddressSpace contains an array of IP address ranges that can be used by subnets
"""
def __init__(__self__, *,
address_prefixes: Optional[Sequence[str]] = None):
"""
AddressSpace contains an array of IP address ranges that can be used by subnets
:param Sequence[str] address_prefixes: Gets or sets List of address blocks reserved for this virtual network in CIDR notation
"""
if address_prefixes is not None:
pulumi.set(__self__, "address_prefixes", address_prefixes)
@property
@pulumi.getter(name="addressPrefixes")
def address_prefixes(self) -> Optional[Sequence[str]]:
"""
Gets or sets List of address blocks reserved for this virtual network in CIDR notation
"""
return pulumi.get(self, "address_prefixes")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
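# --- Added illustration (a hypothetical sketch; in normal use the Pulumi
# engine builds these output types from resource outputs rather than user
# code). Values passed through pulumi.set in __init__ are stored dict-style
# and read back through the typed property, which proxies pulumi.get.
def _demo_address_space():
    resp = AddressSpaceResponse(address_prefixes=["10.0.0.0/16"])
    assert resp.address_prefixes == ["10.0.0.0/16"]
    return resp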
@pulumi.output_type
class ApplicationGatewayBackendAddressPoolResponse(dict):
"""
Backend Address Pool of application gateway
"""
def __init__(__self__, *,
backend_addresses: Optional[Sequence['outputs.ApplicationGatewayBackendAddressResponse']] = None,
backend_ip_configurations: Optional[Sequence['outputs.SubResourceResponse']] = None,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
provisioning_state: Optional[str] = None):
"""
Backend Address Pool of application gateway
:param Sequence['ApplicationGatewayBackendAddressResponseArgs'] backend_addresses: Gets or sets the backend addresses
:param Sequence['SubResourceResponseArgs'] backend_ip_configurations: Gets or sets backendIPConfiguration of application gateway
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str provisioning_state: Gets or sets Provisioning state of the backend address pool resource Updating/Deleting/Failed
"""
if backend_addresses is not None:
pulumi.set(__self__, "backend_addresses", backend_addresses)
if backend_ip_configurations is not None:
pulumi.set(__self__, "backend_ip_configurations", backend_ip_configurations)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendAddresses")
def backend_addresses(self) -> Optional[Sequence['outputs.ApplicationGatewayBackendAddressResponse']]:
"""
Gets or sets the backend addresses
"""
return pulumi.get(self, "backend_addresses")
@property
@pulumi.getter(name="backendIPConfigurations")
def backend_ip_configurations(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets or sets backendIPConfiguration of application gateway
"""
return pulumi.get(self, "backend_ip_configurations")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the backend address pool resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
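# --- Added illustration (hypothetical sketch): nested output types compose
# the same way. ApplicationGatewayBackendAddressResponse is defined just
# below in this module and is resolved when the function is called.
def _demo_backend_pool():
    pool = ApplicationGatewayBackendAddressPoolResponse(
        backend_addresses=[
            ApplicationGatewayBackendAddressResponse(ip_address="10.0.0.4")],
        provisioning_state="Succeeded")
    return pool.backend_addresses[0].ip_address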
@pulumi.output_type
class ApplicationGatewayBackendAddressResponse(dict):
"""
Backend Address of application gateway
"""
def __init__(__self__, *,
fqdn: Optional[str] = None,
ip_address: Optional[str] = None):
"""
Backend Address of application gateway
:param str fqdn: Gets or sets the dns name
:param str ip_address: Gets or sets the ip address
"""
if fqdn is not None:
pulumi.set(__self__, "fqdn", fqdn)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
@property
@pulumi.getter
def fqdn(self) -> Optional[str]:
"""
Gets or sets the dns name
"""
return pulumi.get(self, "fqdn")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[str]:
"""
Gets or sets the ip address
"""
return pulumi.get(self, "ip_address")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationGatewayBackendHttpSettingsResponse(dict):
"""
Backend address pool settings of application gateway
"""
def __init__(__self__, *,
cookie_based_affinity: Optional[str] = None,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
port: Optional[int] = None,
protocol: Optional[str] = None,
provisioning_state: Optional[str] = None):
"""
Backend address pool settings of application gateway
:param str cookie_based_affinity: Gets or sets the cookie affinity
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param int port: Gets or sets the port
:param str protocol: Gets or sets the protocol
:param str provisioning_state: Gets or sets Provisioning state of the backend http settings resource Updating/Deleting/Failed
"""
if cookie_based_affinity is not None:
pulumi.set(__self__, "cookie_based_affinity", cookie_based_affinity)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if port is not None:
pulumi.set(__self__, "port", port)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="cookieBasedAffinity")
def cookie_based_affinity(self) -> Optional[str]:
"""
Gets or sets the cookie affinity
"""
return pulumi.get(self, "cookie_based_affinity")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def port(self) -> Optional[int]:
"""
Gets or sets the port
"""
return pulumi.get(self, "port")
@property
@pulumi.getter
def protocol(self) -> Optional[str]:
"""
Gets or sets the protocol
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the backend http settings resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationGatewayFrontendIPConfigurationResponse(dict):
"""
Frontend IP configuration of application gateway
"""
def __init__(__self__, *,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
private_ip_address: Optional[str] = None,
private_ip_allocation_method: Optional[str] = None,
provisioning_state: Optional[str] = None,
public_ip_address: Optional['outputs.SubResourceResponse'] = None,
subnet: Optional['outputs.SubResourceResponse'] = None):
"""
Frontend IP configuration of application gateway
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str private_ip_address: Gets or sets the privateIPAddress of the Network Interface IP Configuration
:param str private_ip_allocation_method: Gets or sets PrivateIP allocation method (Static/Dynamic)
:param str provisioning_state: Gets or sets Provisioning state of the PublicIP resource Updating/Deleting/Failed
:param 'SubResourceResponseArgs' public_ip_address: Gets or sets the reference of the PublicIP resource
:param 'SubResourceResponseArgs' subnet: Gets or sets the reference of the subnet resource
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if private_ip_address is not None:
pulumi.set(__self__, "private_ip_address", private_ip_address)
if private_ip_allocation_method is not None:
pulumi.set(__self__, "private_ip_allocation_method", private_ip_allocation_method)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_ip_address is not None:
pulumi.set(__self__, "public_ip_address", public_ip_address)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateIPAddress")
def private_ip_address(self) -> Optional[str]:
"""
Gets or sets the privateIPAddress of the Network Interface IP Configuration
"""
return pulumi.get(self, "private_ip_address")
@property
@pulumi.getter(name="privateIPAllocationMethod")
def private_ip_allocation_method(self) -> Optional[str]:
"""
Gets or sets PrivateIP allocation method (Static/Dynamic)
"""
return pulumi.get(self, "private_ip_allocation_method")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the PublicIP resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publicIPAddress")
def public_ip_address(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the PublicIP resource
"""
return pulumi.get(self, "public_ip_address")
@property
@pulumi.getter
def subnet(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the subnet resource
"""
return pulumi.get(self, "subnet")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationGatewayFrontendPortResponse(dict):
"""
Frontend Port of application gateway
"""
def __init__(__self__, *,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
port: Optional[int] = None,
provisioning_state: Optional[str] = None):
"""
Frontend Port of application gateway
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param int port: Gets or sets the frontend port
:param str provisioning_state: Gets or sets Provisioning state of the frontend port resource Updating/Deleting/Failed
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if port is not None:
pulumi.set(__self__, "port", port)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def port(self) -> Optional[int]:
"""
Gets or sets the frontend port
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the frontend port resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationGatewayHttpListenerResponse(dict):
"""
Http listener of application gateway
"""
def __init__(__self__, *,
etag: Optional[str] = None,
frontend_ip_configuration: Optional['outputs.SubResourceResponse'] = None,
frontend_port: Optional['outputs.SubResourceResponse'] = None,
id: Optional[str] = None,
name: Optional[str] = None,
protocol: Optional[str] = None,
provisioning_state: Optional[str] = None,
ssl_certificate: Optional['outputs.SubResourceResponse'] = None):
"""
Http listener of application gateway
:param str etag: A unique read-only string that changes whenever the resource is updated
:param 'SubResourceResponseArgs' frontend_ip_configuration: Gets or sets frontend IP configuration resource of application gateway
:param 'SubResourceResponseArgs' frontend_port: Gets or sets frontend port resource of application gateway
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str protocol: Gets or sets the protocol
:param str provisioning_state: Gets or sets Provisioning state of the http listener resource Updating/Deleting/Failed
:param 'SubResourceResponseArgs' ssl_certificate: Gets or sets ssl certificate resource of application gateway
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configuration is not None:
pulumi.set(__self__, "frontend_ip_configuration", frontend_ip_configuration)
if frontend_port is not None:
pulumi.set(__self__, "frontend_port", frontend_port)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if ssl_certificate is not None:
pulumi.set(__self__, "ssl_certificate", ssl_certificate)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="frontendIPConfiguration")
def frontend_ip_configuration(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets frontend IP configuration resource of application gateway
"""
return pulumi.get(self, "frontend_ip_configuration")
@property
@pulumi.getter(name="frontendPort")
def frontend_port(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets frontend port resource of application gateway
"""
return pulumi.get(self, "frontend_port")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def protocol(self) -> Optional[str]:
"""
Gets or sets the protocol
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the http listener resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="sslCertificate")
def ssl_certificate(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets ssl certificate resource of application gateway
"""
return pulumi.get(self, "ssl_certificate")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationGatewayIPConfigurationResponse(dict):
"""
IP configuration of application gateway
"""
def __init__(__self__, *,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
provisioning_state: Optional[str] = None,
subnet: Optional['outputs.SubResourceResponse'] = None):
"""
IP configuration of application gateway
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str provisioning_state: Gets or sets Provisioning state of the application gateway subnet resource Updating/Deleting/Failed
:param 'SubResourceResponseArgs' subnet: Gets or sets the reference of the subnet resource. A subnet from which the application gateway gets its private address
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the application gateway subnet resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def subnet(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the subnet resource. A subnet from which the application gateway gets its private address
"""
return pulumi.get(self, "subnet")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationGatewayRequestRoutingRuleResponse(dict):
"""
Request routing rule of application gateway
"""
def __init__(__self__, *,
backend_address_pool: Optional['outputs.SubResourceResponse'] = None,
backend_http_settings: Optional['outputs.SubResourceResponse'] = None,
etag: Optional[str] = None,
http_listener: Optional['outputs.SubResourceResponse'] = None,
id: Optional[str] = None,
name: Optional[str] = None,
provisioning_state: Optional[str] = None,
rule_type: Optional[str] = None):
"""
Request routing rule of application gateway
:param 'SubResourceResponseArgs' backend_address_pool: Gets or sets backend address pool resource of application gateway
:param 'SubResourceResponseArgs' backend_http_settings: Gets or sets backend http settings resource of application gateway
:param str etag: A unique read-only string that changes whenever the resource is updated
:param 'SubResourceResponseArgs' http_listener: Gets or sets http listener resource of application gateway
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str provisioning_state: Gets or sets Provisioning state of the request routing rule resource Updating/Deleting/Failed
:param str rule_type: Gets or sets the rule type
"""
if backend_address_pool is not None:
pulumi.set(__self__, "backend_address_pool", backend_address_pool)
if backend_http_settings is not None:
pulumi.set(__self__, "backend_http_settings", backend_http_settings)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if http_listener is not None:
pulumi.set(__self__, "http_listener", http_listener)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if rule_type is not None:
pulumi.set(__self__, "rule_type", rule_type)
@property
@pulumi.getter(name="backendAddressPool")
def backend_address_pool(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets backend address pool resource of application gateway
"""
return pulumi.get(self, "backend_address_pool")
@property
@pulumi.getter(name="backendHttpSettings")
def backend_http_settings(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets backend http settings resource of application gateway
"""
return pulumi.get(self, "backend_http_settings")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="httpListener")
def http_listener(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets http listener resource of application gateway
"""
return pulumi.get(self, "http_listener")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the request routing rule resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="ruleType")
def rule_type(self) -> Optional[str]:
"""
Gets or sets the rule type
"""
return pulumi.get(self, "rule_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationGatewaySkuResponse(dict):
"""
SKU of application gateway
"""
def __init__(__self__, *,
capacity: Optional[int] = None,
name: Optional[str] = None,
tier: Optional[str] = None):
"""
SKU of application gateway
:param int capacity: Gets or sets capacity (instance count) of application gateway
:param str name: Gets or sets name of application gateway SKU
:param str tier: Gets or sets tier of application gateway
"""
if capacity is not None:
pulumi.set(__self__, "capacity", capacity)
if name is not None:
pulumi.set(__self__, "name", name)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def capacity(self) -> Optional[int]:
"""
Gets or sets capacity (instance count) of application gateway
"""
return pulumi.get(self, "capacity")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets or sets name of application gateway SKU
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def tier(self) -> Optional[str]:
"""
Gets or sets tier of application gateway
"""
return pulumi.get(self, "tier")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ApplicationGatewaySslCertificateResponse(dict):
"""
SSL certificates of application gateway
"""
def __init__(__self__, *,
data: Optional[str] = None,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
password: Optional[str] = None,
provisioning_state: Optional[str] = None,
public_cert_data: Optional[str] = None):
"""
SSL certificates of application gateway
:param str data: Gets or sets the certificate data
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str password: Gets or sets the certificate password
:param str provisioning_state: Gets or sets Provisioning state of the ssl certificate resource Updating/Deleting/Failed
:param str public_cert_data: Gets or sets the certificate public data
"""
if data is not None:
pulumi.set(__self__, "data", data)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if password is not None:
pulumi.set(__self__, "password", password)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_cert_data is not None:
pulumi.set(__self__, "public_cert_data", public_cert_data)
@property
@pulumi.getter
def data(self) -> Optional[str]:
"""
Gets or sets the certificate data
"""
return pulumi.get(self, "data")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def password(self) -> Optional[str]:
"""
Gets or sets the certificate password
"""
return pulumi.get(self, "password")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the ssl certificate resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publicCertData")
def public_cert_data(self) -> Optional[str]:
"""
Gets or sets the certificate public data
"""
return pulumi.get(self, "public_cert_data")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendAddressPoolResponse(dict):
"""
Pool of backend IP addresses
"""
def __init__(__self__, *,
backend_ip_configurations: Optional[Sequence['outputs.SubResourceResponse']] = None,
etag: Optional[str] = None,
id: Optional[str] = None,
load_balancing_rules: Optional[Sequence['outputs.SubResourceResponse']] = None,
name: Optional[str] = None,
outbound_nat_rule: Optional['outputs.SubResourceResponse'] = None,
provisioning_state: Optional[str] = None):
"""
Pool of backend IP addresses
:param Sequence['SubResourceResponseArgs'] backend_ip_configurations: Gets collection of references to IPs defined in NICs
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param Sequence['SubResourceResponseArgs'] load_balancing_rules: Gets Load Balancing rules that use this Backend Address Pool
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param 'SubResourceResponseArgs' outbound_nat_rule: Gets outbound rules that use this Backend Address Pool
:param str provisioning_state: Provisioning state of the PublicIP resource Updating/Deleting/Failed
"""
if backend_ip_configurations is not None:
pulumi.set(__self__, "backend_ip_configurations", backend_ip_configurations)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if load_balancing_rules is not None:
pulumi.set(__self__, "load_balancing_rules", load_balancing_rules)
if name is not None:
pulumi.set(__self__, "name", name)
if outbound_nat_rule is not None:
pulumi.set(__self__, "outbound_nat_rule", outbound_nat_rule)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendIPConfigurations")
def backend_ip_configurations(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets collection of references to IPs defined in NICs
"""
return pulumi.get(self, "backend_ip_configurations")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="loadBalancingRules")
def load_balancing_rules(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets Load Balancing rules that use this Backend Address Pool
"""
return pulumi.get(self, "load_balancing_rules")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="outboundNatRule")
def outbound_nat_rule(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets outbound rules that use this Backend Address Pool
"""
return pulumi.get(self, "outbound_nat_rule")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Provisioning state of the PublicIP resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DhcpOptionsResponse(dict):
"""
DHCPOptions contains an array of DNS servers available to VMs deployed in the virtual network. Standard DHCP options for a subnet override VNET DHCP options.
"""
def __init__(__self__, *,
dns_servers: Optional[Sequence[str]] = None):
"""
DHCPOptions contains an array of DNS servers available to VMs deployed in the virtual network. Standard DHCP options for a subnet override VNET DHCP options.
:param Sequence[str] dns_servers: Gets or sets list of DNS servers IP addresses
"""
if dns_servers is not None:
pulumi.set(__self__, "dns_servers", dns_servers)
@property
@pulumi.getter(name="dnsServers")
def dns_servers(self) -> Optional[Sequence[str]]:
"""
Gets or sets list of DNS servers IP addresses
"""
return pulumi.get(self, "dns_servers")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ExpressRouteCircuitAuthorizationResponse(dict):
"""
Authorization in an ExpressRouteCircuit resource
"""
def __init__(__self__, *,
authorization_key: Optional[str] = None,
authorization_use_status: Optional[str] = None,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
provisioning_state: Optional[str] = None):
"""
Authorization in an ExpressRouteCircuit resource
:param str authorization_key: Gets or sets the authorization key
:param str authorization_use_status: Gets or sets AuthorizationUseStatus
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str provisioning_state: Gets or sets Provisioning state of the PublicIP resource Updating/Deleting/Failed
"""
if authorization_key is not None:
pulumi.set(__self__, "authorization_key", authorization_key)
if authorization_use_status is not None:
pulumi.set(__self__, "authorization_use_status", authorization_use_status)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="authorizationKey")
def authorization_key(self) -> Optional[str]:
"""
Gets or sets the authorization key
"""
return pulumi.get(self, "authorization_key")
@property
@pulumi.getter(name="authorizationUseStatus")
def authorization_use_status(self) -> Optional[str]:
"""
Gets or sets AuthorizationUseStatus
"""
return pulumi.get(self, "authorization_use_status")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the PublicIP resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ExpressRouteCircuitPeeringConfigResponse(dict):
"""
Specifies the peering config
"""
def __init__(__self__, *,
advertised_public_prefixes: Optional[Sequence[str]] = None,
advertised_public_prefixes_state: Optional[str] = None,
customer_asn: Optional[int] = None,
routing_registry_name: Optional[str] = None):
"""
Specifies the peering config
:param Sequence[str] advertised_public_prefixes: Gets or sets the reference of AdvertisedPublicPrefixes
:param str advertised_public_prefixes_state: Gets or sets AdvertisedPublicPrefixState of the Peering resource
:param int customer_asn: Gets or Sets CustomerAsn of the peering.
:param str routing_registry_name: Gets or Sets RoutingRegistryName of the config.
"""
if advertised_public_prefixes is not None:
pulumi.set(__self__, "advertised_public_prefixes", advertised_public_prefixes)
if advertised_public_prefixes_state is not None:
pulumi.set(__self__, "advertised_public_prefixes_state", advertised_public_prefixes_state)
if customer_asn is not None:
pulumi.set(__self__, "customer_asn", customer_asn)
if routing_registry_name is not None:
pulumi.set(__self__, "routing_registry_name", routing_registry_name)
@property
@pulumi.getter(name="advertisedPublicPrefixes")
def advertised_public_prefixes(self) -> Optional[Sequence[str]]:
"""
Gets or sets the reference of AdvertisedPublicPrefixes
"""
return pulumi.get(self, "advertised_public_prefixes")
@property
@pulumi.getter(name="advertisedPublicPrefixesState")
def advertised_public_prefixes_state(self) -> Optional[str]:
"""
Gets or sets AdvertisedPublicPrefixState of the Peering resource
"""
return pulumi.get(self, "advertised_public_prefixes_state")
@property
@pulumi.getter(name="customerASN")
def customer_asn(self) -> Optional[int]:
"""
Gets or Sets CustomerAsn of the peering.
"""
return pulumi.get(self, "customer_asn")
@property
@pulumi.getter(name="routingRegistryName")
def routing_registry_name(self) -> Optional[str]:
"""
Gets or Sets RoutingRegistryName of the config.
"""
return pulumi.get(self, "routing_registry_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ExpressRouteCircuitPeeringResponse(dict):
"""
Peering in an ExpressRouteCircuit resource
"""
def __init__(__self__, *,
azure_asn: Optional[int] = None,
etag: Optional[str] = None,
id: Optional[str] = None,
microsoft_peering_config: Optional['outputs.ExpressRouteCircuitPeeringConfigResponse'] = None,
name: Optional[str] = None,
peer_asn: Optional[int] = None,
peering_type: Optional[str] = None,
primary_azure_port: Optional[str] = None,
primary_peer_address_prefix: Optional[str] = None,
provisioning_state: Optional[str] = None,
secondary_azure_port: Optional[str] = None,
secondary_peer_address_prefix: Optional[str] = None,
shared_key: Optional[str] = None,
state: Optional[str] = None,
stats: Optional['outputs.ExpressRouteCircuitStatsResponse'] = None,
vlan_id: Optional[int] = None):
"""
Peering in an ExpressRouteCircuit resource
:param int azure_asn: Gets or sets the azure ASN
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param 'ExpressRouteCircuitPeeringConfigResponseArgs' microsoft_peering_config: Gets or sets the Microsoft peering config
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param int peer_asn: Gets or sets the peer ASN
:param str peering_type: Gets or sets PeeringType
:param str primary_azure_port: Gets or sets the primary port
:param str primary_peer_address_prefix: Gets or sets the primary address prefix
:param str provisioning_state: Gets or sets Provisioning state of the PublicIP resource Updating/Deleting/Failed
:param str secondary_azure_port: Gets or sets the secondary port
:param str secondary_peer_address_prefix: Gets or sets the secondary address prefix
:param str shared_key: Gets or sets the shared key
:param str state: Gets or sets state of Peering
:param 'ExpressRouteCircuitStatsResponseArgs' stats: Gets or sets peering stats
:param int vlan_id: Gets or sets the vlan id
"""
if azure_asn is not None:
pulumi.set(__self__, "azure_asn", azure_asn)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if microsoft_peering_config is not None:
pulumi.set(__self__, "microsoft_peering_config", microsoft_peering_config)
if name is not None:
pulumi.set(__self__, "name", name)
if peer_asn is not None:
pulumi.set(__self__, "peer_asn", peer_asn)
if peering_type is not None:
pulumi.set(__self__, "peering_type", peering_type)
if primary_azure_port is not None:
pulumi.set(__self__, "primary_azure_port", primary_azure_port)
if primary_peer_address_prefix is not None:
pulumi.set(__self__, "primary_peer_address_prefix", primary_peer_address_prefix)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if secondary_azure_port is not None:
pulumi.set(__self__, "secondary_azure_port", secondary_azure_port)
if secondary_peer_address_prefix is not None:
pulumi.set(__self__, "secondary_peer_address_prefix", secondary_peer_address_prefix)
if shared_key is not None:
pulumi.set(__self__, "shared_key", shared_key)
if state is not None:
pulumi.set(__self__, "state", state)
if stats is not None:
pulumi.set(__self__, "stats", stats)
if vlan_id is not None:
pulumi.set(__self__, "vlan_id", vlan_id)
@property
@pulumi.getter(name="azureASN")
def azure_asn(self) -> Optional[int]:
"""
Gets or sets the azure ASN
"""
return pulumi.get(self, "azure_asn")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="microsoftPeeringConfig")
def microsoft_peering_config(self) -> Optional['outputs.ExpressRouteCircuitPeeringConfigResponse']:
"""
Gets or sets the Microsoft peering config
"""
return pulumi.get(self, "microsoft_peering_config")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="peerASN")
def peer_asn(self) -> Optional[int]:
"""
Gets or sets the peer ASN
"""
return pulumi.get(self, "peer_asn")
@property
@pulumi.getter(name="peeringType")
def peering_type(self) -> Optional[str]:
"""
Gets or sets PeeringType
"""
return pulumi.get(self, "peering_type")
@property
@pulumi.getter(name="primaryAzurePort")
def primary_azure_port(self) -> Optional[str]:
"""
Gets or sets the primary port
"""
return pulumi.get(self, "primary_azure_port")
@property
@pulumi.getter(name="primaryPeerAddressPrefix")
def primary_peer_address_prefix(self) -> Optional[str]:
"""
Gets or sets the primary address prefix
"""
return pulumi.get(self, "primary_peer_address_prefix")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets Provisioning state of the PublicIP resource Updating/Deleting/Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="secondaryAzurePort")
def secondary_azure_port(self) -> Optional[str]:
"""
Gets or sets the secondary port
"""
return pulumi.get(self, "secondary_azure_port")
@property
@pulumi.getter(name="secondaryPeerAddressPrefix")
def secondary_peer_address_prefix(self) -> Optional[str]:
"""
Gets or sets the secondary address prefix
"""
return pulumi.get(self, "secondary_peer_address_prefix")
@property
@pulumi.getter(name="sharedKey")
def shared_key(self) -> Optional[str]:
"""
Gets or sets the shared key
"""
return pulumi.get(self, "shared_key")
@property
@pulumi.getter
def state(self) -> Optional[str]:
"""
Gets or sets state of Peering
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def stats(self) -> Optional['outputs.ExpressRouteCircuitStatsResponse']:
"""
Gets or sets peering stats
"""
return pulumi.get(self, "stats")
@property
@pulumi.getter(name="vlanId")
def vlan_id(self) -> Optional[int]:
"""
Gets or sets the vlan id
"""
return pulumi.get(self, "vlan_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
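
# Illustrative sketch, not part of the generated SDK: one way a consumer might
# render a peering like the output type above for logging. The helper name and
# format string are assumptions; every attribute read is defined on the class.
def _describe_peering(peering: 'ExpressRouteCircuitPeeringResponse') -> str:
    """Return a one-line, human-readable summary of an ExpressRoute peering."""
    return (f"{peering.peering_type or 'UnknownPeeringType'}: "
            f"Azure ASN {peering.azure_asn} <-> peer ASN {peering.peer_asn}, "
            f"state={peering.state}, vlan={peering.vlan_id}")
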
@pulumi.output_type
class ExpressRouteCircuitServiceProviderPropertiesResponse(dict):
"""
Contains ServiceProviderProperties in an ExpressRouteCircuit
"""
def __init__(__self__, *,
bandwidth_in_mbps: Optional[int] = None,
peering_location: Optional[str] = None,
service_provider_name: Optional[str] = None):
"""
Contains ServiceProviderProperties in an ExpressRouteCircuit
:param int bandwidth_in_mbps: Gets or sets BandwidthInMbps.
:param str peering_location: Gets or sets peering location.
:param str service_provider_name: Gets or sets serviceProviderName.
"""
if bandwidth_in_mbps is not None:
pulumi.set(__self__, "bandwidth_in_mbps", bandwidth_in_mbps)
if peering_location is not None:
pulumi.set(__self__, "peering_location", peering_location)
if service_provider_name is not None:
pulumi.set(__self__, "service_provider_name", service_provider_name)
@property
@pulumi.getter(name="bandwidthInMbps")
def bandwidth_in_mbps(self) -> Optional[int]:
"""
Gets or sets BandwidthInMbps.
"""
return pulumi.get(self, "bandwidth_in_mbps")
@property
@pulumi.getter(name="peeringLocation")
def peering_location(self) -> Optional[str]:
"""
Gets or sets peering location.
"""
return pulumi.get(self, "peering_location")
@property
@pulumi.getter(name="serviceProviderName")
def service_provider_name(self) -> Optional[str]:
"""
Gets or sets serviceProviderName.
"""
return pulumi.get(self, "service_provider_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ExpressRouteCircuitSkuResponse(dict):
"""
Contains sku in an ExpressRouteCircuit
"""
def __init__(__self__, *,
family: Optional[str] = None,
name: Optional[str] = None,
tier: Optional[str] = None):
"""
Contains sku in an ExpressRouteCircuit
:param str family: Gets or sets family of the sku.
:param str name: Gets or sets name of the sku.
:param str tier: Gets or sets tier of the sku.
"""
if family is not None:
pulumi.set(__self__, "family", family)
if name is not None:
pulumi.set(__self__, "name", name)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def family(self) -> Optional[str]:
"""
Gets or sets family of the sku.
"""
return pulumi.get(self, "family")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets or sets name of the sku.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def tier(self) -> Optional[str]:
"""
Gets or sets tier of the sku.
"""
return pulumi.get(self, "tier")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ExpressRouteCircuitStatsResponse(dict):
"""
Contains Stats associated with the peering
"""
def __init__(__self__, *,
bytes_in: Optional[int] = None,
bytes_out: Optional[int] = None):
"""
Contains Stats associated with the peering
:param int bytes_in: Gets BytesIn of the peering.
:param int bytes_out: Gets BytesOut of the peering.
"""
if bytes_in is not None:
pulumi.set(__self__, "bytes_in", bytes_in)
if bytes_out is not None:
pulumi.set(__self__, "bytes_out", bytes_out)
@property
@pulumi.getter(name="bytesIn")
def bytes_in(self) -> Optional[int]:
"""
Gets BytesIn of the peering.
"""
return pulumi.get(self, "bytes_in")
@property
@pulumi.getter(name="bytesOut")
def bytes_out(self) -> Optional[int]:
"""
Gets BytesOut of the peering.
"""
return pulumi.get(self, "bytes_out")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
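
# Illustrative sketch, not part of the generated SDK: total traffic seen on a
# peering, treating absent counters as zero. The helper name is hypothetical.
def _total_peering_bytes(stats: 'ExpressRouteCircuitStatsResponse') -> int:
    """Sum BytesIn and BytesOut, defaulting missing counters to 0."""
    return (stats.bytes_in or 0) + (stats.bytes_out or 0)
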
@pulumi.output_type
class FrontendIpConfigurationResponse(dict):
"""
Frontend IP address of the load balancer
"""
def __init__(__self__, *,
etag: Optional[str] = None,
id: Optional[str] = None,
inbound_nat_pools: Optional[Sequence['outputs.SubResourceResponse']] = None,
inbound_nat_rules: Optional[Sequence['outputs.SubResourceResponse']] = None,
load_balancing_rules: Optional[Sequence['outputs.SubResourceResponse']] = None,
name: Optional[str] = None,
outbound_nat_rules: Optional[Sequence['outputs.SubResourceResponse']] = None,
private_ip_address: Optional[str] = None,
private_ip_allocation_method: Optional[str] = None,
provisioning_state: Optional[str] = None,
public_ip_address: Optional['outputs.SubResourceResponse'] = None,
subnet: Optional['outputs.SubResourceResponse'] = None):
"""
Frontend IP address of the load balancer
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param Sequence['SubResourceResponseArgs'] inbound_nat_pools: Read only. Inbound pool URIs that use this frontend IP
:param Sequence['SubResourceResponseArgs'] inbound_nat_rules: Read only. Inbound rule URIs that use this frontend IP
:param Sequence['SubResourceResponseArgs'] load_balancing_rules: Gets Load Balancing rule URIs that use this frontend IP
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param Sequence['SubResourceResponseArgs'] outbound_nat_rules: Read only. Outbound rule URIs that use this frontend IP
:param str private_ip_address: Gets or sets the IP address of the Load Balancer. This is only specified if a specific private IP address shall be allocated from the subnet specified in subnetRef
:param str private_ip_allocation_method: Gets or sets PrivateIP allocation method (Static/Dynamic)
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
:param 'SubResourceResponseArgs' public_ip_address: Gets or sets the reference of the PublicIP resource
:param 'SubResourceResponseArgs' subnet: Gets or sets the reference of the subnet resource. A subnet from which the load balancer gets its private frontend address
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if inbound_nat_pools is not None:
pulumi.set(__self__, "inbound_nat_pools", inbound_nat_pools)
if inbound_nat_rules is not None:
pulumi.set(__self__, "inbound_nat_rules", inbound_nat_rules)
if load_balancing_rules is not None:
pulumi.set(__self__, "load_balancing_rules", load_balancing_rules)
if name is not None:
pulumi.set(__self__, "name", name)
if outbound_nat_rules is not None:
pulumi.set(__self__, "outbound_nat_rules", outbound_nat_rules)
if private_ip_address is not None:
pulumi.set(__self__, "private_ip_address", private_ip_address)
if private_ip_allocation_method is not None:
pulumi.set(__self__, "private_ip_allocation_method", private_ip_allocation_method)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_ip_address is not None:
pulumi.set(__self__, "public_ip_address", public_ip_address)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="inboundNatPools")
def inbound_nat_pools(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Read only. Inbound pool URIs that use this frontend IP
"""
return pulumi.get(self, "inbound_nat_pools")
@property
@pulumi.getter(name="inboundNatRules")
def inbound_nat_rules(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Read only. Inbound rule URIs that use this frontend IP
"""
return pulumi.get(self, "inbound_nat_rules")
@property
@pulumi.getter(name="loadBalancingRules")
def load_balancing_rules(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets Load Balancing rule URIs that use this frontend IP
"""
return pulumi.get(self, "load_balancing_rules")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="outboundNatRules")
def outbound_nat_rules(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Read only. Outbound rule URIs that use this frontend IP
"""
return pulumi.get(self, "outbound_nat_rules")
@property
@pulumi.getter(name="privateIPAddress")
def private_ip_address(self) -> Optional[str]:
"""
Gets or sets the IP address of the Load Balancer. This is only specified if a specific private IP address shall be allocated from the subnet specified in subnetRef
"""
return pulumi.get(self, "private_ip_address")
@property
@pulumi.getter(name="privateIPAllocationMethod")
def private_ip_allocation_method(self) -> Optional[str]:
"""
Gets or sets PrivateIP allocation method (Static/Dynamic)
"""
return pulumi.get(self, "private_ip_allocation_method")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publicIPAddress")
def public_ip_address(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the PublicIP resource
"""
return pulumi.get(self, "public_ip_address")
@property
@pulumi.getter
def subnet(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the subnet resource. A subnet from which the load balancer gets its private frontend address
"""
return pulumi.get(self, "subnet")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
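
# Illustrative sketch, not part of the generated SDK: the allocation method is
# documented as Static/Dynamic, so a case-insensitive comparison (a defensive
# assumption) tells callers whether the frontend IP is statically allocated.
def _is_static_frontend(config: 'FrontendIpConfigurationResponse') -> bool:
    """Return True when the frontend uses static private IP allocation."""
    return (config.private_ip_allocation_method or '').lower() == 'static'
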
@pulumi.output_type
class InboundNatPoolResponse(dict):
"""
Inbound NAT pool of the load balancer
"""
def __init__(__self__, *,
backend_port: int,
frontend_port_range_end: int,
frontend_port_range_start: int,
protocol: str,
etag: Optional[str] = None,
frontend_ip_configuration: Optional['outputs.SubResourceResponse'] = None,
id: Optional[str] = None,
name: Optional[str] = None,
provisioning_state: Optional[str] = None):
"""
Inbound NAT pool of the load balancer
:param int backend_port: Gets or sets a port used for internal connections on the endpoint. The localPort attribute maps the external port of the endpoint to an internal port on a role. This is useful in scenarios where a role must communicate with an internal component on a port that is different from the one that is exposed externally. If not specified, the value of localPort is the same as the port attribute. Set the value of localPort to '*' to automatically assign an unallocated port that is discoverable using the runtime API
:param int frontend_port_range_end: Gets or sets the ending port range for the NAT pool. You can specify any port number you choose, but the port numbers specified for each role in the service must be unique. Possible values range between 1 and 65535, inclusive
:param int frontend_port_range_start: Gets or sets the starting port range for the NAT pool. You can specify any port number you choose, but the port numbers specified for each role in the service must be unique. Possible values range between 1 and 65535, inclusive
:param str protocol: Gets or sets the transport protocol for the external endpoint. Possible values are Udp or Tcp
:param str etag: A unique read-only string that changes whenever the resource is updated
:param 'SubResourceResponseArgs' frontend_ip_configuration: Gets or sets a reference to frontend IP Addresses
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
pulumi.set(__self__, "backend_port", backend_port)
pulumi.set(__self__, "frontend_port_range_end", frontend_port_range_end)
pulumi.set(__self__, "frontend_port_range_start", frontend_port_range_start)
pulumi.set(__self__, "protocol", protocol)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configuration is not None:
pulumi.set(__self__, "frontend_ip_configuration", frontend_ip_configuration)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendPort")
def backend_port(self) -> int:
"""
Gets or sets a port used for internal connections on the endpoint. The localPort attribute maps the external port of the endpoint to an internal port on a role. This is useful in scenarios where a role must communicate with an internal component on a port that is different from the one that is exposed externally. If not specified, the value of localPort is the same as the port attribute. Set the value of localPort to '*' to automatically assign an unallocated port that is discoverable using the runtime API
"""
return pulumi.get(self, "backend_port")
@property
@pulumi.getter(name="frontendPortRangeEnd")
def frontend_port_range_end(self) -> int:
"""
Gets or sets the ending port range for the NAT pool. You can specify any port number you choose, but the port numbers specified for each role in the service must be unique. Possible values range between 1 and 65535, inclusive
"""
return pulumi.get(self, "frontend_port_range_end")
@property
@pulumi.getter(name="frontendPortRangeStart")
def frontend_port_range_start(self) -> int:
"""
Gets or sets the starting port range for the NAT pool. You can specify any port number you choose, but the port numbers specified for each role in the service must be unique. Possible values range between 1 and 65535, inclusive
"""
return pulumi.get(self, "frontend_port_range_start")
@property
@pulumi.getter
def protocol(self) -> str:
"""
Gets or sets the transport protocol for the external endpoint. Possible values are Udp or Tcp
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="frontendIPConfiguration")
def frontend_ip_configuration(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets a reference to frontend IP Addresses
"""
return pulumi.get(self, "frontend_ip_configuration")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
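
# Illustrative sketch, not part of the generated SDK: the docstrings above say
# the frontend port range is inclusive, so the number of ports an inbound NAT
# pool spans is end - start + 1. The helper name is hypothetical.
def _nat_pool_port_count(pool: 'InboundNatPoolResponse') -> int:
    """Count the frontend ports covered by the pool's inclusive range."""
    return pool.frontend_port_range_end - pool.frontend_port_range_start + 1
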
@pulumi.output_type
class InboundNatRuleResponse(dict):
"""
Inbound NAT rule of the load balancer
"""
def __init__(__self__, *,
enable_floating_ip: bool,
frontend_port: int,
protocol: str,
backend_ip_configuration: Optional['outputs.SubResourceResponse'] = None,
backend_port: Optional[int] = None,
etag: Optional[str] = None,
frontend_ip_configuration: Optional['outputs.SubResourceResponse'] = None,
id: Optional[str] = None,
idle_timeout_in_minutes: Optional[int] = None,
name: Optional[str] = None,
provisioning_state: Optional[str] = None):
"""
Inbound NAT rule of the load balancer
:param bool enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using SQL AlwaysOn Availability Groups in SQL Server. This setting can't be changed after you create the endpoint
:param int frontend_port: Gets or sets the port for the external endpoint. You can specify any port number you choose, but the port numbers specified for each role in the service must be unique. Possible values range between 1 and 65535, inclusive
:param str protocol: Gets or sets the transport protocol for the external endpoint. Possible values are Udp or Tcp
:param 'SubResourceResponseArgs' backend_ip_configuration: Gets or sets a reference to a private IP address defined on a NetworkInterface of a VM. Traffic sent to frontendPort of each of the frontendIPConfigurations is forwarded to the backend IP
:param int backend_port: Gets or sets a port used for internal connections on the endpoint. The localPort attribute maps the external port of the endpoint to an internal port on a role. This is useful in scenarios where a role must communicate with an internal component on a port that is different from the one that is exposed externally. If not specified, the value of localPort is the same as the port attribute. Set the value of localPort to '*' to automatically assign an unallocated port that is discoverable using the runtime API
:param str etag: A unique read-only string that changes whenever the resource is updated
:param 'SubResourceResponseArgs' frontend_ip_configuration: Gets or sets a reference to frontend IP Addresses
:param str id: Resource Id
:param int idle_timeout_in_minutes: Gets or sets the timeout for the Tcp idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to Tcp
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
pulumi.set(__self__, "enable_floating_ip", enable_floating_ip)
pulumi.set(__self__, "frontend_port", frontend_port)
pulumi.set(__self__, "protocol", protocol)
if backend_ip_configuration is not None:
pulumi.set(__self__, "backend_ip_configuration", backend_ip_configuration)
if backend_port is not None:
pulumi.set(__self__, "backend_port", backend_port)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configuration is not None:
pulumi.set(__self__, "frontend_ip_configuration", frontend_ip_configuration)
if id is not None:
pulumi.set(__self__, "id", id)
if idle_timeout_in_minutes is not None:
pulumi.set(__self__, "idle_timeout_in_minutes", idle_timeout_in_minutes)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="enableFloatingIP")
def enable_floating_ip(self) -> bool:
"""
Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using SQL AlwaysOn Availability Groups in SQL Server. This setting can't be changed after you create the endpoint
"""
return pulumi.get(self, "enable_floating_ip")
@property
@pulumi.getter(name="frontendPort")
def frontend_port(self) -> int:
"""
Gets or sets the port for the external endpoint. You can specify any port number you choose, but the port numbers specified for each role in the service must be unique. Possible values range between 1 and 65535, inclusive
"""
return pulumi.get(self, "frontend_port")
@property
@pulumi.getter
def protocol(self) -> str:
"""
Gets or sets the transport protocol for the external endpoint. Possible values are Udp or Tcp
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter(name="backendIPConfiguration")
def backend_ip_configuration(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets a reference to a private IP address defined on a NetworkInterface of a VM. Traffic sent to frontendPort of each of the frontendIPConfigurations is forwarded to the backend IP
"""
return pulumi.get(self, "backend_ip_configuration")
@property
@pulumi.getter(name="backendPort")
def backend_port(self) -> Optional[int]:
"""
Gets or sets a port used for internal connections on the endpoint. The localPort attribute maps the external port of the endpoint to an internal port on a role. This is useful in scenarios where a role must communicate with an internal component on a port that is different from the one that is exposed externally. If not specified, the value of localPort is the same as the port attribute. Set the value of localPort to '*' to automatically assign an unallocated port that is discoverable using the runtime API
"""
return pulumi.get(self, "backend_port")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="frontendIPConfiguration")
def frontend_ip_configuration(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets a reference to frontend IP Addresses
"""
return pulumi.get(self, "frontend_ip_configuration")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> Optional[int]:
"""
Gets or sets the timeout for the Tcp idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to Tcp
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
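
# Illustrative sketch, not part of the generated SDK: per the backendPort
# description above, an unspecified backend port defaults to the frontend
# port. The helper name is hypothetical.
def _effective_backend_port(rule: 'InboundNatRuleResponse') -> int:
    """Resolve the port traffic is delivered to, applying the documented default."""
    return rule.backend_port if rule.backend_port is not None else rule.frontend_port
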
@pulumi.output_type
class LoadBalancingRuleResponse(dict):
"""
Rules of the load balancer
"""
def __init__(__self__, *,
backend_address_pool: 'outputs.SubResourceResponse',
enable_floating_ip: bool,
frontend_port: int,
protocol: str,
backend_port: Optional[int] = None,
etag: Optional[str] = None,
frontend_ip_configuration: Optional['outputs.SubResourceResponse'] = None,
id: Optional[str] = None,
idle_timeout_in_minutes: Optional[int] = None,
load_distribution: Optional[str] = None,
name: Optional[str] = None,
probe: Optional['outputs.SubResourceResponse'] = None,
provisioning_state: Optional[str] = None):
"""
Rules of the load balancer
:param 'SubResourceResponseArgs' backend_address_pool: Gets or sets a reference to a pool of DIPs. Inbound traffic is randomly load balanced across the IPs in the backend pool
:param bool enable_floating_ip: Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using SQL AlwaysOn Availability Groups in SQL Server. This setting can't be changed after you create the endpoint
:param int frontend_port: Gets or sets the port for the external endpoint. You can specify any port number you choose, but the port numbers specified for each role in the service must be unique. Possible values range between 1 and 65535, inclusive
:param str protocol: Gets or sets the transport protocol for the external endpoint. Possible values are Udp or Tcp
:param int backend_port: Gets or sets a port used for internal connections on the endpoint. The localPort attribute maps the external port of the endpoint to an internal port on a role. This is useful in scenarios where a role must communicate with an internal component on a port that is different from the one that is exposed externally. If not specified, the value of localPort is the same as the port attribute. Set the value of localPort to '*' to automatically assign an unallocated port that is discoverable using the runtime API
:param str etag: A unique read-only string that changes whenever the resource is updated
:param 'SubResourceResponseArgs' frontend_ip_configuration: Gets or sets a reference to frontend IP Addresses
:param str id: Resource Id
:param int idle_timeout_in_minutes: Gets or sets the timeout for the Tcp idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to Tcp
:param str load_distribution: Gets or sets the load distribution policy for this rule
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param 'SubResourceResponseArgs' probe: Gets or sets the reference of the load balancer probe used by the Load Balancing rule.
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
pulumi.set(__self__, "backend_address_pool", backend_address_pool)
pulumi.set(__self__, "enable_floating_ip", enable_floating_ip)
pulumi.set(__self__, "frontend_port", frontend_port)
pulumi.set(__self__, "protocol", protocol)
if backend_port is not None:
pulumi.set(__self__, "backend_port", backend_port)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configuration is not None:
pulumi.set(__self__, "frontend_ip_configuration", frontend_ip_configuration)
if id is not None:
pulumi.set(__self__, "id", id)
if idle_timeout_in_minutes is not None:
pulumi.set(__self__, "idle_timeout_in_minutes", idle_timeout_in_minutes)
if load_distribution is not None:
pulumi.set(__self__, "load_distribution", load_distribution)
if name is not None:
pulumi.set(__self__, "name", name)
if probe is not None:
pulumi.set(__self__, "probe", probe)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="backendAddressPool")
def backend_address_pool(self) -> 'outputs.SubResourceResponse':
"""
Gets or sets a reference to a pool of DIPs. Inbound traffic is randomly load balanced across the IPs in the backend pool
"""
return pulumi.get(self, "backend_address_pool")
@property
@pulumi.getter(name="enableFloatingIP")
def enable_floating_ip(self) -> bool:
"""
Configures a virtual machine's endpoint for the floating IP capability required to configure a SQL AlwaysOn Availability Group. This setting is required when using SQL AlwaysOn Availability Groups in SQL Server. This setting can't be changed after you create the endpoint
"""
return pulumi.get(self, "enable_floating_ip")
@property
@pulumi.getter(name="frontendPort")
def frontend_port(self) -> int:
"""
Gets or sets the port for the external endpoint. You can specify any port number you choose, but the port numbers specified for each role in the service must be unique. Possible values range between 1 and 65535, inclusive
"""
return pulumi.get(self, "frontend_port")
@property
@pulumi.getter
def protocol(self) -> str:
"""
Gets or sets the transport protocol for the external endpoint. Possible values are Udp or Tcp
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter(name="backendPort")
def backend_port(self) -> Optional[int]:
"""
Gets or sets a port used for internal connections on the endpoint. The localPort attribute maps the external port of the endpoint to an internal port on a role. This is useful in scenarios where a role must communicate with an internal component on a port that is different from the one that is exposed externally. If not specified, the value of localPort is the same as the port attribute. Set the value of localPort to '*' to automatically assign an unallocated port that is discoverable using the runtime API
"""
return pulumi.get(self, "backend_port")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="frontendIPConfiguration")
def frontend_ip_configuration(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets a reference to frontend IP Addresses
"""
return pulumi.get(self, "frontend_ip_configuration")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> Optional[int]:
"""
Gets or sets the timeout for the Tcp idle connection. The value can be set between 4 and 30 minutes. The default value is 4 minutes. This element is only used when the protocol is set to Tcp
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@property
@pulumi.getter(name="loadDistribution")
def load_distribution(self) -> Optional[str]:
"""
Gets or sets the load distribution policy for this rule
"""
return pulumi.get(self, "load_distribution")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def probe(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the load balancer probe used by the Load Balancing rule.
"""
return pulumi.get(self, "probe")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
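
# Illustrative sketch, not part of the generated SDK: the idle timeout is
# documented as Tcp-only with a default of 4 minutes; the case-insensitive
# protocol check is a defensive assumption.
def _effective_idle_timeout_minutes(rule: 'LoadBalancingRuleResponse') -> Optional[int]:
    """Return the idle timeout for Tcp rules (defaulting to 4), else None."""
    if (rule.protocol or '').lower() != 'tcp':
        return None
    return rule.idle_timeout_in_minutes if rule.idle_timeout_in_minutes is not None else 4
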
@pulumi.output_type
class NetworkInterfaceDnsSettingsResponse(dict):
"""
Dns Settings of a network interface
"""
def __init__(__self__, *,
applied_dns_servers: Optional[Sequence[str]] = None,
dns_servers: Optional[Sequence[str]] = None,
internal_dns_name_label: Optional[str] = None,
internal_fqdn: Optional[str] = None):
"""
Dns Settings of a network interface
:param Sequence[str] applied_dns_servers: Gets or sets list of Applied DNS servers IP addresses
:param Sequence[str] dns_servers: Gets or sets list of DNS servers IP addresses
:param str internal_dns_name_label: Gets or sets the Internal DNS name
:param str internal_fqdn: Gets or sets the full internal DNS (iDNS) name, in the form DnsName.VnetId.ZoneId.TopLevelSuffix. This is set when the NIC is associated with a VM
"""
if applied_dns_servers is not None:
pulumi.set(__self__, "applied_dns_servers", applied_dns_servers)
if dns_servers is not None:
pulumi.set(__self__, "dns_servers", dns_servers)
if internal_dns_name_label is not None:
pulumi.set(__self__, "internal_dns_name_label", internal_dns_name_label)
if internal_fqdn is not None:
pulumi.set(__self__, "internal_fqdn", internal_fqdn)
@property
@pulumi.getter(name="appliedDnsServers")
def applied_dns_servers(self) -> Optional[Sequence[str]]:
"""
Gets or sets list of Applied DNS servers IP addresses
"""
return pulumi.get(self, "applied_dns_servers")
@property
@pulumi.getter(name="dnsServers")
def dns_servers(self) -> Optional[Sequence[str]]:
"""
Gets or sets list of DNS servers IP addresses
"""
return pulumi.get(self, "dns_servers")
@property
@pulumi.getter(name="internalDnsNameLabel")
def internal_dns_name_label(self) -> Optional[str]:
"""
Gets or sets the Internal DNS name
"""
return pulumi.get(self, "internal_dns_name_label")
@property
@pulumi.getter(name="internalFqdn")
def internal_fqdn(self) -> Optional[str]:
"""
Gets or sets the full internal DNS (iDNS) name, in the form DnsName.VnetId.ZoneId.TopLevelSuffix. This is set when the NIC is associated with a VM
"""
return pulumi.get(self, "internal_fqdn")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
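
# Illustrative sketch, not part of the generated SDK: prefer the DNS servers
# the platform reports as applied, falling back to the configured list. The
# precedence rule itself is an assumption.
def _effective_dns_servers(settings: 'NetworkInterfaceDnsSettingsResponse') -> Sequence[str]:
    """Return applied DNS servers when present, otherwise the configured ones."""
    return settings.applied_dns_servers or settings.dns_servers or []
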
@pulumi.output_type
class NetworkInterfaceIpConfigurationResponse(dict):
"""
IPConfiguration in a NetworkInterface
"""
def __init__(__self__, *,
etag: Optional[str] = None,
id: Optional[str] = None,
load_balancer_backend_address_pools: Optional[Sequence['outputs.SubResourceResponse']] = None,
load_balancer_inbound_nat_rules: Optional[Sequence['outputs.SubResourceResponse']] = None,
name: Optional[str] = None,
private_ip_address: Optional[str] = None,
private_ip_allocation_method: Optional[str] = None,
provisioning_state: Optional[str] = None,
public_ip_address: Optional['outputs.SubResourceResponse'] = None,
subnet: Optional['outputs.SubResourceResponse'] = None):
"""
IPConfiguration in a NetworkInterface
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param Sequence['SubResourceResponseArgs'] load_balancer_backend_address_pools: Gets or sets the reference of LoadBalancerBackendAddressPool resource
:param Sequence['SubResourceResponseArgs'] load_balancer_inbound_nat_rules: Gets or sets list of references of LoadBalancerInboundNatRules
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str private_ip_address: Gets or sets the privateIPAddress of the Network Interface IP Configuration
:param str private_ip_allocation_method: Gets or sets PrivateIP allocation method (Static/Dynamic)
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
:param 'SubResourceResponseArgs' public_ip_address: Gets or sets the reference of the PublicIP resource
:param 'SubResourceResponseArgs' subnet: Gets or sets the reference of the subnet resource
"""
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if load_balancer_backend_address_pools is not None:
pulumi.set(__self__, "load_balancer_backend_address_pools", load_balancer_backend_address_pools)
if load_balancer_inbound_nat_rules is not None:
pulumi.set(__self__, "load_balancer_inbound_nat_rules", load_balancer_inbound_nat_rules)
if name is not None:
pulumi.set(__self__, "name", name)
if private_ip_address is not None:
pulumi.set(__self__, "private_ip_address", private_ip_address)
if private_ip_allocation_method is not None:
pulumi.set(__self__, "private_ip_allocation_method", private_ip_allocation_method)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_ip_address is not None:
pulumi.set(__self__, "public_ip_address", public_ip_address)
if subnet is not None:
pulumi.set(__self__, "subnet", subnet)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="loadBalancerBackendAddressPools")
def load_balancer_backend_address_pools(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets or sets the reference of LoadBalancerBackendAddressPool resource
"""
return pulumi.get(self, "load_balancer_backend_address_pools")
@property
@pulumi.getter(name="loadBalancerInboundNatRules")
def load_balancer_inbound_nat_rules(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets or sets list of references of LoadBalancerInboundNatRules
"""
return pulumi.get(self, "load_balancer_inbound_nat_rules")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateIPAddress")
def private_ip_address(self) -> Optional[str]:
"""
Gets or sets the privateIPAddress of the Network Interface IP Configuration
"""
return pulumi.get(self, "private_ip_address")
@property
@pulumi.getter(name="privateIPAllocationMethod")
def private_ip_allocation_method(self) -> Optional[str]:
"""
Gets or sets PrivateIP allocation method (Static/Dynamic)
"""
return pulumi.get(self, "private_ip_allocation_method")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publicIPAddress")
def public_ip_address(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the PublicIP resource
"""
return pulumi.get(self, "public_ip_address")
@property
@pulumi.getter
def subnet(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the subnet resource
"""
return pulumi.get(self, "subnet")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OutboundNatRuleResponse(dict):
"""
Outbound NAT pool of the load balancer
"""
def __init__(__self__, *,
allocated_outbound_ports: int,
backend_address_pool: 'outputs.SubResourceResponse',
etag: Optional[str] = None,
frontend_ip_configurations: Optional[Sequence['outputs.SubResourceResponse']] = None,
id: Optional[str] = None,
name: Optional[str] = None,
provisioning_state: Optional[str] = None):
"""
Outbound NAT pool of the load balancer
:param int allocated_outbound_ports: Gets or sets the number of outbound ports to be used for SNAT
:param 'SubResourceResponseArgs' backend_address_pool: Gets or sets a reference to a pool of DIPs. Outbound traffic is randomly load balanced across the IPs in the backend pool
:param str etag: A unique read-only string that changes whenever the resource is updated
:param Sequence['SubResourceResponseArgs'] frontend_ip_configurations: Gets or sets Frontend IP addresses of the load balancer
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
pulumi.set(__self__, "allocated_outbound_ports", allocated_outbound_ports)
pulumi.set(__self__, "backend_address_pool", backend_address_pool)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if frontend_ip_configurations is not None:
pulumi.set(__self__, "frontend_ip_configurations", frontend_ip_configurations)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="allocatedOutboundPorts")
def allocated_outbound_ports(self) -> int:
"""
Gets or sets the number of outbound ports to be used for SNAT
"""
return pulumi.get(self, "allocated_outbound_ports")
@property
@pulumi.getter(name="backendAddressPool")
def backend_address_pool(self) -> 'outputs.SubResourceResponse':
"""
Gets or sets a reference to a pool of DIPs. Outbound traffic is randomly load balanced across the IPs in the backend pool
"""
return pulumi.get(self, "backend_address_pool")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="frontendIPConfigurations")
def frontend_ip_configurations(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets or sets Frontend IP addresses of the load balancer
"""
return pulumi.get(self, "frontend_ip_configurations")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
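
# Illustrative sketch, not part of the generated SDK: it assumes the allocated
# SNAT ports are spread evenly across the rule's frontend IP configurations,
# which is a simplification rather than documented behavior.
def _snat_ports_per_frontend(rule: 'OutboundNatRuleResponse') -> int:
    """Divide allocated outbound ports across the rule's frontend IPs."""
    frontends = rule.frontend_ip_configurations or []
    return rule.allocated_outbound_ports // max(len(frontends), 1)
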
@pulumi.output_type
class ProbeResponse(dict):
"""
Load balancer Probe
"""
def __init__(__self__, *,
port: int,
protocol: str,
etag: Optional[str] = None,
id: Optional[str] = None,
interval_in_seconds: Optional[int] = None,
load_balancing_rules: Optional[Sequence['outputs.SubResourceResponse']] = None,
name: Optional[str] = None,
number_of_probes: Optional[int] = None,
provisioning_state: Optional[str] = None,
request_path: Optional[str] = None):
"""
Load balancer Probe
:param int port: Gets or sets Port for communicating the probe. Possible values range from 1 to 65535, inclusive.
:param str protocol: Gets or sets the protocol of the end point. Possible values are http or Tcp. If Tcp is specified, a received ACK is required for the probe to be successful. If http is specified, a 200 OK response from the specified URI is required for the probe to be successful
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param int interval_in_seconds: Gets or sets the interval, in seconds, for how frequently to probe the endpoint for health status. Typically, the interval is slightly less than half the allocated timeout period (in seconds), which allows two full probes before taking the instance out of rotation. The default value is 15; the minimum value is 5
:param Sequence['SubResourceResponseArgs'] load_balancing_rules: Gets Load balancer rules that use this probe
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param int number_of_probes: Gets or sets the number of consecutive probes with no response that results in stopping further traffic from being delivered to the endpoint. This value allows endpoints to be taken out of rotation faster or slower than the typical times used in Azure.
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
:param str request_path: Gets or sets the URI used for requesting health status from the VM. The path is required if the protocol is set to http. Otherwise, it is not allowed. There is no default value
"""
pulumi.set(__self__, "port", port)
pulumi.set(__self__, "protocol", protocol)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if interval_in_seconds is not None:
pulumi.set(__self__, "interval_in_seconds", interval_in_seconds)
if load_balancing_rules is not None:
pulumi.set(__self__, "load_balancing_rules", load_balancing_rules)
if name is not None:
pulumi.set(__self__, "name", name)
if number_of_probes is not None:
pulumi.set(__self__, "number_of_probes", number_of_probes)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if request_path is not None:
pulumi.set(__self__, "request_path", request_path)
@property
@pulumi.getter
def port(self) -> int:
"""
Gets or sets Port for communicating the probe. Possible values range from 1 to 65535, inclusive.
"""
return pulumi.get(self, "port")
@property
@pulumi.getter
def protocol(self) -> str:
"""
Gets or sets the protocol of the end point. Possible values are http or Tcp. If Tcp is specified, a received ACK is required for the probe to be successful. If http is specified, a 200 OK response from the specified URI is required for the probe to be successful
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="intervalInSeconds")
def interval_in_seconds(self) -> Optional[int]:
"""
Gets or sets the interval, in seconds, for how frequently to probe the endpoint for health status. Typically, the interval is slightly less than half the allocated timeout period (in seconds), which allows two full probes before taking the instance out of rotation. The default value is 15; the minimum value is 5
"""
return pulumi.get(self, "interval_in_seconds")
@property
@pulumi.getter(name="loadBalancingRules")
def load_balancing_rules(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets Load balancer rules that use this probe
"""
return pulumi.get(self, "load_balancing_rules")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="numberOfProbes")
def number_of_probes(self) -> Optional[int]:
"""
Gets or sets the number of consecutive probes with no response that results in stopping further traffic from being delivered to the endpoint. This value allows endpoints to be taken out of rotation faster or slower than the typical times used in Azure.
"""
return pulumi.get(self, "number_of_probes")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="requestPath")
def request_path(self) -> Optional[str]:
"""
Gets or sets the URI used for requesting health status from the VM. The path is required if the protocol is set to http. Otherwise, it is not allowed. There is no default value
"""
return pulumi.get(self, "request_path")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
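
# Illustrative sketch, not part of the generated SDK: the time before an
# unresponsive endpoint leaves rotation, using the documented default interval
# of 15 seconds and an assumed default of 2 for numberOfProbes.
def _probe_failure_window_seconds(probe: 'ProbeResponse') -> int:
    """Multiply the probe interval by the failures required for removal."""
    interval = probe.interval_in_seconds if probe.interval_in_seconds is not None else 15
    failures = probe.number_of_probes if probe.number_of_probes is not None else 2
    return interval * failures
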
@pulumi.output_type
class PublicIpAddressDnsSettingsResponse(dict):
"""
Contains FQDN of the DNS record associated with the public IP address
"""
def __init__(__self__, *,
domain_name_label: Optional[str] = None,
fqdn: Optional[str] = None,
reverse_fqdn: Optional[str] = None):
"""
Contains FQDN of the DNS record associated with the public IP address
:param str domain_name_label: Gets or sets the domain name label. The concatenation of the domain name label and the regionalized DNS zone makes up the fully qualified domain name associated with the public IP address. If a domain name label is specified, an A DNS record is created for the public IP in the Microsoft Azure DNS system.
:param str fqdn: Gets the FQDN, Fully qualified domain name of the A DNS record associated with the public IP. This is the concatenation of the domainNameLabel and the regionalized DNS zone.
:param str reverse_fqdn: Gets or Sets the Reverse FQDN. A user-visible, fully qualified domain name that resolves to this public IP address. If the reverseFqdn is specified, then a PTR DNS record is created pointing from the IP address in the in-addr.arpa domain to the reverse FQDN.
"""
if domain_name_label is not None:
pulumi.set(__self__, "domain_name_label", domain_name_label)
if fqdn is not None:
pulumi.set(__self__, "fqdn", fqdn)
if reverse_fqdn is not None:
pulumi.set(__self__, "reverse_fqdn", reverse_fqdn)
@property
@pulumi.getter(name="domainNameLabel")
def domain_name_label(self) -> Optional[str]:
"""
Gets or sets the domain name label. The concatenation of the domain name label and the regionalized DNS zone makes up the fully qualified domain name associated with the public IP address. If a domain name label is specified, an A DNS record is created for the public IP in the Microsoft Azure DNS system.
"""
return pulumi.get(self, "domain_name_label")
@property
@pulumi.getter
def fqdn(self) -> Optional[str]:
"""
Gets the FQDN, Fully qualified domain name of the A DNS record associated with the public IP. This is the concatenation of the domainNameLabel and the regionalized DNS zone.
"""
return pulumi.get(self, "fqdn")
@property
@pulumi.getter(name="reverseFqdn")
def reverse_fqdn(self) -> Optional[str]:
"""
Gets or Sets the Reverse FQDN. A user-visible, fully qualified domain name that resolves to this public IP address. If the reverseFqdn is specified, then a PTR DNS record is created pointing from the IP address in the in-addr.arpa domain to the reverse FQDN.
"""
return pulumi.get(self, "reverse_fqdn")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
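
# Illustrative sketch, not part of the generated SDK: the docstrings above say
# fqdn is the concatenation of domainNameLabel and the regionalized DNS zone,
# so given a zone (a caller-supplied assumption) the expected FQDN follows.
def _expected_fqdn(settings: 'PublicIpAddressDnsSettingsResponse', zone: str) -> Optional[str]:
    """Build the FQDN the domain name label would resolve to in `zone`."""
    if settings.domain_name_label is None:
        return None
    return f"{settings.domain_name_label}.{zone}"
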
@pulumi.output_type
class RouteResponse(dict):
"""
Route resource
"""
def __init__(__self__, *,
next_hop_type: str,
address_prefix: Optional[str] = None,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
next_hop_ip_address: Optional[str] = None,
provisioning_state: Optional[str] = None):
"""
Route resource
:param str next_hop_type: Gets or sets the type of Azure hop the packet should be sent to.
:param str address_prefix: Gets or sets the destination CIDR to which the route applies.
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param str next_hop_ip_address: Gets or sets the IP address packets should be forwarded to. Next hop values are only allowed in routes where the next hop type is VirtualAppliance.
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
pulumi.set(__self__, "next_hop_type", next_hop_type)
if address_prefix is not None:
pulumi.set(__self__, "address_prefix", address_prefix)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if next_hop_ip_address is not None:
pulumi.set(__self__, "next_hop_ip_address", next_hop_ip_address)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="nextHopType")
def next_hop_type(self) -> str:
"""
Gets or sets the type of Azure hop the packet should be sent to.
"""
return pulumi.get(self, "next_hop_type")
@property
@pulumi.getter(name="addressPrefix")
def address_prefix(self) -> Optional[str]:
"""
Gets or sets the destination CIDR to which the route applies.
"""
return pulumi.get(self, "address_prefix")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="nextHopIpAddress")
def next_hop_ip_address(self) -> Optional[str]:
"""
Gets or sets the IP address packets should be forwarded to. Next hop values are only allowed in routes where the next hop type is VirtualAppliance.
"""
return pulumi.get(self, "next_hop_ip_address")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
"""
return pulumi.get(self, "provisioning_state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
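
# Illustrative sketch, not part of the generated SDK: per the docstrings above,
# nextHopIpAddress is only meaningful when nextHopType is 'VirtualAppliance'.
def _describe_next_hop(route: 'RouteResponse') -> str:
    """Describe where a route forwards traffic, honoring the VirtualAppliance rule."""
    if route.next_hop_type == 'VirtualAppliance' and route.next_hop_ip_address:
        return f"VirtualAppliance via {route.next_hop_ip_address}"
    return route.next_hop_type
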
@pulumi.output_type
class SecurityRuleResponse(dict):
"""
Network security rule
"""
def __init__(__self__, *,
access: str,
destination_address_prefix: str,
direction: str,
protocol: str,
source_address_prefix: str,
description: Optional[str] = None,
destination_port_range: Optional[str] = None,
etag: Optional[str] = None,
id: Optional[str] = None,
name: Optional[str] = None,
priority: Optional[int] = None,
provisioning_state: Optional[str] = None,
source_port_range: Optional[str] = None):
"""
Network security rule
:param str access: Gets or sets whether network traffic is allowed or denied. Possible values are 'Allow' and 'Deny'
:param str destination_address_prefix: Gets or sets the destination address prefix. CIDR or destination IP range. Asterisk '*' can also be used to match all destination IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
:param str direction: Gets or sets the direction of the rule. InBound or Outbound. The direction specifies whether the rule is evaluated on incoming or outgoing traffic.
:param str protocol: Gets or sets Network protocol this rule applies to. Can be Tcp, Udp or All(*).
:param str source_address_prefix: Gets or sets source address prefix. CIDR or source IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used. If this is an ingress rule, specifies where network traffic originates from.
:param str description: Gets or sets a description for this rule. Restricted to 140 chars.
:param str destination_port_range: Gets or sets Destination Port or Range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports.
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param int priority: Gets or sets the priority of the rule. The value can be between 100 and 4096. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule.
:param str provisioning_state: Gets or sets the provisioning state of the resource. Possible values are Updating, Deleting, and Failed
:param str source_port_range: Gets or sets Source Port or Range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports.
"""
pulumi.set(__self__, "access", access)
pulumi.set(__self__, "destination_address_prefix", destination_address_prefix)
pulumi.set(__self__, "direction", direction)
pulumi.set(__self__, "protocol", protocol)
pulumi.set(__self__, "source_address_prefix", source_address_prefix)
if description is not None:
pulumi.set(__self__, "description", description)
if destination_port_range is not None:
pulumi.set(__self__, "destination_port_range", destination_port_range)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if priority is not None:
pulumi.set(__self__, "priority", priority)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if source_port_range is not None:
pulumi.set(__self__, "source_port_range", source_port_range)
@property
@pulumi.getter
def access(self) -> str:
"""
Gets or sets whether network traffic is allowed or denied. Possible values are 'Allow' and 'Deny'
"""
return pulumi.get(self, "access")
@property
@pulumi.getter(name="destinationAddressPrefix")
def destination_address_prefix(self) -> str:
"""
Gets or sets the destination address prefix. CIDR or destination IP range. Asterisk '*' can also be used to match all destination IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used.
"""
return pulumi.get(self, "destination_address_prefix")
@property
@pulumi.getter
def direction(self) -> str:
"""
Gets or sets the direction of the rule. InBound or Outbound. The direction specifies whether the rule is evaluated on incoming or outgoing traffic.
"""
return pulumi.get(self, "direction")
@property
@pulumi.getter
def protocol(self) -> str:
"""
Gets or sets Network protocol this rule applies to. Can be Tcp, Udp or All(*).
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter(name="sourceAddressPrefix")
def source_address_prefix(self) -> str:
"""
Gets or sets source address prefix. CIDR or source IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used. If this is an ingress rule, specifies where network traffic originates from.
"""
return pulumi.get(self, "source_address_prefix")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Gets or sets a description for this rule. Restricted to 140 chars.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="destinationPortRange")
def destination_port_range(self) -> Optional[str]:
"""
Gets or sets Destination Port or Range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports.
"""
return pulumi.get(self, "destination_port_range")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def priority(self) -> Optional[int]:
"""
Gets or sets the priority of the rule. The value can be between 100 and 4096. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule.
"""
return pulumi.get(self, "priority")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
        Gets or sets the provisioning state of the resource (Updating/Deleting/Failed).
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="sourcePortRange")
def source_port_range(self) -> Optional[str]:
"""
Gets or sets Source Port or Range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports.
"""
return pulumi.get(self, "source_port_range")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SubResourceResponse(dict):
def __init__(__self__, *,
id: Optional[str] = None):
"""
:param str id: Resource Id
"""
if id is not None:
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SubnetResponse(dict):
"""
Subnet in a VirtualNetwork resource
"""
def __init__(__self__, *,
address_prefix: str,
etag: Optional[str] = None,
id: Optional[str] = None,
ip_configurations: Optional[Sequence['outputs.SubResourceResponse']] = None,
name: Optional[str] = None,
network_security_group: Optional['outputs.SubResourceResponse'] = None,
provisioning_state: Optional[str] = None,
route_table: Optional['outputs.SubResourceResponse'] = None):
"""
Subnet in a VirtualNetwork resource
:param str address_prefix: Gets or sets Address prefix for the subnet.
:param str etag: A unique read-only string that changes whenever the resource is updated
:param str id: Resource Id
:param Sequence['SubResourceResponseArgs'] ip_configurations: Gets array of references to the network interface IP configurations using subnet
:param str name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource
:param 'SubResourceResponseArgs' network_security_group: Gets or sets the reference of the NetworkSecurityGroup resource
        :param str provisioning_state: Gets or sets the provisioning state of the resource (Updating/Deleting/Failed).
:param 'SubResourceResponseArgs' route_table: Gets or sets the reference of the RouteTable resource
"""
pulumi.set(__self__, "address_prefix", address_prefix)
if etag is not None:
pulumi.set(__self__, "etag", etag)
if id is not None:
pulumi.set(__self__, "id", id)
if ip_configurations is not None:
pulumi.set(__self__, "ip_configurations", ip_configurations)
if name is not None:
pulumi.set(__self__, "name", name)
if network_security_group is not None:
pulumi.set(__self__, "network_security_group", network_security_group)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
if route_table is not None:
pulumi.set(__self__, "route_table", route_table)
@property
@pulumi.getter(name="addressPrefix")
def address_prefix(self) -> str:
"""
Gets or sets Address prefix for the subnet.
"""
return pulumi.get(self, "address_prefix")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="ipConfigurations")
def ip_configurations(self) -> Optional[Sequence['outputs.SubResourceResponse']]:
"""
Gets array of references to the network interface IP configurations using subnet
"""
return pulumi.get(self, "ip_configurations")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkSecurityGroup")
def network_security_group(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the NetworkSecurityGroup resource
"""
return pulumi.get(self, "network_security_group")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
        Gets or sets the provisioning state of the resource (Updating/Deleting/Failed).
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="routeTable")
def route_table(self) -> Optional['outputs.SubResourceResponse']:
"""
Gets or sets the reference of the RouteTable resource
"""
return pulumi.get(self, "route_table")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
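# Illustrative sketch only, not part of the generated SDK: one way a consumer
# might read the output types above. `summarize_subnet` is a hypothetical
# helper; the SubnetResponse value would come from a stack's outputs.
def summarize_subnet(subnet: 'SubnetResponse') -> str:
    # network_security_group is an optional SubResourceResponse reference
    nsg = subnet.network_security_group
    return "{}: {} (NSG: {})".format(
        subnet.name,
        subnet.address_prefix,
        nsg.id if nsg else "none",
    )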
| 42.061894
| 541
| 0.656952
| 15,218
| 125,723
| 5.251084
| 0.035681
| 0.019672
| 0.032536
| 0.049931
| 0.865751
| 0.828347
| 0.793207
| 0.757668
| 0.726258
| 0.697238
| 0
| 0.001432
| 0.261305
| 125,723
| 2,988
| 542
| 42.075971
| 0.85902
| 0.357071
| 0
| 0.702063
| 1
| 0
| 0.138835
| 0.071701
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166262
| false
| 0.003034
| 0.003641
| 0.019417
| 0.336165
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3255dceaefdd3538fa112a8a352418f13e7e895
| 2,816
|
py
|
Python
|
tests/realtime/test_realtime_BusGroup_allocate.py
|
butayama/supriya
|
0c197324ecee4232381221880d1f40e109bb756c
|
[
"MIT"
] | 191
|
2015-11-13T02:28:42.000Z
|
2022-03-29T10:26:44.000Z
|
tests/realtime/test_realtime_BusGroup_allocate.py
|
butayama/supriya
|
0c197324ecee4232381221880d1f40e109bb756c
|
[
"MIT"
] | 130
|
2016-01-04T16:59:02.000Z
|
2022-02-26T15:37:20.000Z
|
tests/realtime/test_realtime_BusGroup_allocate.py
|
butayama/supriya
|
0c197324ecee4232381221880d1f40e109bb756c
|
[
"MIT"
] | 22
|
2016-05-04T10:32:16.000Z
|
2022-02-26T19:22:45.000Z
|
import supriya.realtime
import supriya.synthdefs
def test_01(server):
bus_group_one = supriya.realtime.BusGroup(
bus_count=4, calculation_rate=supriya.CalculationRate.CONTROL
)
assert not bus_group_one.is_allocated
assert bus_group_one.bus_id is None
assert bus_group_one.server is None
assert len(bus_group_one) == 4
for bus in bus_group_one:
assert not bus.is_allocated
assert bus.bus_group is bus_group_one
assert bus.bus_id is None
assert bus.calculation_rate == bus_group_one.calculation_rate
bus_group_one.allocate()
server.sync()
assert bus_group_one.is_allocated
assert bus_group_one.bus_id == 0
assert bus_group_one.server is server
assert len(bus_group_one) == 4
for i, bus in enumerate(bus_group_one):
assert bus.is_allocated
assert bus.bus_group is bus_group_one
assert bus.bus_id == bus_group_one.bus_id + i
assert bus.calculation_rate == bus_group_one.calculation_rate
bus_group_two = supriya.realtime.BusGroup(
bus_count=4, calculation_rate=supriya.CalculationRate.CONTROL
)
server.sync()
assert not bus_group_two.is_allocated
assert bus_group_two.bus_id is None
assert bus_group_two.server is None
assert len(bus_group_two) == 4
for bus in bus_group_two:
assert not bus.is_allocated
assert bus.bus_group is bus_group_two
assert bus.bus_id is None
assert bus.calculation_rate == bus_group_two.calculation_rate
bus_group_two.allocate()
server.sync()
assert bus_group_two.is_allocated
assert bus_group_two.bus_id == 4
assert bus_group_two.server is server
assert len(bus_group_two) == 4
for i, bus in enumerate(bus_group_two):
assert bus.is_allocated
assert bus.bus_group is bus_group_two
        assert bus.bus_id == bus_group_two.bus_id + i
assert bus.calculation_rate == bus_group_two.calculation_rate
bus_group_one.free()
server.sync()
assert not bus_group_one.is_allocated
assert bus_group_one.bus_id is None
assert bus_group_one.server is None
assert len(bus_group_one) == 4
for bus in bus_group_one:
assert not bus.is_allocated
assert bus.bus_group is bus_group_one
assert bus.bus_id is None
assert bus.calculation_rate == bus_group_one.calculation_rate
bus_group_two.free()
server.sync()
assert not bus_group_two.is_allocated
assert bus_group_two.bus_id is None
assert bus_group_two.server is None
assert len(bus_group_two) == 4
for bus in bus_group_two:
assert not bus.is_allocated
assert bus.bus_group is bus_group_two
assert bus.bus_id is None
assert bus.calculation_rate == bus_group_two.calculation_rate
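# Sketch of a possible refactor (hypothetical helper, not part of supriya or
# of the original test): the unallocated-state assertions above are repeated
# verbatim four times and could be expressed once, e.g. by calling
# assert_unallocated(bus_group_one) before allocate() and again after free().
def assert_unallocated(bus_group, expected_len=4):
    assert not bus_group.is_allocated
    assert bus_group.bus_id is None
    assert bus_group.server is None
    assert len(bus_group) == expected_len
    for bus in bus_group:
        assert not bus.is_allocated
        assert bus.bus_group is bus_group
        assert bus.bus_id is None
        assert bus.calculation_rate == bus_group.calculation_rate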
| 32.367816
| 69
| 0.721236
| 443
| 2,816
| 4.248307
| 0.076749
| 0.238045
| 0.146121
| 0.127524
| 0.962274
| 0.942614
| 0.899044
| 0.853348
| 0.824655
| 0.818278
| 0
| 0.005482
| 0.222656
| 2,816
| 86
| 70
| 32.744186
| 0.854271
| 0
| 0
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.013889
| false
| 0
| 0.027778
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c34a1cd84f5a2fe616d4ada70c19ef0058701cd2
| 5,386
|
py
|
Python
|
ctweb/ctapp/migrations/0001_initial.py
|
manas-16/Minor-Project-2
|
d03cb1e7cb57c294bdfc12e9d59237a67458fdbd
|
[
"MIT"
] | null | null | null |
ctweb/ctapp/migrations/0001_initial.py
|
manas-16/Minor-Project-2
|
d03cb1e7cb57c294bdfc12e9d59237a67458fdbd
|
[
"MIT"
] | null | null | null |
ctweb/ctapp/migrations/0001_initial.py
|
manas-16/Minor-Project-2
|
d03cb1e7cb57c294bdfc12e9d59237a67458fdbd
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-07 06:21
import ctapp.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='assignment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('topic', models.CharField(max_length=50)),
('last_date', models.DateField()),
],
),
migrations.CreateModel(
name='Class',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sem', models.CharField(choices=[('1', 1), ('2', 2), ('3', 3), ('4', 4), ('5', 5), ('6', 6), ('7', 7), ('8', 8)], default='6', max_length=20)),
('sec', models.CharField(choices=[('A', 'A'), ('B', 'B'), ('C', 'C')], default='A', max_length=1)),
('branch', models.CharField(choices=[('IT', 'IT'), ('CS', 'CS'), ('EC', 'EC'), ('MECH', 'MECH'), ('EE', 'EE'), ('EX', 'EX'), ('CIVIL', 'CIVIL')], default='IT', max_length=20)),
],
),
migrations.CreateModel(
name='student',
fields=[
('enrollment_number', models.CharField(max_length=12, primary_key=True, serialize=False)),
('name', models.CharField(max_length=30)),
('college_name', models.CharField(choices=[('LNCT', 'LNCT'), ('LNCTS', 'LNCTS'), ('LNCTE', 'LNCTE')], default='LNCT', max_length=10)),
('sem', models.CharField(choices=[('1', 1), ('2', 2), ('3', 3), ('4', 4), ('5', 5), ('6', 6), ('7', 7), ('8', 8)], default='6', max_length=20)),
('sec', models.CharField(choices=[('A', 'A'), ('B', 'B'), ('C', 'C')], default='A', max_length=1)),
('branch', models.CharField(choices=[('IT', 'IT'), ('CS', 'CS'), ('EC', 'EC'), ('MECH', 'MECH'), ('EE', 'EE'), ('EX', 'EX'), ('CIVIL', 'CIVIL')], default='IT', max_length=20)),
('mobile_no', models.CharField(max_length=10)),
('email', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='subject',
fields=[
('subject_code', models.CharField(max_length=8, primary_key=True, serialize=False)),
('name', models.CharField(max_length=30)),
('branch', models.CharField(choices=[('IT', 'IT'), ('CS', 'CS'), ('EC', 'EC'), ('MECH', 'MECH'), ('EE', 'EE'), ('EX', 'EX'), ('CIVIL', 'CIVIL')], default='IT', max_length=20)),
('sem', models.CharField(choices=[('1', 1), ('2', 2), ('3', 3), ('4', 4), ('5', 5), ('6', 6), ('7', 7), ('8', 8)], default='6', max_length=20)),
],
),
migrations.CreateModel(
name='teacher',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('college_name', models.CharField(choices=[('LNCT', 'LNCT'), ('LNCTS', 'LNCTS'), ('LNCTE', 'LNCTE')], default='LNCT', max_length=10)),
('branch', models.CharField(choices=[('IT', 'IT'), ('CS', 'CS'), ('EC', 'EC'), ('MECH', 'MECH'), ('EE', 'EE'), ('EX', 'EX'), ('CIVIL', 'CIVIL')], default='IT', max_length=20)),
('mobile_no', models.CharField(max_length=10)),
('email', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='teacher_assign',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('c_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ctapp.class')),
('s_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ctapp.subject')),
('t_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ctapp.teacher')),
],
),
migrations.CreateModel(
name='student_submission',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('file', models.FileField(upload_to=ctapp.models.user_directory_path)),
('a_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ctapp.assignment')),
('stud_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ctapp.student')),
],
),
migrations.AddField(
model_name='assignment',
name='c_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ctapp.class'),
),
migrations.AddField(
model_name='assignment',
name='s_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ctapp.subject'),
),
migrations.AddField(
model_name='assignment',
name='t_id',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ctapp.teacher'),
),
]
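# Note (a sketch, not produced by makemigrations): the sem/sec/branch choices
# are repeated verbatim across four models above. In the app's models.py they
# could be shared module-level constants, e.g.:
#
#   SEM_CHOICES = [('1', 1), ('2', 2), ('3', 3), ('4', 4),
#                  ('5', 5), ('6', 6), ('7', 7), ('8', 8)]
#   BRANCH_CHOICES = [('IT', 'IT'), ('CS', 'CS'), ('EC', 'EC'), ('MECH', 'MECH'),
#                     ('EE', 'EE'), ('EX', 'EX'), ('CIVIL', 'CIVIL')]
#
# The migration itself is a historical snapshot and is normally left as generated.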
| 53.86
| 192
| 0.528593
| 591
| 5,386
| 4.700508
| 0.170897
| 0.113391
| 0.087113
| 0.086393
| 0.803096
| 0.803096
| 0.740821
| 0.740821
| 0.740821
| 0.740821
| 0
| 0.02625
| 0.257334
| 5,386
| 99
| 193
| 54.40404
| 0.66825
| 0.008355
| 0
| 0.652174
| 1
| 0
| 0.125679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032609
| 0
| 0.076087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c381797db2f6dc21ad13041f5c17199b50f8a7b0
| 16,123
|
py
|
Python
|
tgt_grease/enterprise/Sources/tests/test_sql_parser.py
|
DavidPRolfe/grease
|
786725b15f530c5540309dc8ad96efb5d095bd6d
|
[
"MIT"
] | null | null | null |
tgt_grease/enterprise/Sources/tests/test_sql_parser.py
|
DavidPRolfe/grease
|
786725b15f530c5540309dc8ad96efb5d095bd6d
|
[
"MIT"
] | null | null | null |
tgt_grease/enterprise/Sources/tests/test_sql_parser.py
|
DavidPRolfe/grease
|
786725b15f530c5540309dc8ad96efb5d095bd6d
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from tgt_grease.enterprise.Sources import sql_source
from tgt_grease.core import Configuration
import json
import os
import psycopg2
import datetime
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
class TestSQLSource(TestCase):
def test_type(self):
inst = sql_source()
self.assertTrue(isinstance(inst, sql_source))
def __ensure_schema(self):
# Helper function
if not os.environ.get('GREASE_TEST_DSN'):
os.environ['GREASE_TEST_DSN'] = "host=localhost user=postgres"
with psycopg2.connect(os.environ['GREASE_TEST_DSN']) as conn:
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
with conn.cursor() as cursor:
# just to make sure nothing exists
try:
cursor.execute("""
SELECT
pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname='test_data'
""")
cursor.execute("""
DROP DATABASE test_data;
""")
                except Exception:
print("Exception occurred during ensure schema... most of the time this is fine")
try:
cursor.execute("""
CREATE DATABASE test_data;
""")
except psycopg2.ProgrammingError as e:
print("Schema Exists: {0}".format(e.pgerror))
if not os.environ.get('GREASE_TEST_DSN_ORIGINAL'):
os.environ['GREASE_TEST_DSN_ORIGINAL'] = os.environ.get('GREASE_TEST_DSN')
os.environ['GREASE_TEST_DSN'] = os.environ['GREASE_TEST_DSN'] + " dbname=test_data"
def __cleanup_schema(self):
with psycopg2.connect(os.environ['GREASE_TEST_DSN_ORIGINAL']) as conn:
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
with conn.cursor() as cursor:
cursor.execute("""
SELECT
pg_terminate_backend(pid)
FROM pg_stat_activity
WHERE datname='test_data'
""")
try:
cursor.execute("""
DROP DATABASE test_data;
""")
except psycopg2.ProgrammingError as e:
print("Schema Does Not Exist: {0}".format(e.pgerror))
os.environ['GREASE_TEST_DSN'] = os.environ['GREASE_TEST_DSN_ORIGINAL']
def test_sql_parser_mock(self):
source = sql_source()
conf = Configuration()
mock = {
'id': 1,
'name_fs': 'sally',
'name_ls': 'sue'
}
fil = open(conf.greaseDir + 'etc' + conf.fs_sep + 'test.mock.sql.json', 'w')
fil.write(json.dumps(mock))
fil.close()
mockData = source.mock_data({})
self.assertEqual(len(mockData), 1)
self.assertEqual(mock.get('id'), 1)
self.assertEqual(mock.get('name_fs'), mockData[0].get('name_fs'))
self.assertEqual(mock.get('name_ls'), mockData[0].get('name_ls'))
os.remove(conf.greaseDir + 'etc' + conf.fs_sep + 'test.mock.sql.json')
def test_sql_parser(self):
self.__ensure_schema()
with psycopg2.connect(os.environ['GREASE_TEST_DSN']) as conn:
with conn.cursor() as cursor:
source = sql_source()
cursor.execute("""
CREATE TABLE IF NOT EXISTS
test_data
(
id SERIAL PRIMARY KEY NOT NULL,
name_fs VARCHAR,
name_ls VARCHAR
);
""")
conn.commit()
cursor.execute("""
INSERT INTO
test_data
(name_fs, name_ls)
VALUES
('sally', 'sue');
""")
conn.commit()
self.assertTrue(source.parse_source({
'name': 'example_source',
'job': 'example_job',
'exe_env': 'general',
'source': 'sql_source',
'type': 'postgresql',
'dsn': 'GREASE_TEST_DSN',
'query': 'select * from test_data;',
'logic': {}
}))
data = source.get_data()
self.assertTrue(isinstance(data, list))
self.assertEqual(len(data), 1)
self.assertIsInstance(data[0], dict)
self.assertEqual(data[0].get('id'), 1)
self.assertEqual(data[0].get('name_fs'), 'sally')
self.assertEqual(data[0].get('name_ls'), 'sue')
cursor.execute("""
DROP TABLE public.test_data
""")
self.__cleanup_schema()
def test_sql_parser_bad_type(self):
source = sql_source()
self.assertFalse(source.parse_source({
'name': 'example_source',
'job': 'example_job',
'exe_env': 'general',
'source': 'sql_source',
'type': 'mssql',
'dsn': 'GREASE_TEST_DSN',
'query': 'select * from test_data;',
'logic': {}
}))
data = source.get_data()
self.assertTrue(isinstance(data, list))
self.assertEqual(len(data), 0)
def test_sql_parser_hour_good(self):
self.__ensure_schema()
with psycopg2.connect(os.environ['GREASE_TEST_DSN']) as conn:
with conn.cursor() as cursor:
source = sql_source()
cursor.execute("""
CREATE TABLE IF NOT EXISTS
test_data
(
id SERIAL PRIMARY KEY NOT NULL,
name_fs VARCHAR,
name_ls VARCHAR
);
""")
conn.commit()
cursor.execute("""
INSERT INTO
test_data
(name_fs, name_ls)
VALUES
('sally', 'sue');
""")
conn.commit()
self.assertTrue(source.parse_source({
'name': 'example_source',
'job': 'example_job',
'exe_env': 'general',
'source': 'sql_source',
'type': 'postgresql',
'dsn': 'GREASE_TEST_DSN',
'query': 'select * from test_data;',
'hour': datetime.datetime.utcnow().hour,
'logic': {}
}))
data = source.get_data()
self.assertTrue(isinstance(data, list))
self.assertEqual(len(data), 1)
self.assertIsInstance(data[0], dict)
self.assertEqual(data[0].get('id'), 1)
self.assertEqual(data[0].get('name_fs'), 'sally')
self.assertEqual(data[0].get('name_ls'), 'sue')
cursor.execute("""
DROP TABLE public.test_data
""")
self.__cleanup_schema()
def test_sql_parser_minute_good(self):
self.__ensure_schema()
with psycopg2.connect(os.environ['GREASE_TEST_DSN']) as conn:
with conn.cursor() as cursor:
source = sql_source()
cursor.execute("""
CREATE TABLE IF NOT EXISTS
test_data
(
id SERIAL PRIMARY KEY NOT NULL,
name_fs VARCHAR,
name_ls VARCHAR
);
""")
conn.commit()
cursor.execute("""
INSERT INTO
test_data
(name_fs, name_ls)
VALUES
('sally', 'sue');
""")
conn.commit()
self.assertTrue(source.parse_source({
'name': 'example_source',
'job': 'example_job',
'exe_env': 'general',
'source': 'sql_source',
'type': 'postgresql',
'dsn': 'GREASE_TEST_DSN',
'query': 'select * from test_data;',
'minute': datetime.datetime.utcnow().minute,
'logic': {}
}))
data = source.get_data()
self.assertTrue(isinstance(data, list))
self.assertEqual(len(data), 1)
self.assertIsInstance(data[0], dict)
self.assertEqual(data[0].get('id'), 1)
self.assertEqual(data[0].get('name_fs'), 'sally')
self.assertEqual(data[0].get('name_ls'), 'sue')
cursor.execute("""
DROP TABLE public.test_data
""")
self.__cleanup_schema()
def test_sql_parser_hour_and_minute_good(self):
self.__ensure_schema()
with psycopg2.connect(os.environ['GREASE_TEST_DSN']) as conn:
with conn.cursor() as cursor:
source = sql_source()
cursor.execute("""
CREATE TABLE IF NOT EXISTS
test_data
(
id SERIAL PRIMARY KEY NOT NULL,
name_fs VARCHAR,
name_ls VARCHAR
);
""")
conn.commit()
cursor.execute("""
INSERT INTO
test_data
(name_fs, name_ls)
VALUES
('sally', 'sue');
""")
conn.commit()
self.assertTrue(source.parse_source({
'name': 'example_source',
'job': 'example_job',
'exe_env': 'general',
'source': 'sql_source',
'type': 'postgresql',
'dsn': 'GREASE_TEST_DSN',
'query': 'select * from test_data;',
'hour': datetime.datetime.utcnow().hour,
'minute': datetime.datetime.utcnow().minute,
'logic': {}
}))
data = source.get_data()
self.assertTrue(isinstance(data, list))
self.assertEqual(len(data), 1)
self.assertIsInstance(data[0], dict)
self.assertEqual(data[0].get('id'), 1)
self.assertEqual(data[0].get('name_fs'), 'sally')
self.assertEqual(data[0].get('name_ls'), 'sue')
cursor.execute("""
DROP TABLE public.test_data
""")
self.__cleanup_schema()
def test_sql_parser_hour_bad(self):
self.__ensure_schema()
with psycopg2.connect(os.environ['GREASE_TEST_DSN']) as conn:
with conn.cursor() as cursor:
source = sql_source()
cursor.execute("""
CREATE TABLE IF NOT EXISTS
test_data
(
id SERIAL PRIMARY KEY NOT NULL,
name_fs VARCHAR,
name_ls VARCHAR
);
""")
conn.commit()
cursor.execute("""
INSERT INTO
test_data
(name_fs, name_ls)
VALUES
('sally', 'sue');
""")
conn.commit()
self.assertTrue(source.parse_source({
'name': 'example_source',
'job': 'example_job',
'exe_env': 'general',
'source': 'sql_source',
'type': 'postgresql',
'dsn': 'GREASE_TEST_DSN',
'query': 'select * from test_data;',
'hour': (datetime.datetime.utcnow() + datetime.timedelta(hours=6)).hour,
'logic': {}
}))
data = source.get_data()
self.assertTrue(isinstance(data, list))
self.assertEqual(len(data), 0)
cursor.execute("""
DROP TABLE public.test_data
""")
self.__cleanup_schema()
def test_sql_parser_minute_bad(self):
self.__ensure_schema()
with psycopg2.connect(os.environ['GREASE_TEST_DSN']) as conn:
with conn.cursor() as cursor:
source = sql_source()
cursor.execute("""
CREATE TABLE IF NOT EXISTS
test_data
(
id SERIAL PRIMARY KEY NOT NULL,
name_fs VARCHAR,
name_ls VARCHAR
);
""")
conn.commit()
cursor.execute("""
INSERT INTO
test_data
(name_fs, name_ls)
VALUES
('sally', 'sue');
""")
conn.commit()
self.assertTrue(source.parse_source({
'name': 'example_source',
'job': 'example_job',
'exe_env': 'general',
'source': 'sql_source',
'type': 'postgresql',
'dsn': 'GREASE_TEST_DSN',
'query': 'select * from test_data;',
'minute': (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)).minute,
'logic': {}
}))
data = source.get_data()
self.assertTrue(isinstance(data, list))
self.assertEqual(len(data), 0)
cursor.execute("""
DROP TABLE public.test_data
""")
self.__cleanup_schema()
def test_sql_parser_hour_and_minute_bad(self):
self.__ensure_schema()
with psycopg2.connect(os.environ['GREASE_TEST_DSN']) as conn:
with conn.cursor() as cursor:
source = sql_source()
cursor.execute("""
CREATE TABLE IF NOT EXISTS
test_data
(
id SERIAL PRIMARY KEY NOT NULL,
name_fs VARCHAR,
name_ls VARCHAR
);
""")
conn.commit()
cursor.execute("""
INSERT INTO
test_data
(name_fs, name_ls)
VALUES
('sally', 'sue');
""")
conn.commit()
self.assertTrue(source.parse_source({
'name': 'example_source',
'job': 'example_job',
'exe_env': 'general',
'source': 'sql_source',
'type': 'postgresql',
'dsn': 'GREASE_TEST_DSN',
'query': 'select * from test_data;',
'hour': (datetime.datetime.utcnow() + datetime.timedelta(hours=6)).hour,
'minute': (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)).minute,
'logic': {}
}))
data = source.get_data()
self.assertTrue(isinstance(data, list))
self.assertEqual(len(data), 0)
cursor.execute("""
DROP TABLE public.test_data
""")
self.__cleanup_schema()
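    # Sketch of a possible refactor (hypothetical method, not in the original
    # suite): every test above repeats the same CREATE TABLE / INSERT setup,
    # which could be centralized like this and called after __ensure_schema().
    def __seed_test_table(self, conn, cursor):
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS
            test_data
            (
                id SERIAL PRIMARY KEY NOT NULL,
                name_fs VARCHAR,
                name_ls VARCHAR
            );
        """)
        conn.commit()
        cursor.execute("""
            INSERT INTO
            test_data
            (name_fs, name_ls)
            VALUES
            ('sally', 'sue');
        """)
        conn.commit()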
| 39.32439
| 101
| 0.44297
| 1,408
| 16,123
| 4.857955
| 0.106534
| 0.040936
| 0.049415
| 0.041667
| 0.878363
| 0.864035
| 0.85614
| 0.847953
| 0.836842
| 0.836842
| 0
| 0.00609
| 0.45004
| 16,123
| 409
| 102
| 39.420538
| 0.76531
| 0.002977
| 0
| 0.84949
| 0
| 0
| 0.374627
| 0.009084
| 0
| 0
| 0
| 0
| 0.114796
| 1
| 0.030612
| false
| 0
| 0.020408
| 0
| 0.053571
| 0.007653
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c382e5bee28fc5f46cf432727e913b1cf5d8b02e
| 11,910
|
py
|
Python
|
chapter2/bert/scripts/ms_and_tf_checkpoint_transfer_tools.py
|
hu-qi/mindspore-21-days-tutorials
|
a9d3a024a0b17e48ae758de66195923b14f8ea6c
|
[
"Apache-2.0"
] | null | null | null |
chapter2/bert/scripts/ms_and_tf_checkpoint_transfer_tools.py
|
hu-qi/mindspore-21-days-tutorials
|
a9d3a024a0b17e48ae758de66195923b14f8ea6c
|
[
"Apache-2.0"
] | null | null | null |
chapter2/bert/scripts/ms_and_tf_checkpoint_transfer_tools.py
|
hu-qi/mindspore-21-days-tutorials
|
a9d3a024a0b17e48ae758de66195923b14f8ea6c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
mindspore and tensorflow checkpoint transfer tools
"""
import argparse
import tensorflow as tf
from mindspore.common.tensor import Tensor
from mindspore.train.serialization import load_checkpoint, save_checkpoint
from ms2tf_config import param_name_dict as ms2tf_param_dict
def convert_ms_2_tf(tf_ckpt_path, ms_ckpt_path, new_ckpt_path):
"""
convert ms checkpoint to tf checkpoint
"""
# load MS checkpoint
ms_param_dict = load_checkpoint(ms_ckpt_path)
for name in ms_param_dict.keys():
if isinstance(ms_param_dict[name].data, Tensor):
ms_param_dict[name] = ms_param_dict[name].data.asnumpy()
convert_count = 0
with tf.Session() as sess:
# convert ms shape to tf
print("start convert parameter ...")
new_var_list = []
for var_name, shape in tf.contrib.framework.list_variables(tf_ckpt_path):
if var_name in ms2tf_param_dict:
ms_name = ms2tf_param_dict[var_name]
new_tensor = tf.convert_to_tensor(ms_param_dict[ms_name])
if len(shape) == 2:
if tuple(shape) != new_tensor.shape or new_tensor.shape[0] == new_tensor.shape[1]:
new_tensor = tf.transpose(new_tensor, (1, 0))
if new_tensor.shape != tuple(shape):
raise ValueError("shape is not matched after transpose!! {}, {}"
.format(str(new_tensor.shape), str(tuple(shape))))
if new_tensor.shape != tuple(shape):
raise ValueError("shape is not matched after transpose!! {}, {}"
.format(str(new_tensor.shape), str(tuple(shape))))
var = tf.Variable(new_tensor, name=var_name)
convert_count = convert_count + 1
else:
var = tf.Variable(tf.contrib.framework.load_variable(tf_ckpt_path, var_name), name=var_name)
new_var_list.append(var)
print('convert value num: ', convert_count, " of ", len(ms2tf_param_dict))
# saving tf checkpoint
print("start saving ...")
saver = tf.train.Saver(var_list=new_var_list)
sess.run(tf.global_variables_initializer())
saver.save(sess, new_ckpt_path)
print("tf checkpoint was save in :", new_ckpt_path)
return True
def convert_tf_2_ms(tf_ckpt_path, ms_ckpt_path, new_ckpt_path):
"""
convert tf checkpoint to ms checkpoint
"""
tf2ms_param_dict = dict(zip(ms2tf_param_dict.values(), ms2tf_param_dict.keys()))
# load MS checkpoint
ms_param_dict = load_checkpoint(ms_ckpt_path)
new_params_list = []
session = tf.compat.v1.Session()
count = 0
for ms_name in tf2ms_param_dict.keys():
count += 1
param_dict = {}
tf_name = tf2ms_param_dict[ms_name]
data = tf.train.load_variable(tf_ckpt_path, tf_name)
ms_shape = ms_param_dict[ms_name].data.shape
tf_shape = data.shape
if len(ms_shape) == 2:
if ms_shape != tf_shape or ms_shape[0] == ms_shape[1]:
data = tf.transpose(data, (1, 0))
data = data.eval(session=session)
param_dict['name'] = ms_name
param_dict['data'] = Tensor(data)
new_params_list.append(param_dict)
print("start saving checkpoint ...")
save_checkpoint(new_params_list, new_ckpt_path)
print("ms checkpoint was save in :", new_ckpt_path)
return True
def main():
"""
tf checkpoint transfer to ms or ms checkpoint transfer to tf
"""
parser = argparse.ArgumentParser(description='checkpoint transfer.')
parser.add_argument("--tf_ckpt_path", type=str, default='./tf-bert/bs64k_32k_ckpt_model.ckpt-28252',
help="TensorFlow checkpoint dir, default is: './tf-bert/bs64k_32k_ckpt_model.ckpt-28252'.")
parser.add_argument("--ms_ckpt_path", type=str, default='./ms-bert/large_en.ckpt',
help="MindSpore checkpoint dir, default is: './ms-bert/large_en.ckpt'.")
parser.add_argument("--new_ckpt_path", type=str, default='./new_ckpt/new_bert_large_en.ckpt',
help="New checkpoint dir, default is: './new_ckpt/new_bert_large_en.ckpt'.")
parser.add_argument("--transfer_option", type=str, default='ms2tf',
help="option of transfer ms2tf or tf2ms, default is ms2tf.")
args_opt = parser.parse_args()
if args_opt.transfer_option == 'ms2tf':
print("start ms2tf option ...")
tf_ckpt_path = args_opt.tf_ckpt_path
ms_ckpt_path = args_opt.ms_ckpt_path
new_ckpt_path = args_opt.new_ckpt_path
convert_ms_2_tf(tf_ckpt_path, ms_ckpt_path, new_ckpt_path)
elif args_opt.transfer_option == 'tf2ms':
print("start tf2ms option ...")
tf_ckpt_path = args_opt.tf_ckpt_path
ms_ckpt_path = args_opt.ms_ckpt_path
new_ckpt_path = args_opt.new_ckpt_path
convert_tf_2_ms(tf_ckpt_path, ms_ckpt_path, new_ckpt_path)
else:
print("ERROR: '--transfer_option' please select 0 or 1")
if __name__ == "__main__":
main()
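# Example invocation (illustrative; the paths are simply the argparse defaults
# declared above, not guaranteed to exist on any given machine):
#
#   python ms_and_tf_checkpoint_transfer_tools.py \
#       --transfer_option tf2ms \
#       --tf_ckpt_path ./tf-bert/bs64k_32k_ckpt_model.ckpt-28252 \
#       --ms_ckpt_path ./ms-bert/large_en.ckpt \
#       --new_ckpt_path ./new_ckpt/new_bert_large_en.ckpt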
| 40.787671
| 116
| 0.634089
| 1,598
| 11,910
| 4.440551
| 0.103254
| 0.081172
| 0.040304
| 0.025648
| 0.993799
| 0.993799
| 0.993799
| 0.993799
| 0.993799
| 0.993799
| 0
| 0.016743
| 0.252813
| 11,910
| 291
| 117
| 40.927835
| 0.78065
| 0.120991
| 0
| 0.983607
| 0
| 0
| 0.163944
| 0.041445
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.054645
| null | null | 0.098361
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6f1509d73a44faf97db8ca92809e2658e09cccd0
| 11,351
|
py
|
Python
|
moscow_routes_parser/test_model_impl.py
|
rscprof/moscow_routes_parser
|
692627dd43d62f70e3e12a761897571c79a022a0
|
[
"MIT"
] | null | null | null |
moscow_routes_parser/test_model_impl.py
|
rscprof/moscow_routes_parser
|
692627dd43d62f70e3e12a761897571c79a022a0
|
[
"MIT"
] | null | null | null |
moscow_routes_parser/test_model_impl.py
|
rscprof/moscow_routes_parser
|
692627dd43d62f70e3e12a761897571c79a022a0
|
[
"MIT"
] | null | null | null |
from datetime import time, date
from unittest import TestCase
from moscow_routes_parser.model_impl import Stop_impl, Stop_builder_impl, Timetable_stop_time_t_mos_ru, \
Timetable_t_mos_ru, Timetable_stop_builder_t_mos_ru, Timetable_builder_t_mos_ru
class TestStop_impl(TestCase):
def test_stop(self):
name = "Stop"
coords = (10.2, 13.3)
stop = Stop_impl(name, coords)
self.assertEqual(stop.get_name(), name)
self.assertEqual(stop.get_coords(), coords)
def test_eq(self):
name = "Stop"
        coords = (37.574604, 55.499970)
        name2 = "Stop"
        coords2 = (37.5746041, 55.4999701)
        name3 = "Stop"
        coords3 = (37.574605, 55.499971)
        stop = Stop_impl(name, coords)
        stop2 = Stop_impl(name2, coords2)
        stop3 = Stop_impl(name3, coords3)
        self.assertTrue(stop == stop2)
        self.assertFalse(stop == stop3)
class TestStop_builder_impl(TestCase):
def test_builder(self):
name = "Stop"
stop = Stop_builder_impl().set_name(name).set_coords((10, 20)).build()
self.assertEqual(stop.get_name(), name)
self.assertEqual(stop.get_coords(), (10, 20))
def test_builder_reverse(self):
name = "StopStop"
stop = Stop_builder_impl().set_coords((10, 20)).set_name(name).build()
self.assertEqual(stop.get_name(), name)
self.assertEqual(stop.get_coords(), (10, 20))
class TestTimetable_stop_time_t_mos_ru(TestCase):
def test_timetable_stop_time(self):
t = time(10, 15)
color = "Red"
timetable_stop_time = Timetable_stop_time_t_mos_ru(t, color)
self.assertEqual(timetable_stop_time.get_time(), t)
self.assertEqual(timetable_stop_time.get_color_special_flight(), color)
def test_timetable_stop_time_none_color(self):
t = time(10, 15)
color = None
timetable_stop_time = Timetable_stop_time_t_mos_ru(t, color)
self.assertEqual(timetable_stop_time.get_time(), t)
self.assertEqual(timetable_stop_time.get_color_special_flight(), color)
def test_timetable_stop_time_equal_true(self):
t = time(10, 15)
color = "Red"
timetable_stop_time = Timetable_stop_time_t_mos_ru(t, color)
t2 = time(10, 15)
color2 = "Red"
timetable_stop_time2 = Timetable_stop_time_t_mos_ru(t2, color2)
self.assertTrue(timetable_stop_time == timetable_stop_time2)
self.assertFalse(timetable_stop_time != timetable_stop_time2)
def test_timetable_stop_time_equal_false_color(self):
t = time(10, 15)
color = "Blue"
timetable_stop_time = Timetable_stop_time_t_mos_ru(t, color)
t2 = time(10, 15)
color2 = "Red"
timetable_stop_time2 = Timetable_stop_time_t_mos_ru(t2, color2)
self.assertFalse(timetable_stop_time == timetable_stop_time2)
self.assertTrue(timetable_stop_time != timetable_stop_time2)
def test_timetable_stop_time_equal_false_time(self):
t = time(10, 16)
color = "Red"
timetable_stop_time = Timetable_stop_time_t_mos_ru(t, color)
t2 = time(10, 15)
color2 = "Red"
timetable_stop_time2 = Timetable_stop_time_t_mos_ru(t2, color2)
self.assertFalse(timetable_stop_time == timetable_stop_time2)
self.assertTrue(timetable_stop_time != timetable_stop_time2)
def test_timetable_stop_time_equal_false_time_color(self):
t = time(10, 16)
color = "Blue"
timetable_stop_time = Timetable_stop_time_t_mos_ru(t, color)
t2 = time(10, 15)
color2 = "Red"
timetable_stop_time2 = Timetable_stop_time_t_mos_ru(t2, color2)
self.assertFalse(timetable_stop_time == timetable_stop_time2)
self.assertTrue(timetable_stop_time != timetable_stop_time2)
def test_timetable_stop_time_str_without_color(self):
t = time(23, 59)
color = None
timetable_stop_time = Timetable_stop_time_t_mos_ru(t, color)
self.assertEqual(str(timetable_stop_time), "23:59")
self.assertEqual(repr(timetable_stop_time), "23:59")
def test_timetable_stop_time_str_with_color(self):
t = time(1, 1)
color = "Red"
timetable_stop_time = Timetable_stop_time_t_mos_ru(t, color)
self.assertEqual(str(timetable_stop_time), "01:01 (Red)")
self.assertEqual(repr(timetable_stop_time), "01:01 (Red)")
# Timetable_stop isn't used without its builder, so these direct-construction tests are disabled:
# class TestTimetable_stop_t_mos_ru(TestCase):
# def test_timetable_stop(self):
# name = "Улица Иванова"
# times = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop = Timetable_stop_t_mos_ru(name, [10, 20], times)
# self.assertEqual(stop.get_name(), name)
# self.assertEqual(list(stop.get_times()), times)
# self.assertEqual(stop.get_coords(), [10, 20])
#
# def test_eq(self):
# name = "Улица Иванова"
# times = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop = Timetable_stop_t_mos_ru(name, [10, 20], times)
# name2 = "Улица Иванова"
# times2 = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop2 = Timetable_stop_t_mos_ru(name2, [10, 20], times2)
# self.assertTrue(stop == stop2)
# self.assertFalse(stop != stop2)
#
# def test_eq_false_name(self):
# name = "Улица Иванова"
# times = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop = Timetable_stop_t_mos_ru(name, [10, 20], times)
# name2 = "Улица Ивановаа"
# times2 = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop2 = Timetable_stop_t_mos_ru(name2, [10, 20], times2)
# self.assertFalse(stop == stop2)
# self.assertTrue(stop != stop2)
#
# def test_eq_false_time(self):
# name = "Улица Иванова"
# times = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop = Timetable_stop_t_mos_ru(name, [10, 20], times)
# name2 = "Улица Иванова"
# times2 = [
# Timetable_stop_time_t_mos_ru(time(10, 13), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop2 = Timetable_stop_t_mos_ru(name2, [10, 20], times2)
# self.assertFalse(stop == stop2)
# self.assertTrue(stop != stop2)
#
# def test_eq_false_coords(self):
# name = "Улица Иванова"
# times = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop = Timetable_stop_t_mos_ru(name, [10, 20], times)
# name2 = "Улица Иванова"
# times2 = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop2 = Timetable_stop_t_mos_ru(name2, [10, 21], times2)
# self.assertFalse(stop == stop2)
# self.assertTrue(stop != stop2)
#
# def test_eq_shake(self):
# name = "Улица Иванова"
# times = [
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# ]
# stop = Timetable_stop_t_mos_ru(name, [10, 20], times)
# name2 = "Улица Иванова"
# times2 = [
# Timetable_stop_time_t_mos_ru(time(20, 30), None),
# Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
# ]
# stop2 = Timetable_stop_t_mos_ru(name2, [10, 20], times2)
# self.assertTrue(stop == stop2)
# self.assertFalse(stop != stop2)
class TestTimetable_t_mos_ru(TestCase):
def test_timetable_t_mos_ru(self):
id_route_t_mos_ru = '393'
direction = 1
current_date = date(2022, 2, 20)
times = [
Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
Timetable_stop_time_t_mos_ru(time(20, 30), None),
]
stop = Timetable_stop_builder_t_mos_ru().set_name('ул. Иванова').set_coords([20, 30]). \
add_item_timetable(times[0].get_time(), times[0].get_color_special_flight()). \
add_item_timetable(times[1].get_time()).build()
stops = [
stop
]
timetable = Timetable_t_mos_ru(id_route_t_mos_ru, direction, current_date, stops)
self.assertEqual(timetable.get_id_route_t_mos_ru(), id_route_t_mos_ru)
self.assertEqual(timetable.get_date(), current_date)
self.assertEqual(timetable.get_stops(), stops)
self.assertEqual(timetable.get_direction(), direction)
for stop_current in timetable:
self.assertEqual(stop_current, stop)
class TestTimetable_stop_builder_t_mos_ru(TestCase):
def test_timetable_stop(self):
name = "Улица Иванова"
times = [
Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
Timetable_stop_time_t_mos_ru(time(20, 30), None),
]
stop = Timetable_stop_builder_t_mos_ru().set_name(name). \
add_item_timetable(times[0].get_time(), times[0].get_color_special_flight()). \
add_item_timetable(times[1].get_time()). \
set_coords([10, 20]).build()
self.assertEqual(stop.get_name(), name)
self.assertEqual(list(stop.get_times()), times)
self.assertEqual(stop.get_coords(), [10, 20])
class TestTimetable_builder_t_mos_ru(TestCase):
def test_timetable_builder_t_mos_ru(self):
id_route_t_mos_ru = '393'
direction = 1
current_date = date(2022, 2, 20)
times = [
Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
Timetable_stop_time_t_mos_ru(time(20, 30), None),
]
stop = Timetable_stop_builder_t_mos_ru().set_name('ул. Иванова').set_coords([20, 30]). \
add_item_timetable(times[0].get_time(), times[0].get_color_special_flight()). \
add_item_timetable(times[1].get_time()).build()
stops = [
stop
]
builder = Timetable_builder_t_mos_ru(). \
set_date(current_date).set_direction(direction). \
set_id_route_t_mos_ru(id_route_t_mos_ru)
builder.add_stop().set_name('ул. Иванова').set_coords([20, 30]). \
add_item_timetable(times[0].get_time(), times[0].get_color_special_flight()). \
add_item_timetable(times[1].get_time())
timetable = builder.build()
self.assertEqual(timetable.get_id_route_t_mos_ru(), id_route_t_mos_ru)
self.assertEqual(timetable.get_date(), current_date)
self.assertEqual(timetable.get_stops(), stops)
self.assertEqual(timetable.get_direction(), direction)
for stop_current in timetable:
self.assertEqual(stop_current, stop)
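# Sketch (hypothetical helper, not in the original module): the two-entry
# `times` fixture is rebuilt in several tests above and could be shared.
def make_times():
    return [
        Timetable_stop_time_t_mos_ru(time(10, 12), "Red"),
        Timetable_stop_time_t_mos_ru(time(20, 30), None),
    ]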
| 39.276817
| 105
| 0.632455
| 1,510
| 11,351
| 4.360927
| 0.06755
| 0.203341
| 0.069248
| 0.076538
| 0.878512
| 0.848899
| 0.817008
| 0.800607
| 0.776765
| 0.774639
| 0
| 0.049677
| 0.249846
| 11,351
| 288
| 106
| 39.413194
| 0.723664
| 0.313717
| 0
| 0.596273
| 0
| 0
| 0.019878
| 0
| 0
| 0
| 0
| 0
| 0.229814
| 1
| 0.093168
| false
| 0.012422
| 0.018634
| 0
| 0.149068
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
488ee1465e7ce26a90672520e9b6cc4f3d43490e
| 20,911
|
py
|
Python
|
happ/tests/api/regular/test_auth.py
|
Mafioso/happ-backend
|
3a3a57add4ec55985cc37db79c71430ed78b1ee3
|
[
"MIT"
] | 1
|
2017-10-22T06:12:33.000Z
|
2017-10-22T06:12:33.000Z
|
happ/tests/api/regular/test_auth.py
|
Mafioso/happ-backend
|
3a3a57add4ec55985cc37db79c71430ed78b1ee3
|
[
"MIT"
] | null | null | null |
happ/tests/api/regular/test_auth.py
|
Mafioso/happ-backend
|
3a3a57add4ec55985cc37db79c71430ed78b1ee3
|
[
"MIT"
] | null | null | null |
import datetime
from django.utils.http import urlsafe_base64_encode
from django.utils.encoding import force_bytes
from django.contrib.auth.tokens import default_token_generator
from rest_framework import status
from rest_framework.test import APISimpleTestCase
from rest_framework_jwt.settings import api_settings
from happ.auth.utils import generate_confirmation_key
from happ.models import User
from happ.factories import UserFactory
from happ.tests import *
class Tests(APISimpleTestCase):
def test_user_registration(self):
"""
        Registration resource creates one user
it creates embedded settings
it returns JWT auth token
"""
n = User.objects.count()
url = prepare_url('register')
data = {
'username': 'username',
'email': 'email@mail.com',
'password': '123',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
user = User.objects.get(username='username')
self.assertEqual(User.objects.count(), n+1)
self.assertNotEqual(user.settings, None)
self.assertIn('token', response.data)
self.assertEqual(user.username, 'username')
self.assertEqual(user.email, 'email@mail.com')
def test_user_registration_same_username(self):
"""
We cannot register user with existing username
"""
n = User.objects.count()
url = prepare_url('register')
data = {
'username': 'username',
'email': 'email@mail.com',
'password': '123',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
user = User.objects.get(username='username')
self.assertEqual(User.objects.count(), n+1)
self.assertNotEqual(user.settings, None)
self.assertIn('token', response.data)
n = User.objects.count()
url = prepare_url('register')
data = {
'username': 'username',
'email': 'email@mail.com',
'password': '123',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(User.objects.count(), n)
def test_user_registration_no_username(self):
"""
Ensures that we cannot register without username
"""
n = User.objects.count()
url = prepare_url('register')
data = {
'email': 'email@mail.com',
'password': '123',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn('error_message', response.data)
self.assertEqual(User.objects.count(), n)
def test_user_registration_no_email(self):
"""
Ensures that we can register without email
"""
n = User.objects.count()
url = prepare_url('register')
data = {
'username': 'username',
'password': '123',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
user = User.objects.get(username='username')
self.assertEqual(User.objects.count(), n+1)
self.assertNotEqual(user.settings, None)
self.assertIn('token', response.data)
def test_user_registration_no_password(self):
"""
Ensures that we cannot register without password
"""
n = User.objects.count()
url = prepare_url('register')
data = {
'username': 'username',
'email': 'email@mail.com',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn('error_message', response.data)
self.assertEqual(User.objects.count(), n)
def test_user_facebook_registration(self):
"""
        Facebook registration resource creates one user
it creates embedded settings
it returns JWT auth token
"""
n = User.objects.count()
url = prepare_url('facebook-register')
data = {
'facebook_id': '123456',
'fullname': 'Richard Green',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
user = User.objects.get(username='123456')
self.assertEqual(User.objects.count(), n+1)
self.assertNotEqual(user.settings, None)
self.assertIn('token', response.data)
self.assertNotEqual(user.facebook_id, None)
def test_user_registration_same_facebook_id(self):
"""
We cannot register user with existing facebook_id
"""
n = User.objects.count()
url = prepare_url('facebook-register')
data = {
'facebook_id': '123456',
'fullname': 'Richard Green',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
user = User.objects.get(username='123456')
self.assertEqual(User.objects.count(), n+1)
self.assertNotEqual(user.settings, None)
self.assertIn('token', response.data)
n = User.objects.count()
url = prepare_url('facebook-register')
data = {
'facebook_id': '123456',
'fullname': 'Richard Green',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(User.objects.count(), n)
def test_user_registration_no_facebook_id(self):
"""
Ensures that we cannot register without facebook_id
"""
n = User.objects.count()
url = prepare_url('facebook-register')
data = {
'fullname': 'Richard Green',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn('error_message', response.data)
self.assertEqual(User.objects.count(), n)
def test_authentication(self):
"""
Ensures that user can authenticate with his username and password
it returns JWT
"""
u = UserFactory()
u.set_password('123')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertNotEqual(response.data['token'], None)
data = {
'username': u.username,
'password': '1234'
}
response = self.client.post(auth_url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertNotIn('token', response.data)
def test_password_reset(self):
"""
Ensures that user can reset password
"""
u = UserFactory()
u.set_password('123')
u.save()
url = prepare_url('password-reset')
data = {}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
data = {
'email': u.email,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_password_reset_confirm(self):
"""
Ensures that user can confirm password reset
"""
u = UserFactory()
u.set_password('123')
u.save()
url = prepare_url('password-reset-confirm')
data = {
'uidb64': urlsafe_base64_encode(force_bytes(u.pk)),
'token': default_token_generator.make_token(u),
'new_password': '1234567a',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_password_reset_confirm_no_uidb64(self):
"""
Ensures that user cannot confirm password reset without uidb64
"""
u = UserFactory()
u.set_password('123')
u.save()
url = prepare_url('password-reset-confirm')
data = {
'token': default_token_generator.make_token(u),
'new_password': '1234567a',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_password_reset_confirm_no_token(self):
"""
Ensures that user cannot confirm password reset without token
"""
u = UserFactory()
u.set_password('123')
u.save()
url = prepare_url('password-reset-confirm')
data = {
'uidb64': urlsafe_base64_encode(force_bytes(u.pk)),
'new_password': '1234567a',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_password_reset_confirm_wrong_uidb64(self):
"""
Ensures that user cannot confirm password reset with wrong uidb64
"""
u = UserFactory()
u.set_password('123')
u.save()
u2 = UserFactory()
u2.set_password('123')
u2.save()
url = prepare_url('password-reset-confirm')
data = {
'uidb64': urlsafe_base64_encode(force_bytes(u2.pk)),
'token': default_token_generator.make_token(u),
'new_password': '1234567a',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_password_reset_confirm_wrong_token(self):
"""
Ensures that user cannot confirm password reset with wrong token
"""
u = UserFactory()
u.set_password('123')
u.save()
u2 = UserFactory()
u2.set_password('123')
u2.save()
url = prepare_url('password-reset-confirm')
data = {
'uidb64': urlsafe_base64_encode(force_bytes(u.pk)),
'token': default_token_generator.make_token(u2),
'new_password': '1234567a',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_change_password(self):
"""
We can change user's password
"""
u = UserFactory()
u.set_password('123')
u.save()
url = prepare_url('password-change')
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertNotEqual(response.data['token'], None)
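# Attach the JWT on subsequent requests; the header prefix comes from JWT_AUTH_HEADER_PREFIX (typically "JWT")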
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
data = {
'old_password': '123',
'new_password': '1234qwerASDF!@#$',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
data = {
'username': u.username,
'password': '1234qwerASDF!@#$'
}
response = self.client.post(auth_url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertNotEqual(response.data['token'], None)
def test_change_password_wrong_old(self):
"""
We cannot change a user's password with the wrong old password
"""
u = UserFactory()
u.set_password('123')
u.save()
url = prepare_url('password-change')
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertNotEqual(response.data['token'], None)
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
data = {
'old_password': '1234',
'new_password': '1234qwerASDF!@#$',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_email_confirm_request(self):
"""
Ensures that a user can send an email confirmation request
"""
u = UserFactory()
u.set_password('123')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
url = prepare_url('email-confirm-request')
response = self.client.post(url, format='json')
u = User.objects.get(id=u.id)
self.assertNotEqual(u.confirmation_key, None)
self.assertNotEqual(u.confirmation_key_expires, None)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
u.email = None
u.save()
response = self.client.post(url, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_email_confirm_request_with_email(self):
"""
Ensures that a user can send an email confirmation request with an email in the request data
"""
u = UserFactory()
u.set_password('123')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
url = prepare_url('email-confirm-request')
data = {
'email': 'some@mail.com'
}
response = self.client.post(url, data=data, format='json')
u = User.objects.get(id=u.id)
self.assertNotEqual(u.confirmation_key, None)
self.assertNotEqual(u.confirmation_key_expires, None)
self.assertEqual(u.email, 'some@mail.com')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_email_confirm(self):
"""
Ensures that a user can confirm their email
"""
u = UserFactory(role=User.REGULAR)
u.set_password('123')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
url = prepare_url('email-confirm-request')
response = self.client.post(url, format='json')
u = User.objects.get(id=u.id)
url = prepare_url('email-confirm')
data = {
'key': u.confirmation_key
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
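# A successful confirmation is expected to promote the user to ORGANIZER and clear the key fields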
u = User.objects.get(id=u.id)
self.assertEqual(u.role, User.ORGANIZER)
self.assertEqual(u.confirmation_key, None)
self.assertEqual(u.confirmation_key_expires, None)
def test_email_confirm_no_key(self):
"""
Ensures that a user cannot confirm their email when no key is provided
"""
u = UserFactory(role=User.REGULAR)
u.set_password('123')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
url = prepare_url('email-confirm-request')
response = self.client.post(url, format='json')
u = User.objects.get(id=u.id)
url = prepare_url('email-confirm')
data = {}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_email_confirm_wrong_key(self):
"""
Ensures that a user cannot confirm their email with a wrong key
"""
u = UserFactory(role=User.REGULAR)
u.set_password('123')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
url = prepare_url('email-confirm-request')
response = self.client.post(url, format='json')
u = User.objects.get(id=u.id)
url = prepare_url('email-confirm')
data = {
'key': u.confirmation_key+'123'
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_email_confirm_key_expired(self):
"""
Ensures that a user cannot confirm their email with an expired key
"""
u = UserFactory(role=User.REGULAR)
u.set_password('123')
u.save()
auth_url = prepare_url('login')
data = {
'username': u.username,
'password': '123'
}
response = self.client.post(auth_url, data=data, format='json')
token = response.data['token']
self.client.credentials(HTTP_AUTHORIZATION='{} {}'.format(api_settings.JWT_AUTH_HEADER_PREFIX, token))
url = prepare_url('email-confirm-request')
response = self.client.post(url, format='json')
u = User.objects.get(id=u.id)
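# Shift the expiry back by the full validity window so the key is guaranteed to be expired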
u.confirmation_key_expires = u.confirmation_key_expires - datetime.timedelta(days=settings.CONFIRMATION_KEY_EXPIRES)
u.save()
url = prepare_url('email-confirm')
data = {
'key': u.confirmation_key
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_facebook_login(self):
"""
Logging in with Facebook returns a JWT auth token
"""
facebook_id = '123'
u = UserFactory(facebook_id=facebook_id)
url = prepare_url('facebook-login')
data = {
'facebook_id': facebook_id,
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertIn('token', response.data)
def test_facebook_login_no_facebook_id(self):
"""
Cannot authenticate if no facebook_id is provided
"""
facebook_id = '123'
u = UserFactory(facebook_id=facebook_id)
url = prepare_url('facebook-login')
data = {
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_facebook_login_wrong_facebook_id(self):
"""
Cannot authenticate if the given facebook_id is not registered
"""
facebook_id = '123'
u = UserFactory(facebook_id=facebook_id)
url = prepare_url('facebook-login')
data = {
'facebook_id': facebook_id + '123',
}
response = self.client.post(url, data=data, format='json')
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| 34.678275 | 124 | 0.606666 | 2,365 | 20,911 | 5.18351 | 0.060042 | 0.042418 | 0.064606 | 0.078962 | 0.899584 | 0.871605 | 0.865405 | 0.839383 | 0.826495 | 0.807244 | 0 | 0.022148 | 0.272345 | 20,911 | 602 | 125 | 34.73588 | 0.783517 | 0.07025 | 0 | 0.778032 | 0 | 0 | 0.100825 | 0.012557 | 0 | 0 | 0 | 0 | 0.169336 | 1 | 0.059497 | false | 0.135011 | 0.025172 | 0 | 0.086957 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
d2ab6543b95512244cd6a2e88287c0d3f0071b48 | 36,348 | py | Python | test/_traceable_test.py | shaharazulay/traceable-dict | 212b3216544d5c3d7842729f14c8d5f43971bebd | ["BSD-3-Clause"] | 1 | 2018-10-21T06:55:45.000Z | 2018-10-21T06:55:45.000Z | test/_traceable_test.py | shaharazulay/traceable-dict | 212b3216544d5c3d7842729f14c8d5f43971bebd | ["BSD-3-Clause"] | null | null | null | test/_traceable_test.py | shaharazulay/traceable-dict | 212b3216544d5c3d7842729f14c8d5f43971bebd | ["BSD-3-Clause"] | null | null | null |
import time
import unittest
import warnings
from traceable_dict import TraceableDict
from traceable_dict._utils import key_removed, key_added, key_updated, root, uncommitted
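# Tests for TraceableDict: commit/revert/checkout/log/diff over a revision-tracked dictionary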
class _WarningTestMixin(object):
"""A test which checks if the specified warning was raised"""
def assertWarns(self, warning, callable, msg, *args, **kwds):
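# Run the callable while recording warnings, then match on both category and message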
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter('always')
result = callable(*args, **kwds)
self.assertTrue(any(item.category == warning for item in warning_list))
self.assertEquals(msg, str(warning_list[-1].message))
class TraceableTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls._d1 = {1: {2: "A", 3: {4: "B", 5: [1, 2, 3]}}}
cls._d2 = {1: {2: "A_UPDATED", 3: {4: "B_UPDATED"}}}
def test_basic(self):
r1, r2, r3 = 1, 2, 3
d1 = self._d1.copy()
D1 = TraceableDict(d1)
self.assertEquals(d1, D1.as_dict())
self.assertEquals(D1.trace, {})
self.assertEquals(D1.revisions, [])
self.assertTrue(D1.has_uncommitted_changes)
D1.commit(revision=r1)
self.assertEquals(d1, D1.as_dict())
self.assertEquals(D1.trace, {})
self.assertEquals(D1.revisions, [r1])
self.assertFalse(D1.has_uncommitted_changes)
D1['new_key'] = 'new_val'
self.assertTrue(D1.has_uncommitted_changes)
D1.commit(revision=r2)
d1['new_key'] = 'new_val'
self.assertEquals(d1, D1.as_dict())
self.assertEquals([r1, r2], D1.revisions)
self.assertFalse(D1.has_uncommitted_changes)
self.assertEquals(
D1.trace,
{str(r2): [((root, 'new_key'), None, key_added)]})
D1.pop('new_key')
D1.commit(revision=r3)
self.assertNotEquals(d1, D1.as_dict())
d1.pop('new_key')
self.assertEquals(d1, D1.as_dict())
self.assertEquals([r1, r2, r3], D1.revisions)
self.assertEquals(
D1.trace,
{'2': [((root, 'new_key'), None, key_added)],
'3': [((root, 'new_key'), 'new_val', key_removed)]})
def test_new_value_is_traceable(self):
r1, r2 = 1, 2
d1 = self._d1.copy()
D1 = TraceableDict(d1)
self.assertEquals(d1, D1.as_dict())
self.assertEquals(D1.trace, {})
self.assertEquals(D1.revisions, [])
self.assertTrue(D1.has_uncommitted_changes)
D1['new_key'] = 'new_val'
self.assertEquals(D1.as_dict(), {1: {2: 'A', 3: {4: 'B', 5: [1, 2, 3]}}, 'new_key': 'new_val'})
self.assertEquals(D1.trace, {})
self.assertEquals(D1.revisions, [])
self.assertTrue(D1.has_uncommitted_changes)
D1['new_key'] = 'updated_val'
self.assertEquals(D1.as_dict(), {1: {2: 'A', 3: {4: 'B', 5: [1, 2, 3]}}, 'new_key': 'updated_val'})
self.assertEquals(D1.trace, {})
self.assertEquals(D1.revisions, [])
self.assertTrue(D1.has_uncommitted_changes)
D1.commit(revision=r1)
self.assertFalse(D1.has_uncommitted_changes)
self.assertEquals(D1.as_dict(), {1: {2: 'A', 3: {4: 'B', 5: [1, 2, 3]}}, 'new_key': 'updated_val'})
self.assertEquals(D1.trace, {})
self.assertEquals(D1.revisions, [r1])
def test_full(self):
r1 = int(time.time() * 1000)
d1 = self._d1.copy()
d2 = d1.copy()
d2['new_key'] = 'new_val'
D1 = TraceableDict(d1)
D2 = TraceableDict(d2)
D3 = TraceableDict(self._d2)
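# '|' merges the right-hand dict into a new TraceableDict and leaves both operands untouched (see test_pipe_immutable)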
D1 = D1 | D2
D1_Base = TraceableDict(D1)
D1.commit(revision=r1)
self.assertEquals(D1.as_dict(), d2)
self.assertEquals(D1.trace, {})
self.assertEquals(D1.revisions, [r1])
D1 = D1 | D3
self.assertTrue(D1.has_uncommitted_changes)
self.assertEquals(D1.as_dict(), self._d2)
self.assertEquals(D1.revisions, [r1])
r2 = int((time.time() - 5000) * 1000)
with self.assertRaises(ValueError) as err:
D1.commit(revision=r2)
self.assertTrue('cannot commit to earlier revision' in str(err.exception))
r2 = int((time.time() + 5000) * 1000)
D1.commit(revision=r2)
self.assertFalse(D1.has_uncommitted_changes)
self.assertEquals(D1.as_dict(), self._d2)
self.assertEquals(D1.revisions, [r1, r2])
result_base = D1.checkout(revision=r1)
self.assertEquals(result_base.as_dict(), D1_Base.as_dict())
self.assertEquals(result_base.revisions, [r1])
def test_pipe_immutable(self):
d1 = self._d1.copy()
d2 = d1.copy()
d2['new_key'] = 'new_val'
D1 = TraceableDict(d1)
self.assertEquals(d1, D1.as_dict())
D2 = TraceableDict(d2)
D1_before = D1.copy()
D1_tag = D1 | D2
D1_tag.commit(revision=1)
self.assertEquals(d2, D1_tag.as_dict())
self.assertEquals(d1, D1.as_dict())
self.assertFalse(D1.trace)
self.assertEquals(D1, D1_before)
def test_pipe_operator(self):
r1, r2, r3 = 1, 2, 3
d1 = self._d1.copy()
d2 = d1.copy()
d2['new_key'] = 'new_val'
D1 = TraceableDict(d1)
self.assertEquals(d1, D1.as_dict())
D2 = TraceableDict(d2)
D2.commit(revision=r2)
self.assertEquals(d2, D2.as_dict())
D1 = D1 | D2
D1.commit(revision=r1)
self.assertEquals(d2, D1.as_dict())
self.assertEquals(D1.trace, {})
D1['new_key'] = 'updated_value'
D1.commit(revision=r2)
self.assertEquals(
D1.trace,
{str(r2): [((root, 'new_key'), 'new_val', key_updated)]})
D3 = TraceableDict(d1)
D2 = D2 | D3
D2.commit(revision=r3)
self.assertEquals(d1, D2.as_dict())
self.assertEquals(
D2.trace,
{str(r3): [((root, 'new_key'), 'new_val', key_removed)]})
def test_pipe_operator_multiple(self):
r1, r2 = 1, 2
d1 = self._d1.copy()
d2 = d1.copy()
d2['new_key'] = 'new_val'
D1 = TraceableDict(d2)
D1.commit(revision=r1)
D2 = TraceableDict(d1)
D2.commit(revision=r1)
D3 = TraceableDict(d2)
D3.commit(revision=r1)
D4 = D1 | D2 | D3
D4.commit(revision=r2)
self.assertEquals(d2, D4.as_dict())
trace = D4.trace[str(r2)]
self.assertIn(
((root, 'new_key'), 'new_val', key_removed),
trace)
self.assertIn(
((root, 'new_key'), None, key_added),
trace)
def test_init_traceable_dict(self):
r1, r2 = 1, 2
td1 = TraceableDict({'a': 1, 'b':2})
self.assertEquals(td1.trace, {})
self.assertEquals(td1.revisions, [])
self.assertTrue(td1.has_uncommitted_changes)
td1.commit(revision=r1)
self.assertEquals(td1.as_dict(), {'a': 1, 'b': 2})
self.assertFalse(td1.has_uncommitted_changes)
self.assertEquals(td1.revisions, [r1])
td2 = TraceableDict(td1)
self.assertEquals(td2.as_dict(), td1.as_dict())
self.assertFalse(td2.has_uncommitted_changes)
self.assertEquals(td2.revisions, td1.revisions)
self.assertEquals(td2.trace, td1.trace)
td1['a'] = 8
td2 = TraceableDict(td1)
self.assertEquals(td2.as_dict(), td1.as_dict())
self.assertTrue(td2.has_uncommitted_changes)
self.assertEquals(td2.revisions, td1.revisions)
self.assertEquals(td2.trace, td1.trace)
td2.commit(revision=r2)
self.assertFalse(td2.has_uncommitted_changes)
class CommitTest(unittest.TestCase, _WarningTestMixin):
def test_basic(self):
r1, r2 = 1, 2
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
self.assertEquals([], td1.revisions)
self.assertEquals({}, td1.trace)
self.assertTrue(td1.has_uncommitted_changes)
td1.commit(r1)
self.assertEquals([r1], td1.revisions)
self.assertEquals({}, td1.trace)
self.assertFalse(td1.has_uncommitted_changes)
td1["a"] = 1
self.assertEquals([r1], td1.revisions)
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), {'a': 1, 'b': 'bb'})
self.assertEquals(td1.trace, {uncommitted: [((root, 'a'), 'aa', key_updated)]})
td1.commit(revision=r2)
self.assertFalse(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), {'a': 1, 'b': 'bb'})
self.assertEquals(td1.trace, {str(r2): [((root, 'a'), 'aa', key_updated)]})
self.assertEquals([r1, r2], td1.revisions)
def test_first_commit(self):
d1 = {"a": "aa", "b":"bb"}
d2 = {"a": "aa", "b":"bb", "c": "cc"}
td1 = TraceableDict(d1)
self.assertEquals(d1, td1.as_dict())
self.assertEquals([], td1.revisions)
self.assertEquals({}, td1.trace)
self.assertTrue(td1.has_uncommitted_changes)
td2 = TraceableDict(d2)
td1 = td1 | td2
self.assertEquals(td2.as_dict(), td1.as_dict())
self.assertEquals([], td1.revisions)
self.assertEquals({}, td1.trace)
self.assertTrue(td1.has_uncommitted_changes)
r1 = 1
td1.commit(revision=r1)
self.assertEquals(td2.as_dict(), td1.as_dict())
self.assertEquals([r1], td1.revisions)
self.assertEquals({}, td1.trace)
self.assertFalse(td1.has_uncommitted_changes)
td_with_previous_revisions = TraceableDict(td1)
self.assertFalse(td_with_previous_revisions.has_uncommitted_changes)
def test_commit_invalid_revision(self):
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
self.assertEquals([], td1.revisions)
self.assertTrue(td1.has_uncommitted_changes)
with self.assertRaises(ValueError) as err:
td1.commit(revision=None)
self.assertTrue('revision cannot be None' in str(err.exception))
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.trace, {})
self.assertEquals(td1.revisions, [])
with self.assertRaises(ValueError) as err:
td1.commit(revision='invalid')
self.assertTrue('revision must be an integer' in str(err.exception))
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.trace, {})
self.assertEquals(td1.revisions, [])
def test_commit_current_revision(self):
r1 = 1
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
self.assertEquals([r1], td1.revisions)
self.assertFalse(td1.has_uncommitted_changes)
td1["a"] = 1
self.assertTrue(td1.has_uncommitted_changes)
with self.assertRaises(ValueError) as err:
td1.commit(revision=r1)
self.assertTrue('cannot commit to earlier revision' in str(err.exception))
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.trace, {uncommitted: [((root, 'a'), 'aa', key_updated)]})
self.assertEquals([r1], td1.revisions)
def test_commit_earlier_revision(self):
r1 = 1
td1 = TraceableDict({"a": "aa", "b":"bb"})
td1.commit(revision=1)
self.assertEquals([1], td1.revisions)
self.assertFalse(td1.has_uncommitted_changes)
td1["a"] = 1
self.assertTrue(td1.has_uncommitted_changes)
revision = 3
td1.commit(revision=revision)
self.assertEquals([r1, revision], td1.revisions)
self.assertFalse(td1.has_uncommitted_changes)
td1["a"] = 2
self.assertTrue(td1.has_uncommitted_changes)
earlier_revision = 2
with self.assertRaises(ValueError) as err:
td1.commit(revision=earlier_revision)
self.assertTrue('cannot commit to earlier revision' in str(err.exception))
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.trace, {'3': [((root, 'a'), 'aa', key_updated)], uncommitted: [((root, 'a'), 1, key_updated)]})
self.assertEquals([r1, revision], td1.revisions)
def test_commit_no_diff(self):
base_revision = 1
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=base_revision)
self.assertEquals({}, td1.trace)
self.assertEquals(d1, td1.as_dict())
self.assertFalse(td1.has_uncommitted_changes)
self.assertEquals([base_revision], td1.revisions)
self.assertWarns(
UserWarning,
td1.commit,
msg='nothing to commit',
revision=18
)
self.assertEquals({}, td1.trace)
self.assertEquals(d1, td1.as_dict())
self.assertFalse(td1.has_uncommitted_changes)
self.assertEquals([base_revision], td1.revisions)
class RevertTest(unittest.TestCase):
def test_basic(self):
base_revision = 0
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=base_revision)
self.assertEquals([base_revision], td1.revisions)
r1 = 1
td1["a"] = 1
td1.commit(revision=r1)
r2 = 2
td1["b"] = 2
td1.commit(revision=r2)
self.assertFalse(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), {'a': 1, 'b': 2})
self.assertEquals(td1.trace, {
str(r1): [((root, 'a'), 'aa', key_updated)],
str(r2): [((root, 'b'), 'bb', key_updated)]})
self.assertEquals([base_revision, r1, r2], td1.revisions)
td1["b"] = 3
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), {'a': 1, 'b': 3})
self.assertEquals(td1.trace, {
str(r1): [((root, 'a'), 'aa', key_updated)],
str(r2): [((root, 'b'), 'bb', key_updated)],
uncommitted:[((root, 'b'), 2, key_updated)]})
self.assertEquals([base_revision, r1, r2], td1.revisions)
td1.revert()
self.assertFalse(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), {"a": 1, "b": 2})
self.assertEquals(td1.trace, {
str(r1): [((root, 'a'), 'aa', key_updated)],
str(r2): [((root, 'b'), 'bb', key_updated)]})
self.assertEquals([base_revision, r1, r2], td1.revisions)
def test_revert_no_revisions(self):
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), d1)
self.assertEquals(td1.trace, {})
self.assertEquals(td1.revisions, [])
td1.revert()
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), d1)
self.assertEquals(td1.trace, {})
self.assertEquals(td1.revisions, [])
def test_revert_to_first_revision(self):
base_revision = 0
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=base_revision)
self.assertFalse(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), d1)
self.assertEquals(td1.trace, {})
self.assertEquals([base_revision], td1.revisions)
td1["b"] = 1
self.assertTrue(td1.has_uncommitted_changes)
self.assertNotEquals(td1.as_dict(), d1)
self.assertNotEquals(td1.trace, {})
self.assertEquals([base_revision], td1.revisions)
td1.revert()
self.assertFalse(td1.has_uncommitted_changes)
self.assertEquals(td1.as_dict(), d1)
self.assertEquals(td1.trace, {})
self.assertEquals([base_revision], td1.revisions)
def test_revert_without_uncommitted_changes(self):
r1, r2 = 1, 2
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
self.assertEquals([r1], td1.revisions)
self.assertEquals(td1.as_dict(), d1)
self.assertEquals(td1.trace, {})
self.assertFalse(td1.has_uncommitted_changes)
td1.revert()
self.assertEquals([r1], td1.revisions)
self.assertEquals(td1.as_dict(), d1)
self.assertEquals(td1.trace, {})
self.assertFalse(td1.has_uncommitted_changes)
td1["a"] = 1
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(
td1.trace,
{uncommitted: [((root, 'a'), 'aa', key_updated)]})
self.assertEquals(td1.as_dict(), {"a": 1, "b": "bb"})
td1.revert()
self.assertEquals(td1.as_dict(), {"a": "aa", "b": "bb"})
self.assertEquals(td1.trace, {})
td1["a"] = 1
td1.commit(revision=r2)
_trace = {str(r2): [((root, 'a'), 'aa', key_updated)]}
self.assertEquals([r1, r2], td1.revisions)
self.assertEquals(td1.as_dict(), {"a": 1, "b": "bb"})
self.assertEquals(td1.trace, _trace)
self.assertFalse(td1.has_uncommitted_changes)
td1.revert()
self.assertEquals([r1, r2], td1.revisions)
self.assertEquals(td1.as_dict(), {"a": 1, "b": "bb"})
self.assertEquals(td1.trace, _trace)
self.assertFalse(td1.has_uncommitted_changes)
class AsDictTest(unittest.TestCase):
def test_basic(self):
base_revision = 0
d1 = {"a": 1, "b":2, "c": 3}
td1 = TraceableDict(d1)
td1.commit(revision=base_revision)
self.assertEquals([base_revision], td1.revisions)
self.assertEqual(d1, td1.as_dict())
r1 = 1
td1["a"] = "updated"
td1.commit(revision=r1)
self.assertEqual(td1.as_dict(), {"a": "updated", "b":2, "c": 3})
td1["b"] = "also_updated"
self.assertTrue(td1.has_uncommitted_changes)
self.assertEqual(td1.as_dict(), {"a": "updated", "b": "also_updated", "c": 3})
class CheckoutTests(unittest.TestCase):
def test_basic(self):
r1, r2, r3 = 1, 2, 3
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1["a"] = 2
td1.commit(revision=r2)
td1["b"] = 3
td1.commit(revision=r3)
self.assertEquals(td1.as_dict(), {'a': 2, 'b': 3})
self.assertEquals(td1.trace, {
str(r2): [((root, 'a'), 'aa', key_updated)],
str(r3): [((root, 'b'), 'bb', key_updated)]})
self.assertEquals([r1, r2, r3], td1.revisions)
result_r2 = td1.checkout(revision=r2)
self.assertFalse(result_r2.has_uncommitted_changes)
self.assertEquals(result_r2.as_dict(), {'a': 2, 'b': 'bb'})
self.assertEquals(result_r2.trace, {str(r2): [((root, 'a'), 'aa', key_updated)]})
self.assertEquals([r1, r2], result_r2.revisions)
result_r1_1 = td1.checkout(revision=r1)
self.assertFalse(result_r1_1.has_uncommitted_changes)
self.assertEquals(result_r1_1.as_dict(), {'a': 'aa', 'b': 'bb'})
self.assertEquals(result_r1_1.trace, {})
self.assertEquals([r1], result_r1_1.revisions)
result_r1_2 = result_r2.checkout(revision=r1)
self.assertEquals(result_r1_1.has_uncommitted_changes, result_r1_2.has_uncommitted_changes)
self.assertEquals(result_r1_1.as_dict(), result_r1_2.as_dict())
self.assertEquals(result_r1_1.trace, result_r1_2.trace)
self.assertEquals(result_r1_1.revisions, result_r1_2.revisions)
def test_checkout_key_removed(self):
r1, r2 = 1, 2
d1 = {}
d2 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td2 = TraceableDict(d2)
td2.commit(revision=r1)
td1 = td1 | td2
td1.commit(revision=r2)
result_r1 = td1.checkout(revision=r1)
self.assertFalse(result_r1.has_uncommitted_changes)
self.assertEquals(result_r1.as_dict(), d1)
self.assertEquals(result_r1.trace, {})
self.assertEquals([r1], result_r1.revisions)
def test_checkout_no_revisions(self):
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
with self.assertRaises(Exception) as err:
td1.checkout(revision=1)
self.assertTrue('no revisions available. you must commit an initial revision first.' in str(err.exception))
def test_checkout_invalid_revision(self):
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=1)
with self.assertRaises(ValueError) as err:
td1.checkout(revision=None)
self.assertTrue("revision must be an integer" in err.exception)
with self.assertRaises(ValueError) as err:
td1.checkout(revision="invalid")
self.assertTrue("revision must be an integer" in err.exception)
def test_checkout_unknown_revision(self):
r1, r2, r3 = 1, 2, 3
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1["a"] = 1
td1.commit(revision=r2)
td1["b"] = 2
td1.commit(revision=r3)
unknown_revision = 55
with self.assertRaises(ValueError) as err:
td1.checkout(revision=unknown_revision)
self.assertTrue('unknown revision %s' % unknown_revision in str(err.exception))
def test_checkout_current_revision(self):
r1, r2 = 1, 2
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1["a"] = 1
td1.commit(revision=r1)
td1["b"] = 2
td1.commit(revision=r2)
result_r2 = td1.checkout(revision=r2)
self.assertEquals(result_r2.has_uncommitted_changes, td1.has_uncommitted_changes)
self.assertEquals(result_r2.as_dict(), td1.as_dict())
self.assertEquals(result_r2.trace, td1.trace)
self.assertEquals(result_r2.revisions, td1.revisions)
def test_checkout_uncommitted_changes(self):
r1 = 1
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
td1["a"] = 1
td1.commit(revision=r1)
td1["b"] = 2
self.assertTrue(td1.has_uncommitted_changes)
with self.assertRaises(Exception) as err:
td1.checkout(revision=r1)
self.assertTrue('dictionary has uncommitted changes. you must commit or revert first.' in str(err.exception))
class LogTests(unittest.TestCase):
def test_basic(self):
r1, r2, r3 = 1, 2, 3
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
d2 = {"A": {"B": {"C": 1, "D": [2, 3], "E": 4, "F": 5}}}
d3 = {"A": {"B": {"C": 1, "D": [2, 3, 4]}}}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1 = td1 | d2
td1.commit(revision=r2)
td1 = td1 | d3
td1["a"] = 1
td1.commit(revision=r3)
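# log() maps each revision in which the value under the given path changed to its value at that revision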
log = td1.log(path=("A",))
self.assertEquals(list(log.keys()), [r1, r2, r3])
self.assertEquals(log[r1], d1)
self.assertEquals(log[r2], d2)
self.assertEquals(log[r3], d3)
log = td1.log(path=("A", "B"))
self.assertEquals(list(log.keys()), [r1, r2, r3])
self.assertEquals(log[r1], d1["A"])
self.assertEquals(log[r2], d2["A"])
self.assertEquals(log[r3], d3["A"])
log = td1.log(path=("A", "B", "C"))
self.assertEquals(list(log.keys()), [r1])
self.assertEquals(log[r1], {"C": 1})
log = td1.log(path=("A", "B", "D"))
self.assertEquals(list(log.keys()), [r1, r3])
self.assertEquals(log[r1], {"D": [2, 3]})
self.assertEquals(log[r3], {"D": [2, 3, 4]})
log = td1.log(path=("A", "B", "E"))
self.assertEquals(list(log.keys()), [r2, r3])
self.assertEquals(log[r2], {'E': 4})
self.assertEquals(log[r3], {'E': {}})
log = td1.log(path=("A", "B", "F"))
self.assertEquals(list(log.keys()), [r2, r3])
self.assertEquals(log[r2], {'F': 5})
self.assertEquals(log[r3], {'F': {}})
log = td1.log(path=('a',))
self.assertEquals(list(log.keys()), [r3])
self.assertEquals(log[r3], {'a': 1})
def test_key_not_in_base_revision(self):
r1, r2, r3 = 1, 2, 3
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
d2 = {"A": {"B": {"C": 1, "D": [2, 3], "E": 4, "F": 5}}}
d3 = {"A": {"B": {"C": 1, "D": [2, 3, 4]}}}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1 = td1 | d2
td1.commit(revision=r2)
td1 = td1 | d3
td1.commit(revision=r3)
log = td1.log(path=("A", "B", "E"))
self.assertEquals(list(log.keys()), [r2, r3])
self.assertEquals(log[r2], {'E': 4})
self.assertEquals(log[r3], {'E': {}})
log = td1.log(path=("A", "B", "F"))
self.assertEquals(list(log.keys()), [r2, r3])
self.assertEquals(log[r2], {'F': 5})
self.assertEquals(log[r3], {'F': {}})
def test_key_removed_and_returns(self):
r1, r2, r3 = 1, 2, 3
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
d2 = {"A": {"B": {"D": [2, 3], "E": 4, "F": 5}}}
d3 = {"A": {"B": {"C": 1, "D": [2, 3, 4]}}}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1 = td1 | d2
td1.commit(revision=r2)
td1 = td1 | d3
td1.commit(revision=r3)
log = td1.log(path=("A", "B", "C"))
self.assertEquals(list(log.keys()), [r1, r2, r3])
self.assertEquals(log[r1], {'C': 1})
self.assertEquals(log[r2], {'C': {}})
self.assertEquals(log[r3], {'C': 1})
def test_key_not_changing_in_revision(self):
r1, r2, r3, r4 = 1, 2, 3, 4
d1 = {"A": {"B": {"C": 1}}}
d2 = {"A": {"B": {"C": 2}}}
d3 = {"A": {"B": {"C": 2, "D": 1}}}
d4 = {"A": {"B": {"C": 3, "D": 1}}}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1 = td1 | d2
td1.commit(revision=r2)
td1 = td1 | d3
td1.commit(revision=r3)
td1 = td1 | d4
td1.commit(revision=r4)
log = td1.log(path=("A", "B", "C"))
self.assertEquals(list(log.keys()), [r1, r2, r4])
self.assertEquals(log[r1], {'C': 1})
self.assertEquals(log[r2], {'C': 2})
self.assertEquals(log[r4], {'C': 3})
d1 = {"A": {"B": {"D": 1}}}
d2 = {"A": {"B": {"C": 2}}}
d3 = {"A": {"B": {"C": 2, "D": 1}}}
d4 = {"A": {"B": {"C": 3, "D": 1}}}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1 = td1 | d2
td1.commit(revision=r2)
td1 = td1 | d3
td1.commit(revision=r3)
td1 = td1 | d4
td1.commit(revision=r4)
log = td1.log(path=("A", "B", "C"))
self.assertEquals(list(log.keys()), [r2, r4])
self.assertEquals(log[r2], {'C': 2})
self.assertEquals(log[r4], {'C': 3})
def test_log_no_revisions(self):
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
td1 = TraceableDict(d1)
log = td1.log(path=("A",))
self.assertEquals(log, {})
def test_log_uncommitted_changes(self):
r1, r2 = 1, 2
d1 = {"A": {"B": {"C": 1, "D": [2, 3], "E": 4}}}
d2 = {"A": {"B": {"C": 1, "D": [2], "F": 5}}}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1 = td1 | d2
self.assertTrue(td1.has_uncommitted_changes)
log = td1.log(path=('A',))
self.assertEquals(list(log.keys()), [r1])
self.assertEquals(log[r1], d1)
td1.commit(revision=r2)
self.assertFalse(td1.has_uncommitted_changes)
log = td1.log(path=('A',))
self.assertEquals(list(log.keys()), [r1, r2])
self.assertEquals(log[r1], d1)
self.assertEquals(log[r2], d2)
def test_log_invalid_path(self):
r1 = 1
d1 = {"A": 1, "B": 2}
d2 = {"B": 1}
td1 = TraceableDict(d1)
td2 = TraceableDict(d2)
td1 = td1 | td2
td1.commit(revision=r1)
invalid_path = None
with self.assertRaises(TypeError) as err:
td1.log(path=invalid_path)
self.assertTrue('path must be tuple' in str(err.exception))
invalid_path = 'A'
with self.assertRaises(TypeError) as err:
td1.log(path=invalid_path)
self.assertTrue('path must be tuple' in str(err.exception))
invalid_path = ()
with self.assertRaises(ValueError) as err:
td1.log(path=invalid_path)
self.assertTrue('path cannot be empty' in str(err.exception))
unknown_path = ('A', 'B')
log = td1.log(path=unknown_path)
self.assertEquals(list(log.keys()), [])
self.assertEquals(log, {})
class DiffTests(unittest.TestCase):
def test_basic(self):
r1, r2, r3, r4 = 1, 2, 3, 4
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
d2 = {"A": {"B": {"C": 2, "D": [2, 3]}}}
d3 = {"A": {"B": {"D": [2, 3, 5]}}}
d4 = {"A": {"B": {"D": [2, 3, 5], "E": 1}}}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1 = td1 | d2
td1.commit(revision=r2)
td1 = td1 | d3
td1.commit(revision=r3)
td1 = td1 | d4
td1.commit(revision=r4)
diff = td1.diff(revision=1)
self.assertFalse(diff)
diff = td1.diff(revision=2)
self.assertEquals(
diff,
{"A": {"B": {"C": "---1 +++2", "D": [2, 3]}}}
)
diff = td1.diff(revision=3)
self.assertEquals(
diff,
{"A": {"B": {"C": "---2", "D": "---[2, 3] +++[2, 3, 5]"}}}
)
diff = td1.diff(revision=4)
self.assertEquals(
diff,
{"A": {"B": {"D": [2, 3, 5], "E": "+++1"}}}
)
def test_with_target_path(self):
r1, r2, r3, r4 = 1, 2, 3, 4
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
d2 = {"A": {"B": {"C": 2, "D": [2, 3]}}}
d3 = {"A": {"B": {"D": [2, 3, 5]}}}
d4 = {"A": {"B": {"D": [2, 3, 5], "E": 1}}}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
td1 = td1 | d2
td1.commit(revision=r2)
td1 = td1 | d3
td1.commit(revision=r3)
td1 = td1 | d4
td1.commit(revision=r4)
diff = td1.diff(revision=1, path=('A', 'B'))
self.assertFalse(diff)
diff = td1.diff(revision=2, path=('A', 'B'))
self.assertEquals(
diff,
{"B": {"C": "---1 +++2", "D": [2, 3]}}
)
diff = td1.diff(revision=3, path=('A', 'B', 'C'))
self.assertEquals(
diff,
{"C": "---2"}
)
diff = td1.diff(revision=4, path=('A', 'B'))
self.assertEquals(
diff,
{"B": {"D": [2, 3, 5], "E": "+++1"}}
)
diff = td1.diff(revision=4, path=('A', 'B', 'E'))
self.assertFalse(diff)
def test_log_no_revisions(self):
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
td1 = TraceableDict(d1)
diff = td1.diff()
self.assertFalse(diff)
diff = td1.diff(revision=1)
self.assertFalse(diff)
def test_diff_base_revision(self):
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
td1 = TraceableDict(d1)
td1.commit(revision=1)
diff = td1.diff(revision=1)
self.assertFalse(diff)
def test_diff_working_tree(self):
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
d2 = {"A": {"B": {"C": 2, "D": [2, 3]}}}
td1 = TraceableDict(d1)
td1.commit(revision=1)
diff = td1.diff()
self.assertFalse(diff)
td1 = td1 | d2
diff = td1.diff()
self.assertEquals(
diff,
{"A": {"B": {"C": "---1 +++2", "D": [2, 3]}}}
)
diff = td1.diff(path=("A", "B"))
self.assertEquals(
diff,
{"B": {"C": "---1 +++2", "D": [2, 3]}}
)
def test_diff_invalid_revision(self):
d1 = {"A": {"B": {"C": 1, "D": [2, 3]}}}
td1 = TraceableDict(d1)
td1.commit(revision=1)
unknown_revision = 2
with self.assertRaises(ValueError) as err:
td1.diff(revision=unknown_revision)
self.assertTrue('unknown revision %s' % unknown_revision in str(err.exception))
with self.assertRaises(ValueError) as err:
td1.diff(revision=unknown_revision, path=('A', 'B'))
self.assertTrue('unknown revision %s' % unknown_revision in str(err.exception))
class RemoveOldestRevisionTests(unittest.TestCase):
def test_basic(self):
r1, r2, r3, r4, r5 = 1, 2, 3, 4, 5
d1 = {"a": "aa", "b":"bb"}
d2 = {"a": "a", "b":"bb"}
d3 = {"a": "a", "b":"b"}
d4 = {"a": "a"}
d5 = {"aa": "aa", "bb":"bb", "c":"c"}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
self.assertEquals(td1.as_dict(), d1)
td1 = td1 | d2
td1.commit(revision=r2)
self.assertEquals(td1.as_dict(), d2)
td1 = td1 | d3
td1.commit(revision=r3)
self.assertEquals(td1.as_dict(), d3)
td1 = td1 | d4
td1.commit(revision=r4)
self.assertEquals(td1.as_dict(), d4)
td1 = td1 | d5
td1.commit(revision=r5)
self.assertEquals(td1.as_dict(), d5)
self.assertEquals(td1.revisions, [r1, r2, r3, r4, r5])
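# remove_oldest_revision() drops the oldest revision and folds its changes into the new base revision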
td1.remove_oldest_revision()
self.assertEquals(td1.as_dict(), d5)
self.assertEquals(td1.revisions, [r2, r3, r4, r5])
self.assertEquals(set(td1.trace.keys()), set([str(r3), str(r4), str(r5)]))
td_past_revision = td1.checkout(revision=r2)
self.assertEquals(td_past_revision.as_dict(), d2)
td1.remove_oldest_revision()
self.assertEquals(td1.as_dict(), d5)
self.assertEquals(td1.revisions, [r3, r4, r5])
self.assertEquals(set(td1.trace.keys()), set([str(r4), str(r5)]))
td_past_revision = td1.checkout(revision=r3)
self.assertEquals(td_past_revision.as_dict(), d3)
def test_remove_no_revisions(self):
d1 = {"a": "aa", "b":"bb"}
td1 = TraceableDict(d1)
self.assertEquals(td1.as_dict(), d1)
self.assertEquals(td1.trace, {})
self.assertEquals(td1.revisions, [])
td1.remove_oldest_revision()
self.assertEquals(td1.as_dict(), d1)
self.assertEquals(td1.trace, {})
self.assertEquals(td1.revisions, [])
def test_remove_base_revision(self):
r1, r2 = 1, 2
d1 = {"a": "a", "b":"b"}
d2 = {"a": "a_updated", "b":"b_updated"}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
self.assertEquals(td1.as_dict(), d1)
td1 = td1 | d2
td1.commit(revision=r2)
self.assertEquals(td1.as_dict(), d2)
self.assertEquals(td1.revisions, [r1, r2])
td1.remove_oldest_revision()
self.assertEquals(td1.as_dict(), d2)
self.assertEquals(td1.revisions, [r2])
self.assertEquals(td1.trace, {})
td1.remove_oldest_revision()
self.assertEquals(td1.as_dict(), d2)
self.assertEquals(td1.revisions, [r2])
self.assertEquals(td1.trace, {})
def test_remove_uncommitted_changes(self):
r1, r2 = 1, 2
d1 = {"a": "a", "b":"b"}
d2 = {"a": "a_updated", "b":"b"}
d3 = {"a": "a_updated", "b":"b_updated"}
td1 = TraceableDict(d1)
td1.commit(revision=r1)
self.assertEquals(td1.as_dict(), d1)
td1 = td1 | d2
td1.commit(revision=r2)
self.assertEquals(td1.as_dict(), d2)
td1 = td1 | d3
self.assertEquals(td1.as_dict(), d3)
self.assertTrue(td1.has_uncommitted_changes)
self.assertEquals(td1.revisions, [r1, r2])
self.assertEquals(set(td1.trace.keys()), set([str(r2), uncommitted]))
td1.remove_oldest_revision()
self.assertEquals(td1.as_dict(), d3)
self.assertEquals(td1.revisions, [r2])
self.assertEquals(set(td1.trace.keys()), set([uncommitted]))
td1.remove_oldest_revision()
self.assertEquals(td1.as_dict(), d3)
self.assertEquals(td1.revisions, [r2])
self.assertEquals(set(td1.trace.keys()), set([uncommitted]))
td1.revert()
self.assertEquals(td1.as_dict(), d2)
self.assertEquals(td1.revisions, [r2])
self.assertEquals(td1.trace, {})
if __name__ == '__main__':
unittest.main()
| 30.518892 | 125 | 0.555574 | 4,541 | 36,348 | 4.328342 | 0.038318 | 0.202697 | 0.087967 | 0.050064 | 0.8551 | 0.807123 | 0.755838 | 0.712999 | 0.685627 | 0.621267 | 0 | 0.05664 | 0.27432 | 36,348 | 1,191 | 126 | 30.518892 | 0.688517 | 0.001513 | 0 | 0.692922 | 0 | 0 | 0.041005 | 0 | 0 | 0 | 0 | 0 | 0.406393 | 1 | 0.050228 | false | 0 | 0.006849 | 0 | 0.067352 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8292e1fe7ef0634817cfe8b3be71ed54ca03fb23 | 14,957 | py | Python | Sokoban/function.py | Tomeczek444/TomekSokoban | 74fa67d4b324c234bb900e3fdc27833982439ee9 | ["MIT"] | null | null | null | Sokoban/function.py | Tomeczek444/TomekSokoban | 74fa67d4b324c234bb900e3fdc27833982439ee9 | ["MIT"] | null | null | null | Sokoban/function.py | Tomeczek444/TomekSokoban | 74fa67d4b324c234bb900e3fdc27833982439ee9 | ["MIT"] | null | null | null |
import pygame, sys
from const import *
import copy
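# Tile legend, inferred from the move logic below (the sprite names in printmap follow their own scheme):
# x = wall, o = plain floor, p = storage floor, s = box on plain floor, c = box on storage,
# i/j/k/l = player on plain floor facing left/up/right/down,
# e/f/g/h = player on storage floor facing left/up/right/down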
def indexe(a,elem):
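# Return a list of (row, col) coordinates whose cell value is in elem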
x=[]
for i in range(len(a)):
for j in range(len(a[0])):
if a[i][j]in (elem):
x.append((i,j))
return x
def countincomplete(a,elem):
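# Count cells equal to elem, e.g. boxes that are not yet on a storage tile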
x=0
for i in range(len(a)):
for j in range(len(a[0])):
if a[i][j]== elem:
x+=1
return x
def moveleft(level,human):
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] in ("p"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] = "e"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] in ("o"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] = "i"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] in ("s"):
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 2] == "o":
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 2] = "s"
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] = "i"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 2] == "p":
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 2] = "c"
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] = "i"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] in ("c"):
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 2] == "o":
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 2] = "s"
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] = "e"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 2] == "p":
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 2] = "c"
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] - 1] = "e"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
return level
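# moveright/moveup/movedown below mirror moveleft, with the offsets and facing characters swapped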
def moveright(level,human):
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] in ("p"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] = "g"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] in ("o"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] = "k"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] in ("s"):
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 2] == "o":
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 2] = "s"
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] = "k"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 2] == "p":
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 2] = "c"
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] = "k"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] in ("c"):
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 2] == "o":
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 2] = "s"
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] = "g"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"  # fixed: clear the old player cell ([0]), matching the sibling branches
elif level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 2] == "p":
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 2] = "c"
level[indexe(level, human)[0][0]][indexe(level, human)[0][1] + 1] = "g"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
return level
def moveup(level,human):
if level[indexe(level, human)[0][0]-1][indexe(level, human)[0][1]] in ("p"):
level[indexe(level, human)[0][0]- 1][indexe(level, human)[0][1] ] = "f"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0] - 1][indexe(level, human)[0][1]] in ("o"):
level[indexe(level, human)[0][0] - 1][indexe(level, human)[0][1]] = "j"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0] - 1][indexe(level, human)[0][1]] in ("s"):
if level[indexe(level, human)[0][0] - 2][indexe(level, human)[0][1]] == "o":
level[indexe(level, human)[0][0] - 2][indexe(level, human)[0][1]] = "s"
level[indexe(level, human)[0][0] - 1][indexe(level, human)[0][1]] = "j"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0] - 2][indexe(level, human)[0][1]] == "p":
level[indexe(level, human)[0][0] - 2][indexe(level, human)[0][1]] = "c"
level[indexe(level, human)[0][0]- 1][indexe(level, human)[0][1] ] = "j"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0]-1][indexe(level, human)[0][1]] in ("c"):
if level[indexe(level, human)[0][0] - 2][indexe(level, human)[0][1]] == "o":
level[indexe(level, human)[0][0]- 2][indexe(level, human)[0][1] ] = "s"
level[indexe(level, human)[0][0]- 1][indexe(level, human)[0][1] ] = "f"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
elif level[indexe(level, human)[0][0] - 2][indexe(level, human)[0][1]] == "p":
level[indexe(level, human)[0][0]- 2][indexe(level, human)[0][1] ] = "c"
level[indexe(level, human)[0][0] - 1][indexe(level, human)[0][1]] = "f"
if level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "o"
else:
level[indexe(level, human)[1][0]][indexe(level, human)[1][1]] = "p"
return level
def movedown(level,human):
if level[indexe(level, human)[0][0]+1][indexe(level, human)[0][1]] in ("p"):
level[indexe(level, human)[0][0]+ 1][indexe(level, human)[0][1] ] = "h"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
elif level[indexe(level, human)[0][0] + 1][indexe(level, human)[0][1]] in ("o"):
level[indexe(level, human)[0][0] + 1][indexe(level, human)[0][1]] = "l"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
elif level[indexe(level, human)[0][0] + 1][indexe(level, human)[0][1]] in ("s"):
if level[indexe(level, human)[0][0] + 2][indexe(level, human)[0][1]] == "o":
level[indexe(level, human)[0][0] + 2][indexe(level, human)[0][1]] = "s"
level[indexe(level, human)[0][0] + 1][indexe(level, human)[0][1]] = "l"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"  # fixed: clear the old player cell ([0]), matching the sibling branches
elif level[indexe(level, human)[0][0] + 2][indexe(level, human)[0][1]] == "p":
level[indexe(level, human)[0][0] + 2][indexe(level, human)[0][1]] = "c"
level[indexe(level, human)[0][0]+ 1][indexe(level, human)[0][1] ] = "l"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
elif level[indexe(level, human)[0][0]+1][indexe(level, human)[0][1]] in ("c"):
if level[indexe(level, human)[0][0] + 2][indexe(level, human)[0][1]] == "o":
level[indexe(level, human)[0][0]+ 2][indexe(level, human)[0][1] ] = "s"
level[indexe(level, human)[0][0]+ 1][indexe(level, human)[0][1] ] = "h"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
elif level[indexe(level, human)[0][0] + 2][indexe(level, human)[0][1]] == "p":
level[indexe(level, human)[0][0]+ 2][indexe(level, human)[0][1] ] = "c"
level[indexe(level, human)[0][0] + 1][indexe(level, human)[0][1]] = "h"
if level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] in ("i", "j", "k", "l"):
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "o"
else:
level[indexe(level, human)[0][0]][indexe(level, human)[0][1]] = "p"
return level
def printmap(screen,level):
for i in range(len(level)):
for j in range(len(level[i])):
if level[i][j]=="x":
screen.blit(wall,[width_field*j,width_field*i])
elif level[i][j]=="o":
screen.blit(ground, [width_field * j, width_field * i])
elif level[i][j]=="s":
screen.blit(store, [width_field * j, width_field * i])
elif level[i][j]=="p":
screen.blit(object, [width_field * j, width_field * i])
elif level[i][j] == "c":
screen.blit(object_store, [width_field * j, width_field * i])
elif level[i][j] == "i":
screen.blit(mover_left, [width_field * j, width_field * i])
elif level[i][j] == "j":
screen.blit(mover_up, [width_field * j, width_field * i])
elif level[i][j] == "k":
screen.blit(mover_right, [width_field * j, width_field * i])
elif level[i][j] == "l":
screen.blit(mover_down, [width_field * j, width_field * i])
elif level[i][j] == "e":
screen.blit(store_left, [width_field * j, width_field * i])
elif level[i][j] == "f":
screen.blit(store_up, [width_field * j, width_field * i])
elif level[i][j] == "g":
screen.blit(store_right, [width_field * j, width_field * i])
elif level[i][j] == "h":
screen.blit(store_down, [width_field * j, width_field * i])
return None
| 62.320833 | 101 | 0.508926 | 2,292 | 14,957 | 3.305846 | 0.024869 | 0.385377 | 0.608156 | 0.47565 | 0.946813 | 0.943117 | 0.943117 | 0.939158 | 0.939158 | 0.939158 | 0 | 0.057263 | 0.238751 | 14,957 | 239 | 102 | 62.58159 | 0.608203 | 0 | 0 | 0.726457 | 0 | 0 | 0.015311 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03139 | false | 0 | 0.013453 | 0 | 0.076233 | 0.004484 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
82dd610bc514cd5c146316af67d68805315d4fce | 947 | py | Python | 0x05-python-exceptions/dev/0-main.py | johncoleman83/bootcampschool-higher_level_programming | a83c3b7092cfe893c87e495f8d8eec9228c9b808 | ["MIT"] | null | null | null | 0x05-python-exceptions/dev/0-main.py | johncoleman83/bootcampschool-higher_level_programming | a83c3b7092cfe893c87e495f8d8eec9228c9b808 | ["MIT"] | null | null | null | 0x05-python-exceptions/dev/0-main.py | johncoleman83/bootcampschool-higher_level_programming | a83c3b7092cfe893c87e495f8d8eec9228c9b808 | ["MIT"] | 1 | 2020-09-25T17:54:36.000Z | 2020-09-25T17:54:36.000Z |
#!/usr/bin/python3
safe_print_list = __import__('0-safe_print_list').safe_print_list
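# __import__ is needed above because '0-safe_print_list' starts with a digit and cannot be imported with a normal import statement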
my_list = [1, 2, 3, 4, 5]
a = []
b = ['hi', 'there', 'how', 'are', 'you']
c = [my_list, a, b]
d = [(5, 3, 6), 66, "string"]
e = [None, (4, 5)]
f = [5]
nb_print = safe_print_list(my_list, 2)
print("nb_print: {:d}".format(nb_print))
nb_print = safe_print_list(my_list, len(my_list))
print("nb_print: {:d}".format(nb_print))
nb_print = safe_print_list(my_list, len(my_list) + 2)
print("nb_print: {:d}".format(nb_print))
nb_print = safe_print_list(a, 2)
print("nb_print: {:d}".format(nb_print))
nb_print = safe_print_list(b, 2)
print("nb_print: {:d}".format(nb_print))
nb_print = safe_print_list(c, 2)
print("nb_print: {:d}".format(nb_print))
nb_print = safe_print_list(d, 2)
print("nb_print: {:d}".format(nb_print))
nb_print = safe_print_list(e, 2)
print("nb_print: {:d}".format(nb_print))
nb_print = safe_print_list(f, 2)
print("nb_print: {:d}".format(nb_print))
| 32.655172 | 65 | 0.680042 | 179 | 947 | 3.251397 | 0.178771 | 0.324742 | 0.350515 | 0.247423 | 0.819588 | 0.786942 | 0.786942 | 0.752577 | 0.706186 | 0.706186 | 0 | 0.027091 | 0.103485 | 947 | 28 | 66 | 33.821429 | 0.658422 | 0.017951 | 0 | 0.346154 | 0 | 0 | 0.17761 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.038462 | 0 | 0.038462 | 0.730769 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 10 |
815776b17f5a3bdd386584f3bd7ac1ad631d2bec | 110 | py | Python | str8.py | Kantheesh/Learning-Python | d2dc9f1b9f652e6a6d84028e86a1daf77551eb5f | ["MIT"] | null | null | null | str8.py | Kantheesh/Learning-Python | d2dc9f1b9f652e6a6d84028e86a1daf77551eb5f | ["MIT"] | null | null | null | str8.py | Kantheesh/Learning-Python | d2dc9f1b9f652e6a6d84028e86a1daf77551eb5f | ["MIT"] | null | null | null |
inp = "Ajay KumaR"
out = inp.istitle()
print(out)
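# istitle() is True only when every word starts with one uppercase letter followed by lowercase: "Ajay KumaR" fails, "Ajay Kumar" passes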
inp = "Ajay Kumar"
out = inp.istitle()
print(out)
| 12.222222 | 20 | 0.6 | 16 | 110 | 4.125 | 0.375 | 0.272727 | 0.363636 | 0.454545 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0.236364 | 110 | 9 | 21 | 12.222222 | 0.785714 | 0 | 0 | 0.666667 | 0 | 0 | 0.194175 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
81716cf121bd23dde90b04e6c1b298627555b801 | 40,095 | py | Python | app.py | code-entity/Wheelspace | 756eee8c82c6a8017acd94e52e531695b402128b | ["MIT"] | null | null | null | app.py | code-entity/Wheelspace | 756eee8c82c6a8017acd94e52e531695b402128b | ["MIT"] | null | null | null | app.py | code-entity/Wheelspace | 756eee8c82c6a8017acd94e52e531695b402128b | ["MIT"] | null | null | null |
from flask import Flask, redirect, render_template, url_for, session, request, flash
import json
import os
from flask_mysqldb import MySQL
import MySQLdb.cursors
from passlib.hash import sha256_crypt
from flask_mail import Message, Mail
from flask_fontawesome import FontAwesome
app = Flask(__name__)
fa = FontAwesome(app)
with open("db1.json","r") as f:
data= json.load(f)["data"]
app.secret_key= os.urandom(24)
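# Note: a fresh random secret key on every start means sessions do not survive restarts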
app.config['MYSQL_HOST'] = data["host"]
app.config['MYSQL_USER'] = data["db_user"]
app.config['MYSQL_PASSWORD'] = data["password"]
app.config['MYSQL_DB'] = data["db_name"]
app.config['MYSQL_PORT'] = data["port"]
app.config['MYSQL_UNIX_SOCKET'] =None
app.config['MYSQL_CONNECT_TIMEOUT'] =None
app.config['MYSQL_READ_DEFAULT_FILE'] =None
app.config['MYSQL_USE_UNICODE']=None
app.config['MYSQL_CHARSET']= None
app.config['MYSQL_SQL_MODE'] = None
app.config['MYSQL_CURSORCLASS'] = None
# For mail system
app.config['MAIL_SERVER']= data["mail_host"]
app.config['MAIL_PORT'] = data["mail_port"]
app.config['MAIL_USERNAME'] = data["mail_username"]
app.config['MAIL_PASSWORD'] = data["mail_pass"]
app.config['MAIL_USE_TLS'] = False
app.config['MAIL_USE_SSL'] = True
mail = Mail(app)
mysql = MySQL(app)
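# The route handlers below gate access on session flags ("user"/"staff"/"admin") that are presumably set at login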
@app.route('/')
def index():
return render_template('index.html')
@app.route('/contact')
def contact():
return render_template('contact.html')
@app.route('/userlayout')
def userlayout():
return render_template('userlayout.html')
@app.route('/add_loc')
def add_loc():
return render_template('adm_addloc.html')
@app.route('/feedback_form')
def feedback_form():
if "user" in session and session["user"] == True:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from parking_lot")
result = db.fetchall()
db.close()
return render_template('feedback.html',data=result)
else:
return redirect(url_for("userlog"))
@app.route('/usercontact')
def usercontact():
if "user" in session and session["user"] == True:
return render_template('usercontact.html')
else:
return redirect(url_for('userlog'))
@app.route('/admreg')
def admreg():
return render_template('admreg.html')
@app.route('/offline_user_book_admin')
def offline_user_book_admin():
if "admin" in session and session["admin"] == True:
return render_template('offline_user_book_admin.html')
else:
return redirect(url_for('admlog'))
@app.route('/staffreg')
def staffreg():
return render_template('staffreg.html')
@app.route('/staffreg_admin')
def staffreg_admin():
if "admin" in session and session["admin"] == True:
return render_template('staffreg_admin.html')
else:
return redirect(url_for('admlog'))
@app.route('/dash_staff')
def dash_staff():
if "staff" in session and session["staff"] == True:
db = mysql.connection.cursor()
db.execute("select count(*) from online_user")
result2 = db.fetchone()
db.execute("select count(*) from offline_user")
result3 = db.fetchone()
db.close()
return render_template('dash_staff.html',data=[result2,result3])
else:
return redirect(url_for('stafflog'))
@app.route('/dash_admin')
def dash_admin():
if "admin" in session and session["admin"] == True:
db = mysql.connection.cursor()
db.execute("select count(*) from staff")
result1 = db.fetchone()
db.execute("select count(*) from online_user")
result2 = db.fetchone()
db.execute("select count(*) from offline_user")
result3 = db.fetchone()
db.close()
return render_template('dash_admin.html',data=[result1,result2,result3])
else:
return redirect(url_for('admlog'))
@app.route('/dash_user')
def dash_user():
if "user" in session and session["user"] == True:
return render_template('dash_user.html')
else:
return redirect(url_for('userlog'))
@app.route('/search_results')
def search_results():
return render_template('search_results.html')
@app.route('/search_admin_results')
def search_admin_results():
if "admin" in session and session["admin"] == True:
return render_template('search_admin_results.html')
else:
return redirect(url_for("admlog"))
@app.route('/search_results_admin')
def search_results_admin():
if "admin" in session and session["admin"] == True:
return render_template('search_results_admin.html')
else:
return redirect(url_for("admlog"))
@app.route('/carreg')
def carreg():
if "staff" in session and session["staff"]==True:
return render_template('regcar.html')
else:
return redirect(url_for('stafflog'))
@app.route('/carreg_admin')
def carreg_admin():
if "admin" in session and session["admin"]==True:
return render_template('regcar_admin.html')
else:
return redirect(url_for('admlog'))
@app.route('/regular_user')
def regular():
if "staff" in session and session["staff"] == True:
return render_template('regular_user.html')
else:
return redirect(url_for('stafflog'))
@app.route('/regularadmin')
def regular_admin():
if "admin" in session and session["admin"] == True:
return render_template('regular_user_admin.html')
else:
return redirect(url_for('admlog'))
@app.route('/usereg')
def usereg():
return render_template('userreg.html')
@app.route('/usereg_admin')
def usereg_admin():
if "admin" in session and session["admin"] == True:
return render_template('userreg_admin.html')
else:
return redirect(url_for('admlog'))
@app.route('/onlinereg')
def onlinereg():
return render_template('onlinebook.html')
@app.route('/admlog')
def admlog():
return render_template('admlog.html')
@app.route("/onlines_list")
def onlines_list():
if "staff" in session or "admin" in session:
return render_template("onlines_list.html")
else:
return redirect(url_for("admlog"))
@app.route('/stafflog')
def stafflog():
return render_template('stafflog.html')
@app.route('/userlog')
def userlog():
return render_template('userlog.html')
@app.route('/admin_data', methods=['POST']) #admin registration data
def admin_data():
if request.method == 'POST':
email = request.form['mail']
name = request.form['nam']
pas = request.form['pass']
rpas = request.form['repass']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from admin where email=%s",(email,))
result = db.fetchone()
if result is not None:
flash("Email already taken", "error")
db.close()
return redirect(url_for('admreg'))
else:
if pas == rpas:
hash_pas = sha256_crypt.hash(pas)
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("INSERT INTO admin (email,name,password) VALUES (%s,%s,%s)",(email,name,hash_pas))
mysql.connection.commit()
db.close()
return redirect(url_for("admlog"))
else:
flash("Password did not match", "error")
return redirect(url_for('admreg'))
else:
flash("Some error occured try again","error")
return redirect(url_for('admreg'))
@app.route('/staff_data',methods=['POST']) #staff registration form data
def staff_data():
if request.method == 'POST':
name = request.form['nam']
mail = request.form['mail']
phone = request.form['phone']
pas = request.form['pass']
rpas = request.form['repass']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from staff where email=%s",(mail,))
result = db.fetchone()
if result is not None:
flash("Email already taken","error")
db.close()
return redirect(url_for("staffreg"))
else:
if pas == rpas:
hash_pas = sha256_crypt.hash(pas)
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into staff (name,email,phone,password) values(%s,%s,%s,%s)",(name,mail,phone,hash_pas))
mysql.connection.commit()
db.close()
return redirect(url_for("stafflog"))
else:
flash("Password did not match", "error")
return redirect(url_for('staffreg'))
else:
flash("Some error occured try again","error")
return redirect(url_for('staffreg'))
@app.route('/staff_data_admin',methods=['POST']) #staff registration form data for admin dashboard
def staff_data_admin():
if request.method == 'POST':
name = request.form['nam']
mail = request.form['mail']
phone = request.form['phone']
pas = request.form['pass']
rpas = request.form['repass']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from staff where email=%s",(mail,))
result = db.fetchone()
if result is not None:
flash("Email already taken","error")
db.close()
return redirect(url_for("staffreg_admin"))
else:
if pas == rpas:
hash_pas = sha256_crypt.hash(pas)
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into staff (name,email,phone,password) values(%s,%s,%s,%s)",(name,mail,phone,hash_pas))
mysql.connection.commit()
db.close()
flash("Staff registration successfull","success")
return redirect(url_for("dash_admin"))
else:
flash("Password did not match", "error")
return redirect(url_for('staffreg_admin'))
else:
flash("Some error occured try again","error")
return redirect(url_for('staffreg_admin'))
@app.route('/off_data',methods=['POST']) # first time offline data registration for staff dashboard
def off_data():
if request.method == 'POST':
name = request.form['nam']
mail = request.form['mail']
phone = request.form['phone']
vech_no = request.form['vech_no']
vech_type = request.form['vech_type']
lic_no = request.form['lic_no']
duration = request.form['dur']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select license_no from offline_user where license_no=%s",(lic_no,))
result = db.fetchone()
#print(result)
if result is not None:
flash("License Number used","error")
db.close()
return redirect(url_for('carreg'))
else:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from offline_user where email=%s", (mail,))
result1 = db.fetchone()
if result1 is not None:
flash("Email already used","error")
db.close()
return redirect(url_for('carreg'))
else:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into offline_user values (%s,%s,%s,%s,%s,%s,%s)",(lic_no,name,mail,phone,vech_no,vech_type,duration))
mysql.connection.commit()
db.close()
flash("Data inserted successfully","success")
return redirect(url_for('carreg'))
else:
flash("some error occured","error")
return redirect(url_for('stafflog'))
@app.route('/off_data_admin',methods=['POST']) # first time offline data registration for admin dashboard
def off_data_admin():
if request.method == 'POST':
name = request.form['nam']
mail = request.form['mail']
phone = request.form['phone']
vech_no = request.form['vech_no']
vech_type = request.form['vech_type']
lic_no = request.form['lic_no']
duration = request.form['dur']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select license_no from offline_user where license_no=%s",(lic_no,))
result = db.fetchone()
#print(result)
if result is not None:
flash("License Number used","error")
db.close()
return redirect(url_for('carreg_admin'))
else:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from offline_user where email=%s", (mail,))
result1 = db.fetchone()
if result1 is not None:
flash("Email already used","error")
db.close()
return redirect(url_for('carreg_admin'))
else:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into offline_user values (%s,%s,%s,%s,%s,%s,%s)",(lic_no,name,mail,phone,vech_no,vech_type,duration))
mysql.connection.commit()
db.close()
flash("Data inserted successfully","success")
return redirect(url_for('dash_admin'))
else:
flash("some error occured","error")
return redirect(url_for('admlog'))
@app.route('/off_extend_data', methods=["POST"]) #for multiple offline booking
def off_extend_user():
if request.method == 'POST':
lic_no = request.form['lic_no']
date = request.form['dat']
duration = request.form['dur']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
#db.execute("select license_no from offline_user where license_no=%s",(lic_no,))
#result = db.fetchone()
try:
db.execute("insert into offline_extended (license_no,dates,duration)values (%s,%s,%s)",(lic_no,date,duration))
mysql.connection.commit()
db.close()
flash("Parking Booked Successfully","success")
return redirect(url_for('dash_staff'))
except MySQLdb._exceptions.IntegrityError: #since license no is primary key in offline_user and foreign key in offline_extended table both license no should match
flash("Insert a valid license number","error")
return redirect(url_for('regular'))
else:
flash("Some error occured", "error")
return redirect(url_for('stafflog'))
@app.route('/off_extend_data_admin', methods=["POST"]) #for multiple offline booking for admin dashboard
def off_extend_user_data():
if request.method == 'POST':
lic_no = request.form['lic_no']
date = request.form['dat']
duration = request.form['dur']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
#db.execute("select license_no from offline_user where license_no=%s",(lic_no,))
#result = db.fetchone()
try:
db.execute("insert into offline_extended (license_no,dates,duration)values (%s,%s,%s)",(lic_no,date,duration))
mysql.connection.commit()
db.close()
flash("Parking Booked Successfully","success")
return redirect(url_for('dash_admin'))
except MySQLdb._exceptions.IntegrityError: #since license no is primary key in offline_user and foreign key in offline_extended table both license no should match
flash("Insert a valid license number","error")
return redirect(url_for('regular_admin'))
else:
flash("Some error occured", "error")
return redirect(url_for('admlog'))
@app.route('/admin_log', methods=['POST']) #admin login
def admin_log():
if request.method == 'POST':
mail = request.form['mail']
pas = request.form['pass']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from admin where email=%s",(mail,))
result = db.fetchone()
db.close()
if result is not None:
dec_hash = sha256_crypt.verify(pas,result["password"])
if dec_hash == True:
session["email"] = result['email']
session["name"] = result["name"]
session["admin"] = True
return redirect(url_for('dash_admin'))
else:
flash("Password incorrect","error")
return redirect(url_for('admlog'))
else:
flash("Email not found", "error")
return redirect(url_for('admlog'))
else:
flash("Some error occured try again", "error")
return redirect(url_for('admlog'))
@app.route('/staff_log',methods=['POST']) #staff login
def staff_log():
if request.method == 'POST':
mail = request.form['mail']
pas = request.form['pass']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from staff where email=%s",(mail,))
result=db.fetchone()
db.close()
if result is not None:
pas_verify = sha256_crypt.verify(pas,result["password"])
if pas_verify ==True:
session["staff_mail"] = result["email"]
session["staff_name"] = result["name"]
session["staff_phone"] = result["phone"]
session["staff"] = True
flash(f" {result['name']}","success")
return redirect(url_for('dash_staff'))
else:
flash("Password did not match","error")
return redirect(url_for('stafflog'))
else:
flash("Email not found","error")
return redirect(url_for('stafflog'))
else:
session.clear()
flash("Some error occured","error")
return redirect(url_for('stafflog'))
@app.route('/search_data', methods=['POST']) #to search users license no and book slot for offline users staff
def search_data():
if request.method == 'POST':
query = request.form['search']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from offline_user where license_no=%s",(query,))
result = db.fetchone()
db.close()
return render_template('search_results.html',data=result)
@app.route('/search_data_admin', methods=['POST']) #to search users license no and book slot for offline users for admin dashboard
def search_data_admin():
if request.method == 'POST':
query = request.form['search']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from offline_user where license_no=%s",(query,))
result = db.fetchone()
db.close()
return render_template('search_results_admin.html',data=result)
@app.route('/book/<string:id>') #books slot for offlne users for staff dashboard
def book(id):
d = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select license_no from offline_user where license_no=%s",([d])) #[d] bcz tuple is not iteratable so used list to traverse
result = db.fetchone()
db.close()
if result is not None:
return render_template('regular_user.html',data = result)
else:
return redirect(url_for('stafflog'))
@app.route('/book_admin/<string:id>') #books slot for offlne users for admin dashboard
def book_admin(id):
d = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select license_no from offline_user where license_no=%s",([d])) #[d] bcz tuple is not iteratable so used list to traverse
result = db.fetchone()
db.close()
if result is not None:
return render_template('regular_user_admin.html',data = result)
else:
return redirect(url_for('admlog'))
@app.route("/online_data",methods=['POST']) #online user registration for all users
def online_data():
if request.method == 'POST':
name = request.form['nam']
email = request.form['mail']
phone = request.form['phone']
lic_no = request.form['lic_no']
pas = request.form['pass']
rpas = request.form['repass']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from online_user where license_no=%s",(lic_no,))
result = db.fetchone()
db.close()
if result is not None:
flash("License number is already used","error")
return redirect(url_for('usereg'))
else:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from online_user where email=%s", (email,))
result = db.fetchone()
db.close()
if result is not None:
flash("Email is already used", "error")
return redirect(url_for('usereg'))
else:
if pas == rpas:
hash_pas = sha256_crypt.hash(pas)
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into online_user values(%s,%s,%s,%s,%s)",(lic_no,name,email,phone,hash_pas))
mysql.connection.commit()
db.close()
flash("Registration successfull","success")
return redirect(url_for('userlog'))
else:
flash("Password did not match","error")
return redirect(url_for("usereg"))
else:
flash("Some error occured", "error")
return redirect(url_for("usereg"))
@app.route("/online_data_admin",methods=['POST']) #online user registration for admin dashboard
def online_data_admin():
if request.method == 'POST':
name = request.form['nam']
email = request.form['mail']
phone = request.form['phone']
lic_no = request.form['lic_no']
pas = request.form['pass']
rpas = request.form['repass']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from online_user where license_no=%s",(lic_no,))
result = db.fetchone()
db.close()
if result is not None:
flash("License number is already used","error")
return redirect(url_for('usereg_admin'))
else:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select email from online_user where email=%s", (email,))
result = db.fetchone()
db.close()
if result is not None:
flash("Email is already used", "error")
return redirect(url_for('usereg_admin'))
else:
if pas == rpas:
hash_pas = sha256_crypt.hash(pas)
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into online_user values(%s,%s,%s,%s,%s)",(lic_no,name,email,phone,hash_pas))
mysql.connection.commit()
db.close()
flash("User Registration successfull","success")
return redirect(url_for('dash_admin'))
else:
flash("Password did not match","error")
return redirect(url_for("usereg_admin"))
else:
flash("Some error occured", "error")
return redirect(url_for("usereg"))
@app.route("/user_log",methods=["POST"]) #online user login
def user_log():
if request.method == 'POST':
email = request.form['mail']
pas = request.form['pass']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from online_user where email=%s",(email,))
result = db.fetchone()
db.close()
if result is not None:
pas_verify = sha256_crypt.verify(pas, result["password"])
if pas_verify == True:
session["user_name"] = result["name"]
session["user_mail"] = result['email']
session["user"] = True
session["lic_no"] = result["license_no"]
session["phone"] = result["phone"]
flash(f"{result['name']}","success")
return redirect(url_for("dash_user"))
else:
flash("Password did not match", "error")
return redirect(url_for("userlog"))
else:
flash("Email not found", "error")
return redirect(url_for("userlog"))
else:
flash("Some error occured", "error")
return redirect(url_for("userlog"))
@app.route("/booking/<id>") # online user booking
def booking(id):
if "user" in session and session["user"] == True:
data = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select license_no from online_user where license_no=%s",(data,))
result = db.fetchone()
db.execute("select * from parking_lot")
result1 = db.fetchall()
db.close()
if result is not None:
return render_template("onlinebook.html", dat = result["license_no"],datas=result1)
else:
flash("Some error occured try again","error") #if he uses to enter his own or other lic no in url provides error
session.clear()
return redirect(url_for('userlog'))
else:
session.clear()
flash("Some error occured try again", "error")
return redirect(url_for('userlog'))
@app.route("/confirm_booking",methods=["POST"]) # inserting vehicle details to db of online user after log in
def confirm_booking():
if "user" in session and session["user"] == True:
if request.method == 'POST':
vech_no = request.form["vech_no"]
vech_type = request.form["vech_type"]
lic_no = request.form["lic_no"]
duration = request.form["dur"]
date = request.form["dat"]
lot_id = request.form["loc"]
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into online_booking (license_no,vech_no,vech_type,duration,dates,lot_id) values (%s,%s,%s,%s,%s,%s)",(lic_no,vech_no,vech_type,duration,date,lot_id))
mysql.connection.commit()
db.close()
flash("Parking booked successfully","success")
return redirect(url_for("dash_user"))
else:
session.clear()
flash("some error occured", "error")
return redirect(url_for("userlog"))
else:
session.clear()
flash("some error occured","error")
return redirect(url_for("userlog"))
@app.route("/online_list",methods=["POST","GET"]) #online booking list for admin datewise
def online_list():
if "admin" in session and session["admin"] ==True:
db=mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from online_booking order by dates desc")
result = db.fetchall()
db.close()
if result is not None:
return render_template("onlines_list.html", data=result)
else:
return redirect(url_for("admlog"))
@app.route("/online_list_staffs",methods=["POST","GET"]) #online booking list for staff datewise
def online_list_staffs():
if "staff" in session and session["staff"] ==True:
db=mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from online_booking order by dates desc")
result = db.fetchall()
db.close()
if result is not None:
return render_template("online_list_staff.html", data=result)
else:
return redirect(url_for("stafflog"))
@app.route("/search_admin_data",methods=["POST"]) #search bar data for admin
def search_admin_data():
if "admin" in session and session["admin"] == True:
if request.method == 'POST':
query = request.form["search"]
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from online_user where license_no=%s",(query,))
result = db.fetchone()
db.close()
if result is not None:
return render_template("search_admin_results.html",data=result)
else:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from offline_user where license_no=%s", (query,))
result = db.fetchone()
db.close()
if result is not None:
return render_template("search_admin_results.html", data=result)
else:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from staff where name=%s", (query,))
result = db.fetchone()
db.close()
if result is not None:
return render_template("search_admin_results.html", data=result)
else:
return redirect(url_for("search_admin_results"))
else:
return redirect(url_for("admlog"))
@app.route("/online_list_edit") #display all online users to the admin
def online_list_edit():
if "admin" in session and session["admin"]==True:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from online_user")
result = db.fetchall()
db.close()
return render_template("online_list_edit.html",data=result)
else:
return redirect(url_for("admlog"))
@app.route("/edit/<id>") #display data of selected user for editing admin dashboard
def online_edit(id):
if "admin" in session and session["admin"] ==True:
lic = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from online_user where license_no=%s",(lic,))
result = db.fetchone()
db.close()
return render_template("online_data_edit.html",data=result)
else:
return redirect(url_for("admlog"))
@app.route("/online_user_data_update/<id>",methods=["POST"]) #update edited online user data
def online_user_data_update(id):
if "admin" in session and session["admin"]==True:
lic = id
name = request.form["nam"]
mail = request.form["mail"]
phone = request.form["phone"]
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("update online_user set name=%s,email=%s,phone=%s where license_no=%s", (name,mail,phone,lic))
mysql.connection.commit()
db.close()
flash("Online User data updated Successfully","success")
return redirect(url_for("dash_admin"))
else:
return redirect(url_for("admlog"))
@app.route("/delete/<id>") #delete online user records from db admin dashboard
def online_delete(id):
if "admin" in session and session["admin"] ==True:
lic = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("delete from online_user where license_no=%s",(lic,))
mysql.connection.commit()
db.close()
flash("User deleted Successfully","success")
return redirect(url_for("dash_admin"))
else:
return redirect(url_for("admlog"))
@app.route("/offline_list_edit") #display all offline users to the admin (offline users)
def offline_list_edit():
if "admin" in session and session["admin"]==True:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from offline_user")
result = db.fetchall()
db.close()
return render_template("offline_list_edit.html",data=result)
else:
return redirect(url_for("admlog"))
@app.route("/edit_offline/<id>") #display data of selected user for editing admin dashboard(offline users)
def offline_edit(id):
if "admin" in session and session["admin"] ==True:
lic = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from offline_user where license_no=%s",(lic,))
result = db.fetchone()
db.close()
return render_template("offline_data_edit.html",data=result)
else:
return redirect(url_for("admlog"))
@app.route("/offline_user_data_update/<id>",methods=["POST"]) #update edited offline user data
def offline_user_data_update(id):
if "admin" in session and session["admin"]==True:
lic = id
name = request.form["nam"]
mail = request.form["mail"]
phone = request.form["phone"]
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("update offline_user set name=%s,email=%s,phone=%s where license_no=%s", (name,mail,phone,lic))
mysql.connection.commit()
db.close()
flash("Offline User data updated Successfully","success")
return redirect(url_for("dash_admin"))
else:
return redirect(url_for("admlog"))
@app.route("/delete_offline/<id>") #delete offline user records from db admin dashboard
def offline_delete(id):
if "admin" in session and session["admin"] ==True:
lic = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("delete from offline_user where license_no=%s",(lic,))
mysql.connection.commit()
db.close()
flash("User deleted Successfully","success")
return redirect(url_for("dash_admin"))
else:
return redirect(url_for("admlog"))
@app.route("/staff_list_edit") #display all staff users to the admin (staff users)
def staff_list_edit():
if "admin" in session and session["admin"]==True:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from staff")
result = db.fetchall()
db.close()
return render_template("staff_list_edit.html",data=result)
else:
return redirect(url_for("admlog"))
@app.route("/edit_staff/<int:id>") #display data of staff for editing admin dashboard
def staff_edit(id):
if "admin" in session and session["admin"] ==True:
lic = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from staff where staff_id=%s",(lic,))
result = db.fetchone()
db.close()
return render_template("staff_data_edit.html",data=result)
else:
return redirect(url_for("admlog"))
@app.route("/staff_user_data_update/<int:id>",methods=["POST"]) #update edited staff user data
def staff_user_data_update(id):
if "admin" in session and session["admin"]==True:
lic = id
name = request.form["nam"]
mail = request.form["mail"]
phone = request.form["phone"]
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("update staff set name=%s,email=%s,phone=%s where staff_id=%s", (name,mail,phone,lic))
mysql.connection.commit()
db.close()
flash("Staff data updated Successfully","success")
return redirect(url_for("dash_admin"))
else:
return redirect(url_for("admlog"))
@app.route("/delete_staff/<int:id>") #delete staff user records from db admin dashboard
def staff_delete(id):
if "admin" in session and session["admin"] ==True:
lic = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("delete from staff where staff_id=%s",(lic,))
mysql.connection.commit()
db.close()
flash("Staff deleted Successfully","success")
return redirect(url_for("dash_admin"))
else:
return redirect(url_for("admlog"))
@app.route('/all_mail',methods=["POST"]) #general query help line
def all_mail():
if request.method == 'POST':
data = request.form["mail"]
msg1 = request.form["msg"]
msg = Message("[GENERAL] Queries",sender=data,recipients=["harshithkumar40@gmail.com"])
msg.body = msg1
mail.send(msg)
return redirect(url_for("index"))
@app.route('/user_mail',methods=["POST"]) #query related booking
def user_mail():
if request.method == 'POST':
data = request.form["mail"]
msg1 = request.form["msg"]
msg = Message("[BOOKING] related Query",sender=data,recipients=["harshithkumar40@gmail.com"])
msg.body = msg1
mail.send(msg)
return redirect(url_for("dash_user"))
@app.route('/history/<id>') #history to track of users booking with date and vehicles
def history(id):
if "user" in session and session["user"] == True:
data = id
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from online_booking where license_no=%s",(data,))
result = db.fetchall()
return render_template("bookhistory.html",data=result)
else:
return redirect(url_for("userlog"))
@app.route('/feedback',methods=['GET']) #for feedback user dashboard
def feedback():
name = request.args.get("nam")
msg = request.args.get("msg")
loc_id = request.args.get("loc")
lic = request.args.get("lic")
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into feedback(license_no,message,location,name) values (%s,%s,%s,%s)",(lic,msg,loc_id,name))
mysql.connection.commit()
db.close()
flash("Feedback Submitted Successfully","success")
return redirect(url_for("dash_user"))
@app.route('/add_location', methods=["POST"])
def add_location():
if request.method == 'POST':
loc = request.form["loc"]
pin = request.form['pincode']
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("insert into parking_lot(location,pincode)values(%s,%s)",(loc,pin))
mysql.connection.commit()
db.close()
flash("location added Successfully","success")
return redirect(url_for("dash_admin"))
else:
return redirect(url_for("admlog"))
@app.route('/logout_admin')
def logout_admin():
session.clear()
return redirect(url_for('index'))
@app.route('/logout_staff')
def logout_staff():
session.clear()
return redirect(url_for('index'))
@app.route('/logout_user')
def logout_user():
session.clear()
return redirect(url_for('index'))
#################################feedback
@app.route('/feedback_form_user')
def feedback_form():
if "user" in session and session["user"] == True:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from parking_lot")
result = db.fetchall()
db.close()
return render_template('feedback.html',data=result)
else:
return redirect(url_for("userlog"))
@app.route('/feedback_form_staff')
def feedback_form():
if "staff" in session and session["staff"] == True:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from parking_lot")
result = db.fetchall()
db.close()
return render_template('feedback.html',data=result)
else:
return redirect(url_for("safflog"))
@app.route('/feedback_form_parking')
def feedback_form():
if "staff" in session and session["staff"] == True:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from parking_lot")
result = db.fetchall()
db.close()
return render_template('feedback.html',data=result)
else:
return redirect(url_for("safflog"))
@app.route('/feedback_form_parking')
def feedback_form():
if "user" in session and session["user"] == True:
db = mysql.connection.cursor(MySQLdb.cursors.DictCursor)
db.execute("select * from parking_lot")
result = db.fetchall()
db.close()
return render_template('feedback.html',data=result)
else:
return redirect(url_for("park"))
if __name__ == '__main__':
app.run(debug=True)
| 39.002918
| 180
| 0.618631
| 4,911
| 40,095
| 4.927306
| 0.055589
| 0.026283
| 0.073766
| 0.086784
| 0.823126
| 0.807587
| 0.785767
| 0.762129
| 0.729606
| 0.709852
| 0
| 0.001784
| 0.245143
| 40,095
| 1,027
| 181
| 39.040896
| 0.797727
| 0
| 0
| 0.687023
| 0
| 0.004362
| 0.226396
| 0.029975
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.032715
| 0.008724
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
81c8271400bfad89d18fca0c11713f0cf16acd51
| 166
|
py
|
Python
|
teamcity/flake8_plugin.py
|
MyBook/teamcity-messages
|
328402756f83a9b1e060ab190d12372841d3c5ee
|
[
"Apache-2.0"
] | null | null | null |
teamcity/flake8_plugin.py
|
MyBook/teamcity-messages
|
328402756f83a9b1e060ab190d12372841d3c5ee
|
[
"Apache-2.0"
] | null | null | null |
teamcity/flake8_plugin.py
|
MyBook/teamcity-messages
|
328402756f83a9b1e060ab190d12372841d3c5ee
|
[
"Apache-2.0"
] | null | null | null |
try:
from flake8.formatting import base # noqa
except ImportError:
from .flake8_v2_plugin import * # noqa
else:
from .flake8_v3_plugin import * # noqa
| 23.714286
| 46
| 0.710843
| 22
| 166
| 5.181818
| 0.590909
| 0.263158
| 0.280702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03876
| 0.222892
| 166
| 6
| 47
| 27.666667
| 0.844961
| 0.084337
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
481769ee86335b366a966b1157abf940852b8f42
| 422
|
py
|
Python
|
colour/algebra/coordinates/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | 2
|
2020-05-03T20:15:42.000Z
|
2021-04-09T18:19:06.000Z
|
colour/algebra/coordinates/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | null | null | null |
colour/algebra/coordinates/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | 1
|
2019-12-11T19:48:27.000Z
|
2019-12-11T19:48:27.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .transformations import (
cartesian_to_spherical, spherical_to_cartesian, cartesian_to_polar,
polar_to_cartesian, cartesian_to_cylindrical, cylindrical_to_cartesian)
__all__ = [
'cartesian_to_spherical', 'spherical_to_cartesian', 'cartesian_to_polar',
'polar_to_cartesian', 'cartesian_to_cylindrical',
'cylindrical_to_cartesian'
]
| 30.142857
| 77
| 0.7891
| 48
| 422
| 6.25
| 0.291667
| 0.22
| 0.266667
| 0.293333
| 0.773333
| 0.773333
| 0.773333
| 0.773333
| 0.773333
| 0.773333
| 0
| 0.002703
| 0.123223
| 422
| 13
| 78
| 32.461538
| 0.808108
| 0.049763
| 0
| 0
| 0
| 0
| 0.320802
| 0.230576
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4863572ed141a129334704c5d439ebc35338dc5e
| 174
|
py
|
Python
|
erpnext_quota/tasks.py
|
victor-abz/erpnext_quota
|
1c806e0e71be9820c4369546d3acee49ead8e53a
|
[
"MIT"
] | 34
|
2020-09-04T09:26:37.000Z
|
2022-03-21T18:49:00.000Z
|
erpnext_quota/tasks.py
|
victor-abz/erpnext_quota
|
1c806e0e71be9820c4369546d3acee49ead8e53a
|
[
"MIT"
] | 3
|
2020-12-15T14:25:50.000Z
|
2021-05-16T06:46:10.000Z
|
erpnext_quota/tasks.py
|
victor-abz/erpnext_quota
|
1c806e0e71be9820c4369546d3acee49ead8e53a
|
[
"MIT"
] | 48
|
2020-06-07T15:54:33.000Z
|
2022-03-15T05:56:19.000Z
|
from erpnext_quota.erpnext_quota.quota import validate_files_space_limit, validate_db_space_limit
def daily():
validate_files_space_limit()
validate_db_space_limit()
| 34.8
| 97
| 0.844828
| 25
| 174
| 5.32
| 0.44
| 0.300752
| 0.270677
| 0.345865
| 0.646617
| 0.646617
| 0.646617
| 0.646617
| 0
| 0
| 0
| 0
| 0.097701
| 174
| 5
| 98
| 34.8
| 0.847134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6fe021832d05126abbd66b46c9ca5f10f45e9b34
| 91
|
py
|
Python
|
Cluster head/meta_data.py
|
wanglabmccc/iiot_project
|
cd7cc4c9b888d8c74092dac01a5f1ab4648f91ad
|
[
"MIT"
] | null | null | null |
Cluster head/meta_data.py
|
wanglabmccc/iiot_project
|
cd7cc4c9b888d8c74092dac01a5f1ab4648f91ad
|
[
"MIT"
] | null | null | null |
Cluster head/meta_data.py
|
wanglabmccc/iiot_project
|
cd7cc4c9b888d8c74092dac01a5f1ab4648f91ad
|
[
"MIT"
] | null | null | null |
DEVICE_ID = "ap02"
IIOT_SERVER_ADDR = "140.113.179.7"
NTP_SERVER_ADDR = "140.113.179.7"
| 22.75
| 35
| 0.703297
| 17
| 91
| 3.470588
| 0.647059
| 0.338983
| 0.440678
| 0.542373
| 0.677966
| 0.677966
| 0
| 0
| 0
| 0
| 0
| 0.278481
| 0.131868
| 91
| 3
| 36
| 30.333333
| 0.468354
| 0
| 0
| 0
| 0
| 0
| 0.340909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b51300ba85725b38bed4ef352f4ecbc284f02657
| 103,229
|
py
|
Python
|
sdk/python/pulumi_oci/database/autonomous_container_database.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/database/autonomous_container_database.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/database/autonomous_container_database.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['AutonomousContainerDatabaseArgs', 'AutonomousContainerDatabase']
@pulumi.input_type
class AutonomousContainerDatabaseArgs:
def __init__(__self__, *,
display_name: pulumi.Input[str],
patch_model: pulumi.Input[str],
autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
backup_config: Optional[pulumi.Input['AutonomousContainerDatabaseBackupConfigArgs']] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
db_unique_name: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
key_store_id: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
maintenance_window_details: Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']] = None,
peer_autonomous_container_database_backup_config: Optional[pulumi.Input['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']] = None,
peer_autonomous_container_database_compartment_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_display_name: Optional[pulumi.Input[str]] = None,
peer_autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
peer_db_unique_name: Optional[pulumi.Input[str]] = None,
protection_mode: Optional[pulumi.Input[str]] = None,
rotate_key_trigger: Optional[pulumi.Input[bool]] = None,
service_level_agreement_type: Optional[pulumi.Input[str]] = None,
standby_maintenance_buffer_in_days: Optional[pulumi.Input[int]] = None,
vault_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a AutonomousContainerDatabase resource.
:param pulumi.Input[str] display_name: (Updatable) The display name for the Autonomous Container Database.
:param pulumi.Input[str] patch_model: (Updatable) Database Patch model preference.
:param pulumi.Input[str] autonomous_exadata_infrastructure_id: The OCID of the Autonomous Exadata Infrastructure.
:param pulumi.Input[str] autonomous_vm_cluster_id: The OCID of the Autonomous VM Cluster.
:param pulumi.Input['AutonomousContainerDatabaseBackupConfigArgs'] backup_config: (Updatable) Backup options for the Autonomous Container Database.
:param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the Autonomous Container Database.
:param pulumi.Input[str] db_unique_name: The `DB_UNIQUE_NAME` of the Oracle Database being backed up.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] key_store_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the key store.
:param pulumi.Input[str] kms_key_id: The OCID of the key container that is used as the master encryption key in database transparent data encryption (TDE) operations.
:param pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs'] maintenance_window_details: (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
:param pulumi.Input[str] peer_autonomous_container_database_compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where the standby Autonomous Container Database will be created.
:param pulumi.Input[str] peer_autonomous_container_database_display_name: The display name for the peer Autonomous Container Database.
:param pulumi.Input[str] peer_autonomous_exadata_infrastructure_id: The OCID of the peer Autonomous Exadata Infrastructure for autonomous dataguard.
:param pulumi.Input[str] peer_autonomous_vm_cluster_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the peer Autonomous VM cluster for Autonomous Data Guard. Required to enable Data Guard.
:param pulumi.Input[str] peer_db_unique_name: The `DB_UNIQUE_NAME` of the peer Autonomous Container Database in a Data Guard association.
:param pulumi.Input[str] protection_mode: The protection mode of this Autonomous Data Guard association. For more information, see [Oracle Data Guard Protection Modes](http://docs.oracle.com/database/122/SBYDB/oracle-data-guard-protection-modes.htm#SBYDB02000) in the Oracle Data Guard documentation.
:param pulumi.Input[bool] rotate_key_trigger: (Updatable) An optional property when flipped triggers rotation of KMS key. It is only applicable on dedicated container databases i.e. where `autonomous_exadata_infrastructure_id` is set.
:param pulumi.Input[str] service_level_agreement_type: The service level agreement type of the Autonomous Container Database. The default is STANDARD. For an autonomous dataguard Autonomous Container Database, the specified Autonomous Exadata Infrastructure must be associated with a remote Autonomous Exadata Infrastructure.
:param pulumi.Input[int] standby_maintenance_buffer_in_days: (Updatable) The scheduling detail for the quarterly maintenance window of standby Autonomous Container Database. This value represents the number of days before the primary database maintenance schedule.
:param pulumi.Input[str] vault_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure [vault](https://docs.cloud.oracle.com/iaas/Content/KeyManagement/Concepts/keyoverview.htm#concepts).
"""
pulumi.set(__self__, "display_name", display_name)
pulumi.set(__self__, "patch_model", patch_model)
if autonomous_exadata_infrastructure_id is not None:
pulumi.set(__self__, "autonomous_exadata_infrastructure_id", autonomous_exadata_infrastructure_id)
if autonomous_vm_cluster_id is not None:
pulumi.set(__self__, "autonomous_vm_cluster_id", autonomous_vm_cluster_id)
if backup_config is not None:
pulumi.set(__self__, "backup_config", backup_config)
if compartment_id is not None:
pulumi.set(__self__, "compartment_id", compartment_id)
if db_unique_name is not None:
pulumi.set(__self__, "db_unique_name", db_unique_name)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if key_store_id is not None:
pulumi.set(__self__, "key_store_id", key_store_id)
if kms_key_id is not None:
pulumi.set(__self__, "kms_key_id", kms_key_id)
if maintenance_window_details is not None:
pulumi.set(__self__, "maintenance_window_details", maintenance_window_details)
if peer_autonomous_container_database_backup_config is not None:
pulumi.set(__self__, "peer_autonomous_container_database_backup_config", peer_autonomous_container_database_backup_config)
if peer_autonomous_container_database_compartment_id is not None:
pulumi.set(__self__, "peer_autonomous_container_database_compartment_id", peer_autonomous_container_database_compartment_id)
if peer_autonomous_container_database_display_name is not None:
pulumi.set(__self__, "peer_autonomous_container_database_display_name", peer_autonomous_container_database_display_name)
if peer_autonomous_exadata_infrastructure_id is not None:
pulumi.set(__self__, "peer_autonomous_exadata_infrastructure_id", peer_autonomous_exadata_infrastructure_id)
if peer_autonomous_vm_cluster_id is not None:
pulumi.set(__self__, "peer_autonomous_vm_cluster_id", peer_autonomous_vm_cluster_id)
if peer_db_unique_name is not None:
pulumi.set(__self__, "peer_db_unique_name", peer_db_unique_name)
if protection_mode is not None:
pulumi.set(__self__, "protection_mode", protection_mode)
if rotate_key_trigger is not None:
pulumi.set(__self__, "rotate_key_trigger", rotate_key_trigger)
if service_level_agreement_type is not None:
pulumi.set(__self__, "service_level_agreement_type", service_level_agreement_type)
if standby_maintenance_buffer_in_days is not None:
pulumi.set(__self__, "standby_maintenance_buffer_in_days", standby_maintenance_buffer_in_days)
if vault_id is not None:
pulumi.set(__self__, "vault_id", vault_id)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Input[str]:
"""
(Updatable) The display name for the Autonomous Container Database.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: pulumi.Input[str]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="patchModel")
def patch_model(self) -> pulumi.Input[str]:
"""
(Updatable) Database Patch model preference.
"""
return pulumi.get(self, "patch_model")
@patch_model.setter
def patch_model(self, value: pulumi.Input[str]):
pulumi.set(self, "patch_model", value)
@property
@pulumi.getter(name="autonomousExadataInfrastructureId")
def autonomous_exadata_infrastructure_id(self) -> Optional[pulumi.Input[str]]:
"""
The OCID of the Autonomous Exadata Infrastructure.
"""
return pulumi.get(self, "autonomous_exadata_infrastructure_id")
@autonomous_exadata_infrastructure_id.setter
def autonomous_exadata_infrastructure_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "autonomous_exadata_infrastructure_id", value)
@property
@pulumi.getter(name="autonomousVmClusterId")
def autonomous_vm_cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
The OCID of the Autonomous VM Cluster.
"""
return pulumi.get(self, "autonomous_vm_cluster_id")
@autonomous_vm_cluster_id.setter
def autonomous_vm_cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "autonomous_vm_cluster_id", value)
@property
@pulumi.getter(name="backupConfig")
def backup_config(self) -> Optional[pulumi.Input['AutonomousContainerDatabaseBackupConfigArgs']]:
"""
(Updatable) Backup options for the Autonomous Container Database.
"""
return pulumi.get(self, "backup_config")
@backup_config.setter
def backup_config(self, value: Optional[pulumi.Input['AutonomousContainerDatabaseBackupConfigArgs']]):
pulumi.set(self, "backup_config", value)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the Autonomous Container Database.
"""
return pulumi.get(self, "compartment_id")
@compartment_id.setter
def compartment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "compartment_id", value)
@property
@pulumi.getter(name="dbUniqueName")
def db_unique_name(self) -> Optional[pulumi.Input[str]]:
"""
The `DB_UNIQUE_NAME` of the Oracle Database being backed up.
"""
return pulumi.get(self, "db_unique_name")
@db_unique_name.setter
def db_unique_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "db_unique_name", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter(name="keyStoreId")
def key_store_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the key store.
"""
return pulumi.get(self, "key_store_id")
@key_store_id.setter
def key_store_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_store_id", value)
@property
@pulumi.getter(name="kmsKeyId")
def kms_key_id(self) -> Optional[pulumi.Input[str]]:
"""
The OCID of the key container that is used as the master encryption key in database transparent data encryption (TDE) operations.
"""
return pulumi.get(self, "kms_key_id")
@kms_key_id.setter
def kms_key_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kms_key_id", value)
@property
@pulumi.getter(name="maintenanceWindowDetails")
def maintenance_window_details(self) -> Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']]:
"""
(Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
"""
return pulumi.get(self, "maintenance_window_details")
@maintenance_window_details.setter
def maintenance_window_details(self, value: Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']]):
pulumi.set(self, "maintenance_window_details", value)
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseBackupConfig")
def peer_autonomous_container_database_backup_config(self) -> Optional[pulumi.Input['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']]:
return pulumi.get(self, "peer_autonomous_container_database_backup_config")
@peer_autonomous_container_database_backup_config.setter
def peer_autonomous_container_database_backup_config(self, value: Optional[pulumi.Input['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']]):
pulumi.set(self, "peer_autonomous_container_database_backup_config", value)
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseCompartmentId")
def peer_autonomous_container_database_compartment_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where the standby Autonomous Container Database will be created.
"""
return pulumi.get(self, "peer_autonomous_container_database_compartment_id")
@peer_autonomous_container_database_compartment_id.setter
def peer_autonomous_container_database_compartment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_autonomous_container_database_compartment_id", value)
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseDisplayName")
def peer_autonomous_container_database_display_name(self) -> Optional[pulumi.Input[str]]:
"""
The display name for the peer Autonomous Container Database.
"""
return pulumi.get(self, "peer_autonomous_container_database_display_name")
@peer_autonomous_container_database_display_name.setter
def peer_autonomous_container_database_display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_autonomous_container_database_display_name", value)
@property
@pulumi.getter(name="peerAutonomousExadataInfrastructureId")
def peer_autonomous_exadata_infrastructure_id(self) -> Optional[pulumi.Input[str]]:
"""
The OCID of the peer Autonomous Exadata Infrastructure for autonomous dataguard.
"""
return pulumi.get(self, "peer_autonomous_exadata_infrastructure_id")
@peer_autonomous_exadata_infrastructure_id.setter
def peer_autonomous_exadata_infrastructure_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_autonomous_exadata_infrastructure_id", value)
@property
@pulumi.getter(name="peerAutonomousVmClusterId")
def peer_autonomous_vm_cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the peer Autonomous VM cluster for Autonomous Data Guard. Required to enable Data Guard.
"""
return pulumi.get(self, "peer_autonomous_vm_cluster_id")
@peer_autonomous_vm_cluster_id.setter
def peer_autonomous_vm_cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_autonomous_vm_cluster_id", value)
@property
@pulumi.getter(name="peerDbUniqueName")
def peer_db_unique_name(self) -> Optional[pulumi.Input[str]]:
"""
The `DB_UNIQUE_NAME` of the peer Autonomous Container Database in a Data Guard association.
"""
return pulumi.get(self, "peer_db_unique_name")
@peer_db_unique_name.setter
def peer_db_unique_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_db_unique_name", value)
@property
@pulumi.getter(name="protectionMode")
def protection_mode(self) -> Optional[pulumi.Input[str]]:
"""
The protection mode of this Autonomous Data Guard association. For more information, see [Oracle Data Guard Protection Modes](http://docs.oracle.com/database/122/SBYDB/oracle-data-guard-protection-modes.htm#SBYDB02000) in the Oracle Data Guard documentation.
"""
return pulumi.get(self, "protection_mode")
@protection_mode.setter
def protection_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "protection_mode", value)
@property
@pulumi.getter(name="rotateKeyTrigger")
def rotate_key_trigger(self) -> Optional[pulumi.Input[bool]]:
"""
(Updatable) An optional property when flipped triggers rotation of KMS key. It is only applicable on dedicated container databases i.e. where `autonomous_exadata_infrastructure_id` is set.
"""
return pulumi.get(self, "rotate_key_trigger")
@rotate_key_trigger.setter
def rotate_key_trigger(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "rotate_key_trigger", value)
@property
@pulumi.getter(name="serviceLevelAgreementType")
def service_level_agreement_type(self) -> Optional[pulumi.Input[str]]:
"""
The service level agreement type of the Autonomous Container Database. The default is STANDARD. For an autonomous dataguard Autonomous Container Database, the specified Autonomous Exadata Infrastructure must be associated with a remote Autonomous Exadata Infrastructure.
"""
return pulumi.get(self, "service_level_agreement_type")
@service_level_agreement_type.setter
def service_level_agreement_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_level_agreement_type", value)
@property
@pulumi.getter(name="standbyMaintenanceBufferInDays")
def standby_maintenance_buffer_in_days(self) -> Optional[pulumi.Input[int]]:
"""
        (Updatable) The scheduling detail for the quarterly maintenance window of the standby Autonomous Container Database. This value represents the number of days before the primary database maintenance schedule.
"""
return pulumi.get(self, "standby_maintenance_buffer_in_days")
@standby_maintenance_buffer_in_days.setter
def standby_maintenance_buffer_in_days(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "standby_maintenance_buffer_in_days", value)
@property
@pulumi.getter(name="vaultId")
def vault_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure [vault](https://docs.cloud.oracle.com/iaas/Content/KeyManagement/Concepts/keyoverview.htm#concepts).
"""
return pulumi.get(self, "vault_id")
@vault_id.setter
def vault_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vault_id", value)
@pulumi.input_type
class _AutonomousContainerDatabaseState:
def __init__(__self__, *,
autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
availability_domain: Optional[pulumi.Input[str]] = None,
backup_config: Optional[pulumi.Input['AutonomousContainerDatabaseBackupConfigArgs']] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
db_unique_name: Optional[pulumi.Input[str]] = None,
db_version: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
infrastructure_type: Optional[pulumi.Input[str]] = None,
key_store_id: Optional[pulumi.Input[str]] = None,
key_store_wallet_name: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
last_maintenance_run_id: Optional[pulumi.Input[str]] = None,
lifecycle_details: Optional[pulumi.Input[str]] = None,
maintenance_window: Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowArgs']] = None,
maintenance_window_details: Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']] = None,
next_maintenance_run_id: Optional[pulumi.Input[str]] = None,
patch_id: Optional[pulumi.Input[str]] = None,
patch_model: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_backup_config: Optional[pulumi.Input['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']] = None,
peer_autonomous_container_database_compartment_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_display_name: Optional[pulumi.Input[str]] = None,
peer_autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
peer_db_unique_name: Optional[pulumi.Input[str]] = None,
protection_mode: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None,
rotate_key_trigger: Optional[pulumi.Input[bool]] = None,
service_level_agreement_type: Optional[pulumi.Input[str]] = None,
standby_maintenance_buffer_in_days: Optional[pulumi.Input[int]] = None,
state: Optional[pulumi.Input[str]] = None,
time_created: Optional[pulumi.Input[str]] = None,
vault_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering AutonomousContainerDatabase resources.
:param pulumi.Input[str] autonomous_exadata_infrastructure_id: The OCID of the Autonomous Exadata Infrastructure.
:param pulumi.Input[str] autonomous_vm_cluster_id: The OCID of the Autonomous VM Cluster.
:param pulumi.Input[str] availability_domain: The availability domain of the Autonomous Container Database.
:param pulumi.Input['AutonomousContainerDatabaseBackupConfigArgs'] backup_config: (Updatable) Backup options for the Autonomous Container Database.
:param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the Autonomous Container Database.
:param pulumi.Input[str] db_unique_name: The `DB_UNIQUE_NAME` of the Oracle Database being backed up.
:param pulumi.Input[str] db_version: Oracle Database version of the Autonomous Container Database.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
:param pulumi.Input[str] display_name: (Updatable) The display name for the Autonomous Container Database.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] infrastructure_type: The infrastructure type this resource belongs to.
:param pulumi.Input[str] key_store_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the key store.
:param pulumi.Input[str] key_store_wallet_name: The wallet name for Oracle Key Vault.
:param pulumi.Input[str] kms_key_id: The OCID of the key container that is used as the master encryption key in database transparent data encryption (TDE) operations.
:param pulumi.Input[str] last_maintenance_run_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last maintenance run.
:param pulumi.Input[str] lifecycle_details: Additional information about the current lifecycle state.
:param pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowArgs'] maintenance_window: The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
:param pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs'] maintenance_window_details: (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
:param pulumi.Input[str] next_maintenance_run_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the next maintenance run.
:param pulumi.Input[str] patch_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last patch applied on the system.
:param pulumi.Input[str] patch_model: (Updatable) Database Patch model preference.
:param pulumi.Input[str] peer_autonomous_container_database_compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where the standby Autonomous Container Database will be created.
:param pulumi.Input[str] peer_autonomous_container_database_display_name: The display name for the peer Autonomous Container Database.
        :param pulumi.Input[str] peer_autonomous_exadata_infrastructure_id: The OCID of the peer Autonomous Exadata Infrastructure for Autonomous Data Guard.
:param pulumi.Input[str] peer_autonomous_vm_cluster_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the peer Autonomous VM cluster for Autonomous Data Guard. Required to enable Data Guard.
:param pulumi.Input[str] peer_db_unique_name: The `DB_UNIQUE_NAME` of the peer Autonomous Container Database in a Data Guard association.
:param pulumi.Input[str] protection_mode: The protection mode of this Autonomous Data Guard association. For more information, see [Oracle Data Guard Protection Modes](http://docs.oracle.com/database/122/SBYDB/oracle-data-guard-protection-modes.htm#SBYDB02000) in the Oracle Data Guard documentation.
        :param pulumi.Input[str] role: The role of the Data Guard-enabled Autonomous Container Database.
        :param pulumi.Input[bool] rotate_key_trigger: (Updatable) An optional property that, when flipped, triggers rotation of the KMS key. It is applicable only to dedicated container databases, i.e. where `autonomous_exadata_infrastructure_id` is set.
        :param pulumi.Input[str] service_level_agreement_type: The service level agreement type of the Autonomous Container Database. The default is STANDARD. For an Autonomous Data Guard-enabled Autonomous Container Database, the specified Autonomous Exadata Infrastructure must be associated with a remote Autonomous Exadata Infrastructure.
        :param pulumi.Input[int] standby_maintenance_buffer_in_days: (Updatable) The scheduling detail for the quarterly maintenance window of the standby Autonomous Container Database. This value represents the number of days before the primary database maintenance schedule.
:param pulumi.Input[str] state: The current state of the Autonomous Container Database.
:param pulumi.Input[str] time_created: The date and time the Autonomous Container Database was created.
:param pulumi.Input[str] vault_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure [vault](https://docs.cloud.oracle.com/iaas/Content/KeyManagement/Concepts/keyoverview.htm#concepts).
"""
if autonomous_exadata_infrastructure_id is not None:
pulumi.set(__self__, "autonomous_exadata_infrastructure_id", autonomous_exadata_infrastructure_id)
if autonomous_vm_cluster_id is not None:
pulumi.set(__self__, "autonomous_vm_cluster_id", autonomous_vm_cluster_id)
if availability_domain is not None:
pulumi.set(__self__, "availability_domain", availability_domain)
if backup_config is not None:
pulumi.set(__self__, "backup_config", backup_config)
if compartment_id is not None:
pulumi.set(__self__, "compartment_id", compartment_id)
if db_unique_name is not None:
pulumi.set(__self__, "db_unique_name", db_unique_name)
if db_version is not None:
pulumi.set(__self__, "db_version", db_version)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if infrastructure_type is not None:
pulumi.set(__self__, "infrastructure_type", infrastructure_type)
if key_store_id is not None:
pulumi.set(__self__, "key_store_id", key_store_id)
if key_store_wallet_name is not None:
pulumi.set(__self__, "key_store_wallet_name", key_store_wallet_name)
if kms_key_id is not None:
pulumi.set(__self__, "kms_key_id", kms_key_id)
if last_maintenance_run_id is not None:
pulumi.set(__self__, "last_maintenance_run_id", last_maintenance_run_id)
if lifecycle_details is not None:
pulumi.set(__self__, "lifecycle_details", lifecycle_details)
if maintenance_window is not None:
pulumi.set(__self__, "maintenance_window", maintenance_window)
if maintenance_window_details is not None:
pulumi.set(__self__, "maintenance_window_details", maintenance_window_details)
if next_maintenance_run_id is not None:
pulumi.set(__self__, "next_maintenance_run_id", next_maintenance_run_id)
if patch_id is not None:
pulumi.set(__self__, "patch_id", patch_id)
if patch_model is not None:
pulumi.set(__self__, "patch_model", patch_model)
if peer_autonomous_container_database_backup_config is not None:
pulumi.set(__self__, "peer_autonomous_container_database_backup_config", peer_autonomous_container_database_backup_config)
if peer_autonomous_container_database_compartment_id is not None:
pulumi.set(__self__, "peer_autonomous_container_database_compartment_id", peer_autonomous_container_database_compartment_id)
if peer_autonomous_container_database_display_name is not None:
pulumi.set(__self__, "peer_autonomous_container_database_display_name", peer_autonomous_container_database_display_name)
if peer_autonomous_exadata_infrastructure_id is not None:
pulumi.set(__self__, "peer_autonomous_exadata_infrastructure_id", peer_autonomous_exadata_infrastructure_id)
if peer_autonomous_vm_cluster_id is not None:
pulumi.set(__self__, "peer_autonomous_vm_cluster_id", peer_autonomous_vm_cluster_id)
if peer_db_unique_name is not None:
pulumi.set(__self__, "peer_db_unique_name", peer_db_unique_name)
if protection_mode is not None:
pulumi.set(__self__, "protection_mode", protection_mode)
if role is not None:
pulumi.set(__self__, "role", role)
if rotate_key_trigger is not None:
pulumi.set(__self__, "rotate_key_trigger", rotate_key_trigger)
if service_level_agreement_type is not None:
pulumi.set(__self__, "service_level_agreement_type", service_level_agreement_type)
if standby_maintenance_buffer_in_days is not None:
pulumi.set(__self__, "standby_maintenance_buffer_in_days", standby_maintenance_buffer_in_days)
if state is not None:
pulumi.set(__self__, "state", state)
if time_created is not None:
pulumi.set(__self__, "time_created", time_created)
if vault_id is not None:
pulumi.set(__self__, "vault_id", vault_id)
@property
@pulumi.getter(name="autonomousExadataInfrastructureId")
def autonomous_exadata_infrastructure_id(self) -> Optional[pulumi.Input[str]]:
"""
The OCID of the Autonomous Exadata Infrastructure.
"""
return pulumi.get(self, "autonomous_exadata_infrastructure_id")
@autonomous_exadata_infrastructure_id.setter
def autonomous_exadata_infrastructure_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "autonomous_exadata_infrastructure_id", value)
@property
@pulumi.getter(name="autonomousVmClusterId")
def autonomous_vm_cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
The OCID of the Autonomous VM Cluster.
"""
return pulumi.get(self, "autonomous_vm_cluster_id")
@autonomous_vm_cluster_id.setter
def autonomous_vm_cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "autonomous_vm_cluster_id", value)
@property
@pulumi.getter(name="availabilityDomain")
def availability_domain(self) -> Optional[pulumi.Input[str]]:
"""
The availability domain of the Autonomous Container Database.
"""
return pulumi.get(self, "availability_domain")
@availability_domain.setter
def availability_domain(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "availability_domain", value)
@property
@pulumi.getter(name="backupConfig")
def backup_config(self) -> Optional[pulumi.Input['AutonomousContainerDatabaseBackupConfigArgs']]:
"""
(Updatable) Backup options for the Autonomous Container Database.
"""
return pulumi.get(self, "backup_config")
@backup_config.setter
def backup_config(self, value: Optional[pulumi.Input['AutonomousContainerDatabaseBackupConfigArgs']]):
pulumi.set(self, "backup_config", value)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the Autonomous Container Database.
"""
return pulumi.get(self, "compartment_id")
@compartment_id.setter
def compartment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "compartment_id", value)
@property
@pulumi.getter(name="dbUniqueName")
def db_unique_name(self) -> Optional[pulumi.Input[str]]:
"""
The `DB_UNIQUE_NAME` of the Oracle Database being backed up.
"""
return pulumi.get(self, "db_unique_name")
@db_unique_name.setter
def db_unique_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "db_unique_name", value)
@property
@pulumi.getter(name="dbVersion")
def db_version(self) -> Optional[pulumi.Input[str]]:
"""
Oracle Database version of the Autonomous Container Database.
"""
return pulumi.get(self, "db_version")
@db_version.setter
def db_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "db_version", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The display name for the Autonomous Container Database.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter(name="infrastructureType")
def infrastructure_type(self) -> Optional[pulumi.Input[str]]:
"""
The infrastructure type this resource belongs to.
"""
return pulumi.get(self, "infrastructure_type")
@infrastructure_type.setter
def infrastructure_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "infrastructure_type", value)
@property
@pulumi.getter(name="keyStoreId")
def key_store_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the key store.
"""
return pulumi.get(self, "key_store_id")
@key_store_id.setter
def key_store_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_store_id", value)
@property
@pulumi.getter(name="keyStoreWalletName")
def key_store_wallet_name(self) -> Optional[pulumi.Input[str]]:
"""
The wallet name for Oracle Key Vault.
"""
return pulumi.get(self, "key_store_wallet_name")
@key_store_wallet_name.setter
def key_store_wallet_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_store_wallet_name", value)
@property
@pulumi.getter(name="kmsKeyId")
def kms_key_id(self) -> Optional[pulumi.Input[str]]:
"""
The OCID of the key container that is used as the master encryption key in database transparent data encryption (TDE) operations.
"""
return pulumi.get(self, "kms_key_id")
@kms_key_id.setter
def kms_key_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kms_key_id", value)
@property
@pulumi.getter(name="lastMaintenanceRunId")
def last_maintenance_run_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last maintenance run.
"""
return pulumi.get(self, "last_maintenance_run_id")
@last_maintenance_run_id.setter
def last_maintenance_run_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "last_maintenance_run_id", value)
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> Optional[pulumi.Input[str]]:
"""
Additional information about the current lifecycle state.
"""
return pulumi.get(self, "lifecycle_details")
@lifecycle_details.setter
def lifecycle_details(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lifecycle_details", value)
@property
@pulumi.getter(name="maintenanceWindow")
def maintenance_window(self) -> Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowArgs']]:
"""
The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
"""
return pulumi.get(self, "maintenance_window")
@maintenance_window.setter
def maintenance_window(self, value: Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowArgs']]):
pulumi.set(self, "maintenance_window", value)
@property
@pulumi.getter(name="maintenanceWindowDetails")
def maintenance_window_details(self) -> Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']]:
"""
(Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
"""
return pulumi.get(self, "maintenance_window_details")
@maintenance_window_details.setter
def maintenance_window_details(self, value: Optional[pulumi.Input['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']]):
pulumi.set(self, "maintenance_window_details", value)
@property
@pulumi.getter(name="nextMaintenanceRunId")
def next_maintenance_run_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the next maintenance run.
"""
return pulumi.get(self, "next_maintenance_run_id")
@next_maintenance_run_id.setter
def next_maintenance_run_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "next_maintenance_run_id", value)
@property
@pulumi.getter(name="patchId")
def patch_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last patch applied on the system.
"""
return pulumi.get(self, "patch_id")
@patch_id.setter
def patch_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "patch_id", value)
@property
@pulumi.getter(name="patchModel")
def patch_model(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) Database Patch model preference.
"""
return pulumi.get(self, "patch_model")
@patch_model.setter
def patch_model(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "patch_model", value)
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseBackupConfig")
    def peer_autonomous_container_database_backup_config(self) -> Optional[pulumi.Input['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']]:
        """
        Backup options for the standby (peer) Autonomous Container Database.
        """
        return pulumi.get(self, "peer_autonomous_container_database_backup_config")
@peer_autonomous_container_database_backup_config.setter
def peer_autonomous_container_database_backup_config(self, value: Optional[pulumi.Input['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']]):
pulumi.set(self, "peer_autonomous_container_database_backup_config", value)
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseCompartmentId")
def peer_autonomous_container_database_compartment_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where the standby Autonomous Container Database will be created.
"""
return pulumi.get(self, "peer_autonomous_container_database_compartment_id")
@peer_autonomous_container_database_compartment_id.setter
def peer_autonomous_container_database_compartment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_autonomous_container_database_compartment_id", value)
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseDisplayName")
def peer_autonomous_container_database_display_name(self) -> Optional[pulumi.Input[str]]:
"""
The display name for the peer Autonomous Container Database.
"""
return pulumi.get(self, "peer_autonomous_container_database_display_name")
@peer_autonomous_container_database_display_name.setter
def peer_autonomous_container_database_display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_autonomous_container_database_display_name", value)
@property
@pulumi.getter(name="peerAutonomousExadataInfrastructureId")
def peer_autonomous_exadata_infrastructure_id(self) -> Optional[pulumi.Input[str]]:
"""
        The OCID of the peer Autonomous Exadata Infrastructure for Autonomous Data Guard.
"""
return pulumi.get(self, "peer_autonomous_exadata_infrastructure_id")
@peer_autonomous_exadata_infrastructure_id.setter
def peer_autonomous_exadata_infrastructure_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_autonomous_exadata_infrastructure_id", value)
@property
@pulumi.getter(name="peerAutonomousVmClusterId")
def peer_autonomous_vm_cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the peer Autonomous VM cluster for Autonomous Data Guard. Required to enable Data Guard.
"""
return pulumi.get(self, "peer_autonomous_vm_cluster_id")
@peer_autonomous_vm_cluster_id.setter
def peer_autonomous_vm_cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_autonomous_vm_cluster_id", value)
@property
@pulumi.getter(name="peerDbUniqueName")
def peer_db_unique_name(self) -> Optional[pulumi.Input[str]]:
"""
The `DB_UNIQUE_NAME` of the peer Autonomous Container Database in a Data Guard association.
"""
return pulumi.get(self, "peer_db_unique_name")
@peer_db_unique_name.setter
def peer_db_unique_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "peer_db_unique_name", value)
@property
@pulumi.getter(name="protectionMode")
def protection_mode(self) -> Optional[pulumi.Input[str]]:
"""
The protection mode of this Autonomous Data Guard association. For more information, see [Oracle Data Guard Protection Modes](http://docs.oracle.com/database/122/SBYDB/oracle-data-guard-protection-modes.htm#SBYDB02000) in the Oracle Data Guard documentation.
"""
return pulumi.get(self, "protection_mode")
@protection_mode.setter
def protection_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "protection_mode", value)
@property
@pulumi.getter
def role(self) -> Optional[pulumi.Input[str]]:
"""
        The role of the Data Guard-enabled Autonomous Container Database.
"""
return pulumi.get(self, "role")
@role.setter
def role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "role", value)
@property
@pulumi.getter(name="rotateKeyTrigger")
def rotate_key_trigger(self) -> Optional[pulumi.Input[bool]]:
"""
        (Updatable) An optional property that, when flipped, triggers rotation of the KMS key. It is applicable only to dedicated container databases, i.e. where `autonomous_exadata_infrastructure_id` is set.
"""
return pulumi.get(self, "rotate_key_trigger")
@rotate_key_trigger.setter
def rotate_key_trigger(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "rotate_key_trigger", value)
@property
@pulumi.getter(name="serviceLevelAgreementType")
def service_level_agreement_type(self) -> Optional[pulumi.Input[str]]:
"""
        The service level agreement type of the Autonomous Container Database. The default is STANDARD. For an Autonomous Data Guard-enabled Autonomous Container Database, the specified Autonomous Exadata Infrastructure must be associated with a remote Autonomous Exadata Infrastructure.
"""
return pulumi.get(self, "service_level_agreement_type")
@service_level_agreement_type.setter
def service_level_agreement_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_level_agreement_type", value)
@property
@pulumi.getter(name="standbyMaintenanceBufferInDays")
def standby_maintenance_buffer_in_days(self) -> Optional[pulumi.Input[int]]:
"""
        (Updatable) The scheduling detail for the quarterly maintenance window of the standby Autonomous Container Database. This value represents the number of days before the primary database maintenance schedule.
"""
return pulumi.get(self, "standby_maintenance_buffer_in_days")
@standby_maintenance_buffer_in_days.setter
def standby_maintenance_buffer_in_days(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "standby_maintenance_buffer_in_days", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[str]]:
"""
The current state of the Autonomous Container Database.
"""
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "state", value)
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> Optional[pulumi.Input[str]]:
"""
The date and time the Autonomous Container Database was created.
"""
return pulumi.get(self, "time_created")
@time_created.setter
def time_created(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_created", value)
@property
@pulumi.getter(name="vaultId")
def vault_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure [vault](https://docs.cloud.oracle.com/iaas/Content/KeyManagement/Concepts/keyoverview.htm#concepts).
"""
return pulumi.get(self, "vault_id")
@vault_id.setter
def vault_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vault_id", value)
class AutonomousContainerDatabase(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
backup_config: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseBackupConfigArgs']]] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
db_unique_name: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
key_store_id: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
maintenance_window_details: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']]] = None,
patch_model: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_backup_config: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']]] = None,
peer_autonomous_container_database_compartment_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_display_name: Optional[pulumi.Input[str]] = None,
peer_autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
peer_db_unique_name: Optional[pulumi.Input[str]] = None,
protection_mode: Optional[pulumi.Input[str]] = None,
rotate_key_trigger: Optional[pulumi.Input[bool]] = None,
service_level_agreement_type: Optional[pulumi.Input[str]] = None,
standby_maintenance_buffer_in_days: Optional[pulumi.Input[int]] = None,
vault_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
        This resource provides the Autonomous Container Database resource in the Oracle Cloud Infrastructure Database service.
Creates an Autonomous Container Database in the specified Autonomous Exadata Infrastructure.
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_autonomous_container_database = oci.database.AutonomousContainerDatabase("testAutonomousContainerDatabase",
display_name=var["autonomous_container_database_display_name"],
patch_model=var["autonomous_container_database_patch_model"],
autonomous_exadata_infrastructure_id=oci_database_autonomous_exadata_infrastructure["test_autonomous_exadata_infrastructure"]["id"],
autonomous_vm_cluster_id=oci_database_autonomous_vm_cluster["test_autonomous_vm_cluster"]["id"],
backup_config=oci.database.AutonomousContainerDatabaseBackupConfigArgs(
backup_destination_details=oci.database.AutonomousContainerDatabaseBackupConfigBackupDestinationDetailsArgs(
type=var["autonomous_container_database_backup_config_backup_destination_details_type"],
id=var["autonomous_container_database_backup_config_backup_destination_details_id"],
internet_proxy=var["autonomous_container_database_backup_config_backup_destination_details_internet_proxy"],
vpc_password=var["autonomous_container_database_backup_config_backup_destination_details_vpc_password"],
vpc_user=var["autonomous_container_database_backup_config_backup_destination_details_vpc_user"],
),
recovery_window_in_days=var["autonomous_container_database_backup_config_recovery_window_in_days"],
),
compartment_id=var["compartment_id"],
db_unique_name=var["autonomous_container_database_db_unique_name"],
defined_tags={
"Operations.CostCenter": "42",
},
freeform_tags={
"Department": "Finance",
},
key_store_id=oci_database_key_store["test_key_store"]["id"],
kms_key_id=oci_kms_key["test_key"]["id"],
maintenance_window_details=oci.database.AutonomousContainerDatabaseMaintenanceWindowDetailsArgs(
preference=var["autonomous_container_database_maintenance_window_details_preference"],
days_of_weeks=[oci.database.AutonomousContainerDatabaseMaintenanceWindowDetailsDaysOfWeekArgs(
name=var["autonomous_container_database_maintenance_window_details_days_of_week_name"],
)],
hours_of_days=var["autonomous_container_database_maintenance_window_details_hours_of_day"],
lead_time_in_weeks=var["autonomous_container_database_maintenance_window_details_lead_time_in_weeks"],
months=[oci.database.AutonomousContainerDatabaseMaintenanceWindowDetailsMonthArgs(
name=var["autonomous_container_database_maintenance_window_details_months_name"],
)],
weeks_of_months=var["autonomous_container_database_maintenance_window_details_weeks_of_month"],
),
peer_autonomous_container_database_display_name=var["autonomous_container_database_peer_autonomous_container_database_display_name"],
peer_autonomous_exadata_infrastructure_id=oci_database_autonomous_exadata_infrastructure["test_autonomous_exadata_infrastructure"]["id"],
protection_mode=var["autonomous_container_database_protection_mode"],
peer_autonomous_container_database_backup_config=oci.database.AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs(
backup_destination_details=[oci.database.AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigBackupDestinationDetailArgs(
type=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_type"],
id=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_id"],
internet_proxy=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_internet_proxy"],
vpc_password=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_vpc_password"],
vpc_user=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_vpc_user"],
)],
recovery_window_in_days=var["autonomous_container_database_peer_autonomous_container_database_backup_config_recovery_window_in_days"],
),
peer_autonomous_container_database_compartment_id=oci_identity_compartment["test_compartment"]["id"],
peer_autonomous_vm_cluster_id=oci_database_autonomous_vm_cluster["test_autonomous_vm_cluster"]["id"],
peer_db_unique_name=var["autonomous_container_database_peer_db_unique_name"],
service_level_agreement_type=var["autonomous_container_database_service_level_agreement_type"],
vault_id=oci_kms_vault["test_vault"]["id"],
standby_maintenance_buffer_in_days=var["autonomous_container_database_standby_maintenance_buffer_in_days"])
```
## Import
AutonomousContainerDatabases can be imported using the `id`, e.g.
```sh
$ pulumi import oci:database/autonomousContainerDatabase:AutonomousContainerDatabase test_autonomous_container_database "id"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] autonomous_exadata_infrastructure_id: The OCID of the Autonomous Exadata Infrastructure.
:param pulumi.Input[str] autonomous_vm_cluster_id: The OCID of the Autonomous VM Cluster.
:param pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseBackupConfigArgs']] backup_config: (Updatable) Backup options for the Autonomous Container Database.
:param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the Autonomous Container Database.
:param pulumi.Input[str] db_unique_name: The `DB_UNIQUE_NAME` of the Oracle Database being backed up.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
:param pulumi.Input[str] display_name: (Updatable) The display name for the Autonomous Container Database.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] key_store_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the key store.
:param pulumi.Input[str] kms_key_id: The OCID of the key container that is used as the master encryption key in database transparent data encryption (TDE) operations.
:param pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']] maintenance_window_details: (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
:param pulumi.Input[str] patch_model: (Updatable) Database Patch model preference.
:param pulumi.Input[str] peer_autonomous_container_database_compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where the standby Autonomous Container Database will be created.
:param pulumi.Input[str] peer_autonomous_container_database_display_name: The display name for the peer Autonomous Container Database.
        :param pulumi.Input[str] peer_autonomous_exadata_infrastructure_id: The OCID of the peer Autonomous Exadata Infrastructure for Autonomous Data Guard.
:param pulumi.Input[str] peer_autonomous_vm_cluster_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the peer Autonomous VM cluster for Autonomous Data Guard. Required to enable Data Guard.
:param pulumi.Input[str] peer_db_unique_name: The `DB_UNIQUE_NAME` of the peer Autonomous Container Database in a Data Guard association.
:param pulumi.Input[str] protection_mode: The protection mode of this Autonomous Data Guard association. For more information, see [Oracle Data Guard Protection Modes](http://docs.oracle.com/database/122/SBYDB/oracle-data-guard-protection-modes.htm#SBYDB02000) in the Oracle Data Guard documentation.
        :param pulumi.Input[bool] rotate_key_trigger: (Updatable) An optional property that, when flipped, triggers rotation of the KMS key. It is applicable only to dedicated container databases, i.e. where `autonomous_exadata_infrastructure_id` is set.
        :param pulumi.Input[str] service_level_agreement_type: The service level agreement type of the Autonomous Container Database. The default is STANDARD. For an Autonomous Data Guard-enabled Autonomous Container Database, the specified Autonomous Exadata Infrastructure must be associated with a remote Autonomous Exadata Infrastructure.
        :param pulumi.Input[int] standby_maintenance_buffer_in_days: (Updatable) The scheduling detail for the quarterly maintenance window of the standby Autonomous Container Database. This value represents the number of days before the primary database maintenance schedule.
:param pulumi.Input[str] vault_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure [vault](https://docs.cloud.oracle.com/iaas/Content/KeyManagement/Concepts/keyoverview.htm#concepts).
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AutonomousContainerDatabaseArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
        This resource provides the Autonomous Container Database resource in the Oracle Cloud Infrastructure Database service.
Creates an Autonomous Container Database in the specified Autonomous Exadata Infrastructure.
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_autonomous_container_database = oci.database.AutonomousContainerDatabase("testAutonomousContainerDatabase",
display_name=var["autonomous_container_database_display_name"],
patch_model=var["autonomous_container_database_patch_model"],
autonomous_exadata_infrastructure_id=oci_database_autonomous_exadata_infrastructure["test_autonomous_exadata_infrastructure"]["id"],
autonomous_vm_cluster_id=oci_database_autonomous_vm_cluster["test_autonomous_vm_cluster"]["id"],
backup_config=oci.database.AutonomousContainerDatabaseBackupConfigArgs(
backup_destination_details=oci.database.AutonomousContainerDatabaseBackupConfigBackupDestinationDetailsArgs(
type=var["autonomous_container_database_backup_config_backup_destination_details_type"],
id=var["autonomous_container_database_backup_config_backup_destination_details_id"],
internet_proxy=var["autonomous_container_database_backup_config_backup_destination_details_internet_proxy"],
vpc_password=var["autonomous_container_database_backup_config_backup_destination_details_vpc_password"],
vpc_user=var["autonomous_container_database_backup_config_backup_destination_details_vpc_user"],
),
recovery_window_in_days=var["autonomous_container_database_backup_config_recovery_window_in_days"],
),
compartment_id=var["compartment_id"],
db_unique_name=var["autonomous_container_database_db_unique_name"],
defined_tags={
"Operations.CostCenter": "42",
},
freeform_tags={
"Department": "Finance",
},
key_store_id=oci_database_key_store["test_key_store"]["id"],
kms_key_id=oci_kms_key["test_key"]["id"],
maintenance_window_details=oci.database.AutonomousContainerDatabaseMaintenanceWindowDetailsArgs(
preference=var["autonomous_container_database_maintenance_window_details_preference"],
days_of_weeks=[oci.database.AutonomousContainerDatabaseMaintenanceWindowDetailsDaysOfWeekArgs(
name=var["autonomous_container_database_maintenance_window_details_days_of_week_name"],
)],
hours_of_days=var["autonomous_container_database_maintenance_window_details_hours_of_day"],
lead_time_in_weeks=var["autonomous_container_database_maintenance_window_details_lead_time_in_weeks"],
months=[oci.database.AutonomousContainerDatabaseMaintenanceWindowDetailsMonthArgs(
name=var["autonomous_container_database_maintenance_window_details_months_name"],
)],
weeks_of_months=var["autonomous_container_database_maintenance_window_details_weeks_of_month"],
),
peer_autonomous_container_database_display_name=var["autonomous_container_database_peer_autonomous_container_database_display_name"],
peer_autonomous_exadata_infrastructure_id=oci_database_autonomous_exadata_infrastructure["test_autonomous_exadata_infrastructure"]["id"],
protection_mode=var["autonomous_container_database_protection_mode"],
peer_autonomous_container_database_backup_config=oci.database.AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs(
backup_destination_details=[oci.database.AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigBackupDestinationDetailArgs(
type=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_type"],
id=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_id"],
internet_proxy=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_internet_proxy"],
vpc_password=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_vpc_password"],
vpc_user=var["autonomous_container_database_peer_autonomous_container_database_backup_config_backup_destination_details_vpc_user"],
)],
recovery_window_in_days=var["autonomous_container_database_peer_autonomous_container_database_backup_config_recovery_window_in_days"],
),
peer_autonomous_container_database_compartment_id=oci_identity_compartment["test_compartment"]["id"],
peer_autonomous_vm_cluster_id=oci_database_autonomous_vm_cluster["test_autonomous_vm_cluster"]["id"],
peer_db_unique_name=var["autonomous_container_database_peer_db_unique_name"],
service_level_agreement_type=var["autonomous_container_database_service_level_agreement_type"],
vault_id=oci_kms_vault["test_vault"]["id"],
standby_maintenance_buffer_in_days=var["autonomous_container_database_standby_maintenance_buffer_in_days"])
```
## Import
AutonomousContainerDatabases can be imported using the `id`, e.g.
```sh
$ pulumi import oci:database/autonomousContainerDatabase:AutonomousContainerDatabase test_autonomous_container_database "id"
```
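        As an alternative to the keyword-argument form shown above, a minimal, hedged sketch of the args-object constructor (values are placeholders):

        ```python
        import pulumi_oci as oci

        acd = oci.database.AutonomousContainerDatabase(
            "acdViaArgs",
            oci.database.AutonomousContainerDatabaseArgs(
                display_name="example-acd",
                patch_model="RELEASE_UPDATES",
                autonomous_vm_cluster_id="ocid1.autonomousvmcluster.oc1..example"))
        ```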
:param str resource_name: The name of the resource.
:param AutonomousContainerDatabaseArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AutonomousContainerDatabaseArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
backup_config: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseBackupConfigArgs']]] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
db_unique_name: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
key_store_id: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
maintenance_window_details: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']]] = None,
patch_model: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_backup_config: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']]] = None,
peer_autonomous_container_database_compartment_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_display_name: Optional[pulumi.Input[str]] = None,
peer_autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
peer_db_unique_name: Optional[pulumi.Input[str]] = None,
protection_mode: Optional[pulumi.Input[str]] = None,
rotate_key_trigger: Optional[pulumi.Input[bool]] = None,
service_level_agreement_type: Optional[pulumi.Input[str]] = None,
standby_maintenance_buffer_in_days: Optional[pulumi.Input[int]] = None,
vault_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AutonomousContainerDatabaseArgs.__new__(AutonomousContainerDatabaseArgs)
__props__.__dict__["autonomous_exadata_infrastructure_id"] = autonomous_exadata_infrastructure_id
__props__.__dict__["autonomous_vm_cluster_id"] = autonomous_vm_cluster_id
__props__.__dict__["backup_config"] = backup_config
__props__.__dict__["compartment_id"] = compartment_id
__props__.__dict__["db_unique_name"] = db_unique_name
__props__.__dict__["defined_tags"] = defined_tags
if display_name is None and not opts.urn:
raise TypeError("Missing required property 'display_name'")
__props__.__dict__["display_name"] = display_name
__props__.__dict__["freeform_tags"] = freeform_tags
__props__.__dict__["key_store_id"] = key_store_id
__props__.__dict__["kms_key_id"] = kms_key_id
__props__.__dict__["maintenance_window_details"] = maintenance_window_details
if patch_model is None and not opts.urn:
raise TypeError("Missing required property 'patch_model'")
__props__.__dict__["patch_model"] = patch_model
__props__.__dict__["peer_autonomous_container_database_backup_config"] = peer_autonomous_container_database_backup_config
__props__.__dict__["peer_autonomous_container_database_compartment_id"] = peer_autonomous_container_database_compartment_id
__props__.__dict__["peer_autonomous_container_database_display_name"] = peer_autonomous_container_database_display_name
__props__.__dict__["peer_autonomous_exadata_infrastructure_id"] = peer_autonomous_exadata_infrastructure_id
__props__.__dict__["peer_autonomous_vm_cluster_id"] = peer_autonomous_vm_cluster_id
__props__.__dict__["peer_db_unique_name"] = peer_db_unique_name
__props__.__dict__["protection_mode"] = protection_mode
__props__.__dict__["rotate_key_trigger"] = rotate_key_trigger
__props__.__dict__["service_level_agreement_type"] = service_level_agreement_type
__props__.__dict__["standby_maintenance_buffer_in_days"] = standby_maintenance_buffer_in_days
__props__.__dict__["vault_id"] = vault_id
__props__.__dict__["availability_domain"] = None
__props__.__dict__["db_version"] = None
__props__.__dict__["infrastructure_type"] = None
__props__.__dict__["key_store_wallet_name"] = None
__props__.__dict__["last_maintenance_run_id"] = None
__props__.__dict__["lifecycle_details"] = None
__props__.__dict__["maintenance_window"] = None
__props__.__dict__["next_maintenance_run_id"] = None
__props__.__dict__["patch_id"] = None
__props__.__dict__["role"] = None
__props__.__dict__["state"] = None
__props__.__dict__["time_created"] = None
super(AutonomousContainerDatabase, __self__).__init__(
'oci:database/autonomousContainerDatabase:AutonomousContainerDatabase',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
availability_domain: Optional[pulumi.Input[str]] = None,
backup_config: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseBackupConfigArgs']]] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
db_unique_name: Optional[pulumi.Input[str]] = None,
db_version: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
infrastructure_type: Optional[pulumi.Input[str]] = None,
key_store_id: Optional[pulumi.Input[str]] = None,
key_store_wallet_name: Optional[pulumi.Input[str]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
last_maintenance_run_id: Optional[pulumi.Input[str]] = None,
lifecycle_details: Optional[pulumi.Input[str]] = None,
maintenance_window: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseMaintenanceWindowArgs']]] = None,
maintenance_window_details: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']]] = None,
next_maintenance_run_id: Optional[pulumi.Input[str]] = None,
patch_id: Optional[pulumi.Input[str]] = None,
patch_model: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_backup_config: Optional[pulumi.Input[pulumi.InputType['AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfigArgs']]] = None,
peer_autonomous_container_database_compartment_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_container_database_display_name: Optional[pulumi.Input[str]] = None,
peer_autonomous_exadata_infrastructure_id: Optional[pulumi.Input[str]] = None,
peer_autonomous_vm_cluster_id: Optional[pulumi.Input[str]] = None,
peer_db_unique_name: Optional[pulumi.Input[str]] = None,
protection_mode: Optional[pulumi.Input[str]] = None,
role: Optional[pulumi.Input[str]] = None,
rotate_key_trigger: Optional[pulumi.Input[bool]] = None,
service_level_agreement_type: Optional[pulumi.Input[str]] = None,
standby_maintenance_buffer_in_days: Optional[pulumi.Input[int]] = None,
state: Optional[pulumi.Input[str]] = None,
time_created: Optional[pulumi.Input[str]] = None,
vault_id: Optional[pulumi.Input[str]] = None) -> 'AutonomousContainerDatabase':
"""
Get an existing AutonomousContainerDatabase resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
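        A minimal, hedged sketch (the OCID is a placeholder):

        ```python
        import pulumi_oci as oci

        existing = oci.database.AutonomousContainerDatabase.get(
            "existingAutonomousContainerDatabase",
            id="ocid1.autonomouscontainerdatabase.oc1..example")
        ```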
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] autonomous_exadata_infrastructure_id: The OCID of the Autonomous Exadata Infrastructure.
:param pulumi.Input[str] autonomous_vm_cluster_id: The OCID of the Autonomous VM Cluster.
:param pulumi.Input[str] availability_domain: The availability domain of the Autonomous Container Database.
:param pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseBackupConfigArgs']] backup_config: (Updatable) Backup options for the Autonomous Container Database.
:param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the Autonomous Container Database.
:param pulumi.Input[str] db_unique_name: The `DB_UNIQUE_NAME` of the Oracle Database being backed up.
:param pulumi.Input[str] db_version: Oracle Database version of the Autonomous Container Database.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
:param pulumi.Input[str] display_name: (Updatable) The display name for the Autonomous Container Database.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] infrastructure_type: The infrastructure type this resource belongs to.
:param pulumi.Input[str] key_store_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the key store.
:param pulumi.Input[str] key_store_wallet_name: The wallet name for Oracle Key Vault.
:param pulumi.Input[str] kms_key_id: The OCID of the key container that is used as the master encryption key in database transparent data encryption (TDE) operations.
:param pulumi.Input[str] last_maintenance_run_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last maintenance run.
:param pulumi.Input[str] lifecycle_details: Additional information about the current lifecycle state.
:param pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseMaintenanceWindowArgs']] maintenance_window: The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
:param pulumi.Input[pulumi.InputType['AutonomousContainerDatabaseMaintenanceWindowDetailsArgs']] maintenance_window_details: (Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
:param pulumi.Input[str] next_maintenance_run_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the next maintenance run.
:param pulumi.Input[str] patch_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last patch applied on the system.
:param pulumi.Input[str] patch_model: (Updatable) Database Patch model preference.
:param pulumi.Input[str] peer_autonomous_container_database_compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where the standby Autonomous Container Database will be created.
:param pulumi.Input[str] peer_autonomous_container_database_display_name: The display name for the peer Autonomous Container Database.
        :param pulumi.Input[str] peer_autonomous_exadata_infrastructure_id: The OCID of the peer Autonomous Exadata Infrastructure for Autonomous Data Guard.
:param pulumi.Input[str] peer_autonomous_vm_cluster_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the peer Autonomous VM cluster for Autonomous Data Guard. Required to enable Data Guard.
:param pulumi.Input[str] peer_db_unique_name: The `DB_UNIQUE_NAME` of the peer Autonomous Container Database in a Data Guard association.
:param pulumi.Input[str] protection_mode: The protection mode of this Autonomous Data Guard association. For more information, see [Oracle Data Guard Protection Modes](http://docs.oracle.com/database/122/SBYDB/oracle-data-guard-protection-modes.htm#SBYDB02000) in the Oracle Data Guard documentation.
:param pulumi.Input[str] role: The role of the Data Guard-enabled Autonomous Container Database.
:param pulumi.Input[bool] rotate_key_trigger: (Updatable) An optional property that, when flipped, triggers rotation of the KMS key. It applies only to dedicated container databases, i.e., where `autonomous_exadata_infrastructure_id` is set.
:param pulumi.Input[str] service_level_agreement_type: The service level agreement type of the Autonomous Container Database. The default is STANDARD. For an Autonomous Container Database with Autonomous Data Guard enabled, the specified Autonomous Exadata Infrastructure must be associated with a remote Autonomous Exadata Infrastructure.
:param pulumi.Input[int] standby_maintenance_buffer_in_days: (Updatable) The scheduling detail for the quarterly maintenance window of the standby Autonomous Container Database. This value represents the number of days before the primary database maintenance schedule.
:param pulumi.Input[str] state: The current state of the Autonomous Container Database.
:param pulumi.Input[str] time_created: The date and time the Autonomous Container Database was created.
:param pulumi.Input[str] vault_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure [vault](https://docs.cloud.oracle.com/iaas/Content/KeyManagement/Concepts/keyoverview.htm#concepts).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AutonomousContainerDatabaseState.__new__(_AutonomousContainerDatabaseState)
__props__.__dict__["autonomous_exadata_infrastructure_id"] = autonomous_exadata_infrastructure_id
__props__.__dict__["autonomous_vm_cluster_id"] = autonomous_vm_cluster_id
__props__.__dict__["availability_domain"] = availability_domain
__props__.__dict__["backup_config"] = backup_config
__props__.__dict__["compartment_id"] = compartment_id
__props__.__dict__["db_unique_name"] = db_unique_name
__props__.__dict__["db_version"] = db_version
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["display_name"] = display_name
__props__.__dict__["freeform_tags"] = freeform_tags
__props__.__dict__["infrastructure_type"] = infrastructure_type
__props__.__dict__["key_store_id"] = key_store_id
__props__.__dict__["key_store_wallet_name"] = key_store_wallet_name
__props__.__dict__["kms_key_id"] = kms_key_id
__props__.__dict__["last_maintenance_run_id"] = last_maintenance_run_id
__props__.__dict__["lifecycle_details"] = lifecycle_details
__props__.__dict__["maintenance_window"] = maintenance_window
__props__.__dict__["maintenance_window_details"] = maintenance_window_details
__props__.__dict__["next_maintenance_run_id"] = next_maintenance_run_id
__props__.__dict__["patch_id"] = patch_id
__props__.__dict__["patch_model"] = patch_model
__props__.__dict__["peer_autonomous_container_database_backup_config"] = peer_autonomous_container_database_backup_config
__props__.__dict__["peer_autonomous_container_database_compartment_id"] = peer_autonomous_container_database_compartment_id
__props__.__dict__["peer_autonomous_container_database_display_name"] = peer_autonomous_container_database_display_name
__props__.__dict__["peer_autonomous_exadata_infrastructure_id"] = peer_autonomous_exadata_infrastructure_id
__props__.__dict__["peer_autonomous_vm_cluster_id"] = peer_autonomous_vm_cluster_id
__props__.__dict__["peer_db_unique_name"] = peer_db_unique_name
__props__.__dict__["protection_mode"] = protection_mode
__props__.__dict__["role"] = role
__props__.__dict__["rotate_key_trigger"] = rotate_key_trigger
__props__.__dict__["service_level_agreement_type"] = service_level_agreement_type
__props__.__dict__["standby_maintenance_buffer_in_days"] = standby_maintenance_buffer_in_days
__props__.__dict__["state"] = state
__props__.__dict__["time_created"] = time_created
__props__.__dict__["vault_id"] = vault_id
return AutonomousContainerDatabase(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="autonomousExadataInfrastructureId")
def autonomous_exadata_infrastructure_id(self) -> pulumi.Output[str]:
"""
The OCID of the Autonomous Exadata Infrastructure.
"""
return pulumi.get(self, "autonomous_exadata_infrastructure_id")
@property
@pulumi.getter(name="autonomousVmClusterId")
def autonomous_vm_cluster_id(self) -> pulumi.Output[str]:
"""
The OCID of the Autonomous VM Cluster.
"""
return pulumi.get(self, "autonomous_vm_cluster_id")
@property
@pulumi.getter(name="availabilityDomain")
def availability_domain(self) -> pulumi.Output[str]:
"""
The availability domain of the Autonomous Container Database.
"""
return pulumi.get(self, "availability_domain")
@property
@pulumi.getter(name="backupConfig")
def backup_config(self) -> pulumi.Output['outputs.AutonomousContainerDatabaseBackupConfig']:
"""
(Updatable) Backup options for the Autonomous Container Database.
"""
return pulumi.get(self, "backup_config")
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the Autonomous Container Database.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="dbUniqueName")
def db_unique_name(self) -> pulumi.Output[str]:
"""
The `DB_UNIQUE_NAME` of the Oracle Database being backed up.
"""
return pulumi.get(self, "db_unique_name")
@property
@pulumi.getter(name="dbVersion")
def db_version(self) -> pulumi.Output[str]:
"""
Oracle Database version of the Autonomous Container Database.
"""
return pulumi.get(self, "db_version")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
(Updatable) The display name for the Autonomous Container Database.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter(name="infrastructureType")
def infrastructure_type(self) -> pulumi.Output[str]:
"""
The infrastructure type this resource belongs to.
"""
return pulumi.get(self, "infrastructure_type")
@property
@pulumi.getter(name="keyStoreId")
def key_store_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the key store.
"""
return pulumi.get(self, "key_store_id")
@property
@pulumi.getter(name="keyStoreWalletName")
def key_store_wallet_name(self) -> pulumi.Output[str]:
"""
The wallet name for Oracle Key Vault.
"""
return pulumi.get(self, "key_store_wallet_name")
@property
@pulumi.getter(name="kmsKeyId")
def kms_key_id(self) -> pulumi.Output[str]:
"""
The OCID of the key container that is used as the master encryption key in database transparent data encryption (TDE) operations.
"""
return pulumi.get(self, "kms_key_id")
@property
@pulumi.getter(name="lastMaintenanceRunId")
def last_maintenance_run_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last maintenance run.
"""
return pulumi.get(self, "last_maintenance_run_id")
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> pulumi.Output[str]:
"""
Additional information about the current lifecycle state.
"""
return pulumi.get(self, "lifecycle_details")
@property
@pulumi.getter(name="maintenanceWindow")
def maintenance_window(self) -> pulumi.Output['outputs.AutonomousContainerDatabaseMaintenanceWindow']:
"""
The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
"""
return pulumi.get(self, "maintenance_window")
@property
@pulumi.getter(name="maintenanceWindowDetails")
def maintenance_window_details(self) -> pulumi.Output[Optional['outputs.AutonomousContainerDatabaseMaintenanceWindowDetails']]:
"""
(Updatable) The scheduling details for the quarterly maintenance window. Patching and system updates take place during the maintenance window.
"""
return pulumi.get(self, "maintenance_window_details")
@property
@pulumi.getter(name="nextMaintenanceRunId")
def next_maintenance_run_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the next maintenance run.
"""
return pulumi.get(self, "next_maintenance_run_id")
@property
@pulumi.getter(name="patchId")
def patch_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the last patch applied on the system.
"""
return pulumi.get(self, "patch_id")
@property
@pulumi.getter(name="patchModel")
def patch_model(self) -> pulumi.Output[str]:
"""
(Updatable) Database patch model preference.
"""
return pulumi.get(self, "patch_model")
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseBackupConfig")
def peer_autonomous_container_database_backup_config(self) -> pulumi.Output['outputs.AutonomousContainerDatabasePeerAutonomousContainerDatabaseBackupConfig']:
return pulumi.get(self, "peer_autonomous_container_database_backup_config")
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseCompartmentId")
def peer_autonomous_container_database_compartment_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment where the standby Autonomous Container Database will be created.
"""
return pulumi.get(self, "peer_autonomous_container_database_compartment_id")
@property
@pulumi.getter(name="peerAutonomousContainerDatabaseDisplayName")
def peer_autonomous_container_database_display_name(self) -> pulumi.Output[str]:
"""
The display name for the peer Autonomous Container Database.
"""
return pulumi.get(self, "peer_autonomous_container_database_display_name")
@property
@pulumi.getter(name="peerAutonomousExadataInfrastructureId")
def peer_autonomous_exadata_infrastructure_id(self) -> pulumi.Output[str]:
"""
The OCID of the peer Autonomous Exadata Infrastructure for Autonomous Data Guard.
"""
return pulumi.get(self, "peer_autonomous_exadata_infrastructure_id")
@property
@pulumi.getter(name="peerAutonomousVmClusterId")
def peer_autonomous_vm_cluster_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the peer Autonomous VM cluster for Autonomous Data Guard. Required to enable Data Guard.
"""
return pulumi.get(self, "peer_autonomous_vm_cluster_id")
@property
@pulumi.getter(name="peerDbUniqueName")
def peer_db_unique_name(self) -> pulumi.Output[str]:
"""
The `DB_UNIQUE_NAME` of the peer Autonomous Container Database in a Data Guard association.
"""
return pulumi.get(self, "peer_db_unique_name")
@property
@pulumi.getter(name="protectionMode")
def protection_mode(self) -> pulumi.Output[str]:
"""
The protection mode of this Autonomous Data Guard association. For more information, see [Oracle Data Guard Protection Modes](http://docs.oracle.com/database/122/SBYDB/oracle-data-guard-protection-modes.htm#SBYDB02000) in the Oracle Data Guard documentation.
"""
return pulumi.get(self, "protection_mode")
@property
@pulumi.getter
def role(self) -> pulumi.Output[str]:
"""
The role of the Data Guard-enabled Autonomous Container Database.
"""
return pulumi.get(self, "role")
@property
@pulumi.getter(name="rotateKeyTrigger")
def rotate_key_trigger(self) -> pulumi.Output[Optional[bool]]:
"""
(Updatable) An optional property that, when flipped, triggers rotation of the KMS key. It applies only to dedicated container databases, i.e., where `autonomous_exadata_infrastructure_id` is set.
"""
return pulumi.get(self, "rotate_key_trigger")
@property
@pulumi.getter(name="serviceLevelAgreementType")
def service_level_agreement_type(self) -> pulumi.Output[str]:
"""
The service level agreement type of the Autonomous Container Database. The default is STANDARD. For an Autonomous Container Database with Autonomous Data Guard enabled, the specified Autonomous Exadata Infrastructure must be associated with a remote Autonomous Exadata Infrastructure.
"""
return pulumi.get(self, "service_level_agreement_type")
@property
@pulumi.getter(name="standbyMaintenanceBufferInDays")
def standby_maintenance_buffer_in_days(self) -> pulumi.Output[int]:
"""
(Updatable) The scheduling detail for the quarterly maintenance window of the standby Autonomous Container Database. This value represents the number of days before the primary database maintenance schedule.
"""
return pulumi.get(self, "standby_maintenance_buffer_in_days")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
The current state of the Autonomous Container Database.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
"""
The date and time the Autonomous Container Database was created.
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="vaultId")
def vault_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the Oracle Cloud Infrastructure [vault](https://docs.cloud.oracle.com/iaas/Content/KeyManagement/Concepts/keyoverview.htm#concepts).
"""
return pulumi.get(self, "vault_id")
| 62.298733
| 347
| 0.72468
| 11,976
| 103,229
| 5.937375
| 0.028557
| 0.057393
| 0.054341
| 0.05631
| 0.95622
| 0.945391
| 0.936869
| 0.93213
| 0.92334
| 0.899868
| 0
| 0.000724
| 0.184289
| 103,229
| 1,656
| 348
| 62.336353
| 0.843715
| 0.416656
| 0
| 0.795064
| 1
| 0
| 0.178106
| 0.120756
| 0
| 0
| 0
| 0
| 0
| 1
| 0.169528
| false
| 0.001073
| 0.007511
| 0.003219
| 0.281116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d22165d31a0eaf3b8f966c0275ff5119cf77e03a
| 2,854
|
py
|
Python
|
tests/test_resolve_class.py
|
smok-serwis/seqlog
|
17a74a2ba1ec26665560d01b6b04f576e890e29a
|
[
"MIT"
] | 12
|
2016-07-12T22:21:27.000Z
|
2021-10-18T09:31:22.000Z
|
tests/test_resolve_class.py
|
smok-serwis/seqlog
|
17a74a2ba1ec26665560d01b6b04f576e890e29a
|
[
"MIT"
] | 35
|
2016-11-16T01:47:51.000Z
|
2021-09-08T23:24:35.000Z
|
tests/test_resolve_class.py
|
smok-serwis/seqlog
|
17a74a2ba1ec26665560d01b6b04f576e890e29a
|
[
"MIT"
] | 10
|
2017-11-26T20:58:25.000Z
|
2020-11-24T15:06:50.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_resolve_class
----------------------------------
Tests for the `seqlog` module's `_ensure_class` helper.
"""
import json.encoder
from seqlog.structured_logging import _ensure_class
class TestResolveClass(object):
def test_ensure_class_default_json_encoder_str(self):
"""
Verify that the default json.encoder.JSONEncoder class can be resolved from a string.
"""
resolved_class = _ensure_class('json.encoder.JSONEncoder', compatible_class=json.encoder.JSONEncoder)
assert resolved_class == json.encoder.JSONEncoder
def test_ensure_class_default_json_encoder_class(self):
"""
Verify that the default json.encoder.JSONEncoder class can be resolved from itself.
"""
resolved_class = _ensure_class(json.encoder.JSONEncoder, compatible_class=json.encoder.JSONEncoder)
assert resolved_class == json.encoder.JSONEncoder
def test_ensure_class_test_json_encoder_str(self):
"""
Verify that the tests.test_resolve_class.JSONEncoderTest class can be resolved from a string.
"""
resolved_class = _ensure_class('tests.test_resolve_class.JSONEncoderTest', compatible_class=json.encoder.JSONEncoder)
assert resolved_class == JSONEncoderTest
def test_ensure_class_test_json_encoder_class(self):
"""
Verify that the tests.test_resolve_class.JSONEncoderTest class can be resolved from itself.
"""
resolved_class = _ensure_class(JSONEncoderTest, compatible_class=json.encoder.JSONEncoder)
assert resolved_class == JSONEncoderTest
def test_ensure_class_not_an_encoder_str(self):
"""
Verify that the tests.test_resolve_class.NotAnEncoderTest class cannot be resolved from a string.
"""
try:
_ensure_class('tests.test_resolve_class.NotAnEncoderTest', compatible_class=json.encoder.JSONEncoder)
except ValueError:
pass
else:
raise AssertionError('_ensure_class should not permit a non-JSONEncoder class to be resolved if compatible_class is specified')
def test_ensure_class_not_an_encoder_class(self):
"""
Verify that the tests.test_resolve_class.NotAnEncoderTest class cannot be resolved from itself.
"""
try:
_ensure_class(NotAnEncoderTest, compatible_class=json.encoder.JSONEncoder)
except ValueError:
pass
else:
raise AssertionError('_ensure_class should not permit a non-JSONEncoder class to be resolved if compatible_class is specified')
class JSONEncoderTest(json.encoder.JSONEncoder):
    def __init__(self):
        # Do not pass `self` explicitly; `super()` already binds it.
        super().__init__()
class NotAnEncoderTest(object):
    def __init__(self):
        # `object.__init__` takes no extra arguments.
        super().__init__()
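# --- Hedged sketch (assumed behaviour, not seqlog's actual code) ---
# The tests above pin down the contract of `_ensure_class`: accept either a
# class or a dotted-path string, import and resolve the string form, and raise
# ValueError when the result is not a subclass of `compatible_class`. A
# minimal resolver with that contract might look like this (the name
# `resolve_class` is hypothetical):
#
#     import importlib
#
#     def resolve_class(target, compatible_class):
#         if isinstance(target, str):
#             module_name, _, class_name = target.rpartition('.')
#             target = getattr(importlib.import_module(module_name), class_name)
#         if not issubclass(target, compatible_class):
#             raise ValueError('{0!r} is not a subclass of {1!r}'.format(
#                 target, compatible_class))
#         return target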
| 35.234568
| 139
| 0.704975
| 329
| 2,854
| 5.805471
| 0.191489
| 0.103665
| 0.149738
| 0.141361
| 0.870157
| 0.859162
| 0.799476
| 0.763874
| 0.743979
| 0.743979
| 0
| 0.000443
| 0.209881
| 2,854
| 80
| 140
| 35.675
| 0.846563
| 0.252278
| 0
| 0.514286
| 0
| 0
| 0.156517
| 0.052843
| 0
| 0
| 0
| 0
| 0.171429
| 1
| 0.228571
| false
| 0.057143
| 0.057143
| 0
| 0.371429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
d27bfac7359d38e535382a459a744951214f2fcc
| 321
|
py
|
Python
|
parser.py
|
AdrKacz/linear-programming
|
e5fdbc54adcfaa03f9cc647511eccf8a0c4bb455
|
[
"MIT"
] | null | null | null |
parser.py
|
AdrKacz/linear-programming
|
e5fdbc54adcfaa03f9cc647511eccf8a0c4bb455
|
[
"MIT"
] | null | null | null |
parser.py
|
AdrKacz/linear-programming
|
e5fdbc54adcfaa03f9cc647511eccf8a0c4bb455
|
[
"MIT"
] | null | null | null |
# A whitespace-separated boolean schedule, one 'true'/'false' token per slot.
day = 'false true true true true true true true true true true true false false false false false false false false false false false false false false false false false false false false false false false false false false false false'
# Render the schedule as a compact timeline: 'X' marks an active slot, '-' an
# inactive one, with ' | ' separating slots.
day = ' | '.join(day.replace('true', 'X').replace('false', '-').split())
print(day)
| 80.25
| 236
| 0.744548
| 52
| 321
| 4.596154
| 0.153846
| 1.129707
| 1.631799
| 2.09205
| 0.769874
| 0.769874
| 0.769874
| 0.769874
| 0.769874
| 0.769874
| 0
| 0
| 0.155763
| 321
| 4
| 237
| 80.25
| 0.881919
| 0
| 0
| 0
| 0
| 0.333333
| 0.751553
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
737bda2d07460b030f35b8d8298e3adfd8814eb2
| 1,608
|
py
|
Python
|
RasPi_Dev/ros_ws/build/third_packages/teb_local_planner/catkin_generated/pkg.installspace.context.pc.py
|
QianheYu/xtark_driver_dev
|
1708888161cf20c0d1f45c99d0da4467d69c26c8
|
[
"BSD-3-Clause"
] | 1
|
2022-03-11T03:31:15.000Z
|
2022-03-11T03:31:15.000Z
|
RasPi_Dev/ros_ws/build/third_packages/teb_local_planner/catkin_generated/pkg.installspace.context.pc.py
|
bravetree/xtark_driver_dev
|
1708888161cf20c0d1f45c99d0da4467d69c26c8
|
[
"BSD-3-Clause"
] | null | null | null |
RasPi_Dev/ros_ws/build/third_packages/teb_local_planner/catkin_generated/pkg.installspace.context.pc.py
|
bravetree/xtark_driver_dev
|
1708888161cf20c0d1f45c99d0da4467d69c26c8
|
[
"BSD-3-Clause"
] | null | null | null |
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/xtark/ros_ws/install/include;/usr/include/suitesparse;/home/xtark/Software/libg2o-release/g2o/;/home/xtark/Software/libg2o-release".split(';') if "/home/xtark/ros_ws/install/include;/usr/include/suitesparse;/home/xtark/Software/libg2o-release/g2o/;/home/xtark/Software/libg2o-release" != "" else []
PROJECT_CATKIN_DEPENDS = "base_local_planner;costmap_2d;costmap_converter;dynamic_reconfigure;geometry_msgs;interactive_markers;message_runtime;nav_core;nav_msgs;pluginlib;roscpp;std_msgs;tf;tf_conversions;visualization_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lteb_local_planner;-lamd;-lbtf;-lcamd;-lccolamd;-lcholmod;-lcolamd;-lcxsparse;-lklu;-lumfpack;-lspqr;/usr/lib/libg2o_csparse_extension.so;/usr/lib/libg2o_core.so;/usr/lib/libg2o_stuff.so;/usr/lib/libg2o_types_slam2d.so;/usr/lib/libg2o_types_slam3d.so;/usr/lib/libg2o_solver_cholmod.so;/usr/lib/libg2o_solver_pcg.so;/usr/lib/libg2o_solver_csparse.so;/usr/lib/libg2o_incremental.so".split(';') if "-lteb_local_planner;-lamd;-lbtf;-lcamd;-lccolamd;-lcholmod;-lcolamd;-lcxsparse;-lklu;-lumfpack;-lspqr;/usr/lib/libg2o_csparse_extension.so;/usr/lib/libg2o_core.so;/usr/lib/libg2o_stuff.so;/usr/lib/libg2o_types_slam2d.so;/usr/lib/libg2o_types_slam3d.so;/usr/lib/libg2o_solver_cholmod.so;/usr/lib/libg2o_solver_pcg.so;/usr/lib/libg2o_solver_csparse.so;/usr/lib/libg2o_incremental.so" != "" else []
PROJECT_NAME = "teb_local_planner"
PROJECT_SPACE_DIR = "/home/xtark/ros_ws/install"
PROJECT_VERSION = "0.6.13"
| 178.666667
| 828
| 0.812189
| 246
| 1,608
| 5.03252
| 0.349594
| 0.087237
| 0.174475
| 0.180937
| 0.677706
| 0.660743
| 0.660743
| 0.660743
| 0.660743
| 0.660743
| 0
| 0.021033
| 0.024254
| 1,608
| 8
| 829
| 201
| 0.768005
| 0.033582
| 0
| 0
| 1
| 0.714286
| 0.826675
| 0.809278
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
73a75da6c28970f512a33f6b4548738fce6281f7
| 40,119
|
py
|
Python
|
gapic/samplegen_utils/snippet_metadata_pb2.py
|
Kache/gapic-generator-python
|
dcfcb32a60ac6243932304d757e7ef86008d5729
|
[
"Apache-2.0"
] | null | null | null |
gapic/samplegen_utils/snippet_metadata_pb2.py
|
Kache/gapic-generator-python
|
dcfcb32a60ac6243932304d757e7ef86008d5729
|
[
"Apache-2.0"
] | null | null | null |
gapic/samplegen_utils/snippet_metadata_pb2.py
|
Kache/gapic-generator-python
|
dcfcb32a60ac6243932304d757e7ef86008d5729
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: snippet_metadata.proto
# type: ignore
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='snippet_metadata.proto',
package='google.cloud.tools.snippetgen.snippetindex.v1',
syntax='proto3',
serialized_options=b'\252\002-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\312\002-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\352\0022Google::Cloud::Tools::SnippetGen::SnippetIndex::V1',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x16snippet_metadata.proto\x12-google.cloud.tools.snippetgen.snippetindex.v1\"\xa7\x01\n\x05Index\x12T\n\x0e\x63lient_library\x18\x01 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary\x12H\n\x08snippets\x18\x02 \x03(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Snippet\"\x9f\x06\n\x07Snippet\x12\x12\n\nregion_tag\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0c\n\x04\x66ile\x18\x04 \x01(\t\x12I\n\x08language\x18\x05 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12R\n\rclient_method\x18\x06 \x01(\x0b\x32;.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod\x12\x11\n\tcanonical\x18\x07 \x01(\x08\x12M\n\x06origin\x18\x08 \x01(\x0e\x32=.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin\x12P\n\x08segments\x18\t \x03(\x0b\x32>.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment\x1a\xa7\x02\n\x07Segment\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12X\n\x04type\x18\x03 \x01(\x0e\x32J.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType\"\xa5\x01\n\x0bSegmentType\x12\x1c\n\x18SEGMENT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\t\n\x05SHORT\x10\x02\x12\x19\n\x15\x43LIENT_INITIALIZATION\x10\x03\x12\x1a\n\x16REQUEST_INITIALIZATION\x10\x04\x12\x15\n\x11REQUEST_EXECUTION\x10\x05\x12\x15\n\x11RESPONSE_HANDLING\x10\x06\"Q\n\x06Origin\x12\x16\n\x12ORIGIN_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x41PI_DEFINITION\x10\x01\x12\n\n\x06\x43ONFIG\x10\x02\x12\x0f\n\x0bHANDWRITTEN\x10\x03\"\xf2\x02\n\x0c\x43lientMethod\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\r\n\x05\x61sync\x18\x03 \x01(\x08\x12Y\n\nparameters\x18\x04 \x03(\x0b\x32\x45.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter\x12\x13\n\x0bresult_type\x18\x05 \x01(\t\x12L\n\x06\x63lient\x18\x06 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient\x12\x45\n\x06method\x18\x07 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.snippetindex.v1.Method\x1a\'\n\tParameter\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\rServiceClient\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\xbb\x01\n\rClientLibrary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12I\n\x08language\x18\x03 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12@\n\x04\x61pis\x18\x04 \x03(\x0b\x32\x32.google.cloud.tools.snippetgen.snippetindex.v1.Api\"x\n\x06Method\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12G\n\x07service\x18\x03 \x01(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Service\"0\n\x07Service\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\"\n\x03\x41pi\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 '
b'\x01(\t*\xef\x01\n\x08Language\x12\x18\n\x14LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x08\n\x04\x44\x41RT\x10\x03\x12\n\n\x06\x45LIXIR\x10\x04\x12\n\n\x06\x45RLANG\x10\x05\x12\x0b\n\x07\x46_SHARP\x10\x06\x12\x06\n\x02GO\x10\x07\x12\x08\n\x04JAVA\x10\x08\x12\x0e\n\nJAVASCRIPT\x10\t\x12\n\n\x06KOTLIN\x10\n\x12\x07\n\x03PHP\x10\x0b\x12\n\n\x06PYTHON\x10\x0c\x12\x08\n\x04RUBY\x10\r\x12\x08\n\x04RUST\x10\x0e\x12\t\n\x05SWIFT\x10\x0f\x12\x0e\n\nTYPESCRIPT\x10\x10\x12\n\n\x06VB_NET\x10\x11\x42\x95\x01\xaa\x02-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\xca\x02-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\xea\x02\x32Google::Cloud::Tools::SnippetGen::SnippetIndex::V1b\x06proto3'
)
_LANGUAGE = _descriptor.EnumDescriptor(
name='Language',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Language',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='LANGUAGE_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='C_PLUS_PLUS', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='C_SHARP', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DART', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ELIXIR', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ERLANG', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='F_SHARP', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GO', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='JAVA', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='JAVASCRIPT', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='KOTLIN', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PHP', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PYTHON', index=12, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RUBY', index=13, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RUST', index=14, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SWIFT', index=15, number=15,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TYPESCRIPT', index=16, number=16,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='VB_NET', index=17, number=17,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1873,
serialized_end=2112,
)
_sym_db.RegisterEnumDescriptor(_LANGUAGE)
Language = enum_type_wrapper.EnumTypeWrapper(_LANGUAGE)
LANGUAGE_UNSPECIFIED = 0
C_PLUS_PLUS = 1
C_SHARP = 2
DART = 3
ELIXIR = 4
ERLANG = 5
F_SHARP = 6
GO = 7
JAVA = 8
JAVASCRIPT = 9
KOTLIN = 10
PHP = 11
PYTHON = 12
RUBY = 13
RUST = 14
SWIFT = 15
TYPESCRIPT = 16
VB_NET = 17
_SNIPPET_SEGMENT_SEGMENTTYPE = _descriptor.EnumDescriptor(
name='SegmentType',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='SEGMENT_TYPE_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FULL', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHORT', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CLIENT_INITIALIZATION', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='REQUEST_INITIALIZATION', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='REQUEST_EXECUTION', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RESPONSE_HANDLING', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=795,
serialized_end=960,
)
_sym_db.RegisterEnumDescriptor(_SNIPPET_SEGMENT_SEGMENTTYPE)
_SNIPPET_ORIGIN = _descriptor.EnumDescriptor(
name='Origin',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='ORIGIN_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='API_DEFINITION', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONFIG', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='HANDWRITTEN', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=962,
serialized_end=1043,
)
_sym_db.RegisterEnumDescriptor(_SNIPPET_ORIGIN)
_INDEX = _descriptor.Descriptor(
name='Index',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='client_library', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index.client_library', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='snippets', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index.snippets', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=74,
serialized_end=241,
)
_SNIPPET_SEGMENT = _descriptor.Descriptor(
name='Segment',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='start', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.start', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='end', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.end', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_SNIPPET_SEGMENT_SEGMENTTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=665,
serialized_end=960,
)
_SNIPPET = _descriptor.Descriptor(
name='Snippet',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='region_tag', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.region_tag', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='title', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.title', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.description', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='file', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.file', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.language', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_method', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.client_method', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='canonical', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.canonical', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='origin', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.origin', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='segments', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.segments', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_SNIPPET_SEGMENT, ],
enum_types=[
_SNIPPET_ORIGIN,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=244,
serialized_end=1043,
)
_CLIENTMETHOD_PARAMETER = _descriptor.Descriptor(
name='Parameter',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1377,
serialized_end=1416,
)
_CLIENTMETHOD = _descriptor.Descriptor(
name='ClientMethod',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.short_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.full_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='async', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.async', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='parameters', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.parameters', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result_type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.result_type', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.client', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='method', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.method', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_CLIENTMETHOD_PARAMETER, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1046,
serialized_end=1416,
)
_SERVICECLIENT = _descriptor.Descriptor(
name='ServiceClient',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient.short_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient.full_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1418,
serialized_end=1472,
)
_CLIENTLIBRARY = _descriptor.Descriptor(
name='ClientLibrary',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.version', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='language', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.language', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='apis', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.apis', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1475,
serialized_end=1662,
)
_METHOD = _descriptor.Descriptor(
name='Method',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.short_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.full_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='service', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.service', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1664,
serialized_end=1784,
)
_SERVICE = _descriptor.Descriptor(
name='Service',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service.short_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service.full_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1786,
serialized_end=1834,
)
_API = _descriptor.Descriptor(
name='Api',
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api.version', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1836,
serialized_end=1870,
)
_INDEX.fields_by_name['client_library'].message_type = _CLIENTLIBRARY
_INDEX.fields_by_name['snippets'].message_type = _SNIPPET
_SNIPPET_SEGMENT.fields_by_name['type'].enum_type = _SNIPPET_SEGMENT_SEGMENTTYPE
_SNIPPET_SEGMENT.containing_type = _SNIPPET
_SNIPPET_SEGMENT_SEGMENTTYPE.containing_type = _SNIPPET_SEGMENT
_SNIPPET.fields_by_name['language'].enum_type = _LANGUAGE
_SNIPPET.fields_by_name['client_method'].message_type = _CLIENTMETHOD
_SNIPPET.fields_by_name['origin'].enum_type = _SNIPPET_ORIGIN
_SNIPPET.fields_by_name['segments'].message_type = _SNIPPET_SEGMENT
_SNIPPET_ORIGIN.containing_type = _SNIPPET
_CLIENTMETHOD_PARAMETER.containing_type = _CLIENTMETHOD
_CLIENTMETHOD.fields_by_name['parameters'].message_type = _CLIENTMETHOD_PARAMETER
_CLIENTMETHOD.fields_by_name['client'].message_type = _SERVICECLIENT
_CLIENTMETHOD.fields_by_name['method'].message_type = _METHOD
_CLIENTLIBRARY.fields_by_name['language'].enum_type = _LANGUAGE
_CLIENTLIBRARY.fields_by_name['apis'].message_type = _API
_METHOD.fields_by_name['service'].message_type = _SERVICE
DESCRIPTOR.message_types_by_name['Index'] = _INDEX
DESCRIPTOR.message_types_by_name['Snippet'] = _SNIPPET
DESCRIPTOR.message_types_by_name['ClientMethod'] = _CLIENTMETHOD
DESCRIPTOR.message_types_by_name['ServiceClient'] = _SERVICECLIENT
DESCRIPTOR.message_types_by_name['ClientLibrary'] = _CLIENTLIBRARY
DESCRIPTOR.message_types_by_name['Method'] = _METHOD
DESCRIPTOR.message_types_by_name['Service'] = _SERVICE
DESCRIPTOR.message_types_by_name['Api'] = _API
DESCRIPTOR.enum_types_by_name['Language'] = _LANGUAGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Index = _reflection.GeneratedProtocolMessageType('Index', (_message.Message,), {
'DESCRIPTOR': _INDEX,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Index)
})
_sym_db.RegisterMessage(Index)
Snippet = _reflection.GeneratedProtocolMessageType('Snippet', (_message.Message,), {
'Segment': _reflection.GeneratedProtocolMessageType('Segment', (_message.Message,), {
'DESCRIPTOR': _SNIPPET_SEGMENT,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment)
}),
'DESCRIPTOR': _SNIPPET,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet)
})
_sym_db.RegisterMessage(Snippet)
_sym_db.RegisterMessage(Snippet.Segment)
ClientMethod = _reflection.GeneratedProtocolMessageType('ClientMethod', (_message.Message,), {
'Parameter': _reflection.GeneratedProtocolMessageType('Parameter', (_message.Message,), {
'DESCRIPTOR': _CLIENTMETHOD_PARAMETER,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter)
}),
'DESCRIPTOR': _CLIENTMETHOD,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod)
})
_sym_db.RegisterMessage(ClientMethod)
_sym_db.RegisterMessage(ClientMethod.Parameter)
ServiceClient = _reflection.GeneratedProtocolMessageType('ServiceClient', (_message.Message,), {
'DESCRIPTOR': _SERVICECLIENT,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient)
})
_sym_db.RegisterMessage(ServiceClient)
ClientLibrary = _reflection.GeneratedProtocolMessageType('ClientLibrary', (_message.Message,), {
'DESCRIPTOR': _CLIENTLIBRARY,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary)
})
_sym_db.RegisterMessage(ClientLibrary)
Method = _reflection.GeneratedProtocolMessageType('Method', (_message.Message,), {
'DESCRIPTOR': _METHOD,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Method)
})
_sym_db.RegisterMessage(Method)
Service = _reflection.GeneratedProtocolMessageType('Service', (_message.Message,), {
'DESCRIPTOR': _SERVICE,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Service)
})
_sym_db.RegisterMessage(Service)
Api = _reflection.GeneratedProtocolMessageType('Api', (_message.Message,), {
'DESCRIPTOR': _API,
'__module__': 'snippet_metadata_pb2'
# @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Api)
})
_sym_db.RegisterMessage(Api)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
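# --- Illustrative usage (sketch; not part of the generated output) ---
# The classes registered above behave like ordinary protobuf messages, so,
# assuming this module is importable as snippet_metadata_pb2, a message can
# be built and round-tripped with the standard protobuf API:
#
#   index = Index()
#   data = index.SerializeToString()
#   parsed = Index.FromString(data)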
| 47.703924
| 3,689
| 0.698871
| 4,741
| 40,119
| 5.616537
| 0.066231
| 0.053402
| 0.093473
| 0.096139
| 0.775049
| 0.752854
| 0.744292
| 0.727655
| 0.720407
| 0.666592
| 0
| 0.040433
| 0.185648
| 40,119
| 840
| 3,690
| 47.760714
| 0.774601
| 0.029437
| 0
| 0.684143
| 1
| 0.005115
| 0.184624
| 0.147134
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006394
| 0
| 0.006394
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
73abf97eeca11988de83cbdf0294dbe85e12cb08
| 61,346
|
py
|
Python
|
testcases/position_test.py
|
njiang1987/pyalgotrade
|
c1098f0bec47aa13ed628846b38008222aacfd4b
|
[
"Apache-2.0"
] | 1
|
2021-05-13T14:11:02.000Z
|
2021-05-13T14:11:02.000Z
|
testcases/position_test.py
|
njiang1987/pyalgotrade
|
c1098f0bec47aa13ed628846b38008222aacfd4b
|
[
"Apache-2.0"
] | null | null | null |
testcases/position_test.py
|
njiang1987/pyalgotrade
|
c1098f0bec47aa13ed628846b38008222aacfd4b
|
[
"Apache-2.0"
] | null | null | null |
# PyAlgoTrade
#
# Copyright 2011-2015 Gabriel Martin Becedillas Ruiz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. moduleauthor:: Gabriel Martin Becedillas Ruiz <gabriel.becedillas@gmail.com>
"""
import datetime
import pytz
import common
import strategy_test
from pyalgotrade import bar
from pyalgotrade import strategy
from pyalgotrade.strategy import position
from pyalgotrade.barfeed import yahoofeed
from pyalgotrade.barfeed import csvfeed
from pyalgotrade import barfeed
from pyalgotrade.barfeed import membf
from pyalgotrade.barfeed import ninjatraderfeed
from pyalgotrade.utils import dt
from pyalgotrade import marketsession
def load_daily_barfeed(instrument):
barFeed = yahoofeed.Feed()
barFeed.addBarsFromCSV(instrument, common.get_data_file_path("orcl-2000-yahoofinance.csv"))
return barFeed
def us_equities_datetime(*args, **kwargs):
ret = datetime.datetime(*args, **kwargs)
ret = dt.localize(ret, marketsession.USEquities.getTimezone())
return ret
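# Usage sketch: us_equities_datetime(2011, 1, 3, 9, 30) returns a datetime
# localized to the US equities session timezone (assumed here to be
# US/Eastern, as reported by marketsession.USEquities.getTimezone()).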
class TestBarFeed(membf.BarFeed):
def barsHaveAdjClose(self):
raise NotImplementedError()
class BaseTestStrategy(strategy.BacktestingStrategy):
def __init__(self, barFeed, instrument, cash=1000000):
strategy.BacktestingStrategy.__init__(self, barFeed, cash)
self.instrument = instrument
self.orderUpdatedCalls = 0
self.enterOkCalls = 0
self.enterCanceledCalls = 0
self.exitOkCalls = 0
self.exitCanceledCalls = 0
self.posExecutionInfo = []
def onOrderUpdated(self, order):
self.orderUpdatedCalls += 1
def onEnterOk(self, position):
self.enterOkCalls += 1
self.posExecutionInfo.append(position.getEntryOrder().getExecutionInfo())
def onEnterCanceled(self, position):
self.enterCanceledCalls += 1
self.posExecutionInfo.append(position.getEntryOrder().getExecutionInfo())
def onExitOk(self, position):
self.exitOkCalls += 1
self.posExecutionInfo.append(position.getExitOrder().getExecutionInfo())
def onExitCanceled(self, position):
self.exitCanceledCalls += 1
self.posExecutionInfo.append(position.getExitOrder().getExecutionInfo())
class TestStrategy(BaseTestStrategy):
def __init__(self, barFeed, instrument, cash):
BaseTestStrategy.__init__(self, barFeed, instrument, cash)
self.__activePosition = None
        # Maps dates to a list of (method, args, kwargs) tuples.
self.__posEntry = {}
self.__posExit = {}
self.__result = 0
self.__netProfit = 0
self.positions = []
def addPosEntry(self, dateTime, enterMethod, *args, **kwargs):
self.__posEntry.setdefault(dateTime, [])
self.__posEntry[dateTime].append((enterMethod, args, kwargs))
def addPosExitMarket(self, dateTime, *args, **kwargs):
self.__posExit.setdefault(dateTime, [])
self.__posExit[dateTime].append((position.Position.exitMarket, args, kwargs))
def addPosExitLimit(self, dateTime, *args, **kwargs):
self.__posExit.setdefault(dateTime, [])
self.__posExit[dateTime].append((position.Position.exitLimit, args, kwargs))
def addPosExitStop(self, dateTime, *args, **kwargs):
self.__posExit.setdefault(dateTime, [])
self.__posExit[dateTime].append((position.Position.exitStop, args, kwargs))
def addPosExitStopLimit(self, dateTime, *args, **kwargs):
self.__posExit.setdefault(dateTime, [])
self.__posExit[dateTime].append((position.Position.exitStopLimit, args, kwargs))
def getResult(self):
return self.__result
def getNetProfit(self):
return self.__netProfit
def getActivePosition(self):
return self.__activePosition
def onEnterOk(self, position):
# print "Enter ok", position.getEntryOrder().getExecutionInfo().getDateTime()
BaseTestStrategy.onEnterOk(self, position)
if self.__activePosition is None:
self.__activePosition = position
        assert position.isOpen()
        assert len(position.getActiveOrders()) != 0
        assert position.getShares() != 0
def onEnterCanceled(self, position):
# print "Enter canceled", position.getEntryOrder().getExecutionInfo().getDateTime()
BaseTestStrategy.onEnterCanceled(self, position)
self.__activePosition = None
        assert not position.isOpen()
        assert len(position.getActiveOrders()) == 0
        assert position.getShares() == 0
def onExitOk(self, position):
# print "Exit ok", position.getExitOrder().getExecutionInfo().getDateTime()
BaseTestStrategy.onExitOk(self, position)
self.__result += position.getReturn()
self.__netProfit += position.getPnL()
self.__activePosition = None
        assert not position.isOpen()
        assert len(position.getActiveOrders()) == 0
        assert position.getShares() == 0
def onExitCanceled(self, position):
# print "Exit canceled", position.getExitOrder().getExecutionInfo().getDateTime()
BaseTestStrategy.onExitCanceled(self, position)
        assert position.isOpen()
        assert len(position.getActiveOrders()) == 0
        assert position.getShares() != 0
def onBars(self, bars):
dateTime = bars.getDateTime()
# Check position entry.
for meth, args, kwargs in strategy_test.get_by_datetime_or_date(self.__posEntry, dateTime):
if self.__activePosition is not None:
raise Exception("Only one position allowed at a time")
self.__activePosition = meth(*args, **kwargs)
self.positions.append(self.__activePosition)
# Check position exit.
for meth, args, kwargs in strategy_test.get_by_datetime_or_date(self.__posExit, dateTime):
if self.__activePosition is None:
raise Exception("A position was not entered")
meth(self.__activePosition, *args, **kwargs)
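# Usage sketch for TestStrategy: entries and exits are scheduled on bar
# datetimes and replayed by onBars(); the dates below mirror testLongPosition
# further down:
#
#   strat = TestStrategy(barFeed, "orcl", 1000)
#   strat.addPosEntry(datetime.datetime(2000, 11, 3), strat.enterLong, "orcl", 1, False)
#   strat.addPosExitMarket(datetime.datetime(2000, 11, 7))
#   strat.run()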
class EnterAndExitStrategy(BaseTestStrategy):
def onStart(self):
self.position = None
def onBars(self, bars):
if self.position is None:
self.position = self.enterLong(self.instrument, 1)
elif self.position.entryFilled() and not self.position.exitFilled():
self.position.exitMarket()
class DoubleExitStrategy(BaseTestStrategy):
def onStart(self):
self.position = None
self.doubleExit = False
self.doubleExitFailed = False
def onBars(self, bars):
if self.position is None:
self.position = self.enterLong(self.instrument, 1)
elif not self.doubleExit:
self.doubleExit = True
self.position.exitMarket()
try:
self.position.exitMarket()
except Exception:
self.doubleExitFailed = True
class CancelEntryStrategy(BaseTestStrategy):
def onStart(self):
self.position = None
def onBars(self, bars):
if self.position is None:
self.position = self.enterLong(self.instrument, 1)
self.position.cancelEntry()
class ExitEntryNotFilledStrategy(BaseTestStrategy):
def onStart(self):
self.position = None
def onBars(self, bars):
if self.position is None:
self.position = self.enterLong(self.instrument, 1)
self.position.exitMarket()
class ResubmitExitStrategy(BaseTestStrategy):
def onStart(self):
self.position = None
self.exitRequestCanceled = False
def onBars(self, bars):
if self.position is None:
self.position = self.enterLong(self.instrument, 1)
elif self.position.entryFilled() and not self.position.exitFilled():
self.position.exitMarket()
if not self.exitRequestCanceled:
self.position.cancelExit()
self.exitRequestCanceled = True
class BaseTestCase(common.TestCase):
TestInstrument = "doesntmatter"
def loadIntradayBarFeed(self):
fromMonth = 1
toMonth = 1
fromDay = 3
toDay = 3
        barFilter = csvfeed.USEquitiesRTH(us_equities_datetime(2011, fromMonth, fromDay, 0, 0), us_equities_datetime(2011, toMonth, toDay, 23, 59))
barFeed = ninjatraderfeed.Feed(barfeed.Frequency.MINUTE)
barFeed.setBarFilter(barFilter)
barFeed.addBarsFromCSV(BaseTestCase.TestInstrument, common.get_data_file_path("nt-spy-minute-2011.csv"))
return barFeed
def loadDailyBarFeed(self):
barFeed = yahoofeed.Feed()
barFeed.addBarsFromCSV(BaseTestCase.TestInstrument, common.get_data_file_path("orcl-2000-yahoofinance.csv"))
return barFeed
def createStrategy(self, useIntradayBarFeed=False):
if useIntradayBarFeed:
barFeed = self.loadIntradayBarFeed()
else:
barFeed = self.loadDailyBarFeed()
strat = TestStrategy(barFeed, BaseTestCase.TestInstrument, 1000)
return strat
class LongPosTestCase(BaseTestCase):
def testEnterAndExit(self):
instrument = "orcl"
barFeed = load_daily_barfeed(instrument)
strat = EnterAndExitStrategy(barFeed, instrument)
strat.run()
self.assertEqual(strat.position.isOpen(), False)
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(strat.orderUpdatedCalls, 4)
self.assertEqual(len(strat.getActivePositions()), 0)
self.assertEqual(len(strat.getOrderToPosition()), 0)
self.assertEqual(strat.position.getAge().days, 1)
def testCancelEntry(self):
instrument = "orcl"
barFeed = load_daily_barfeed(instrument)
strat = CancelEntryStrategy(barFeed, instrument)
strat.run()
self.assertEqual(strat.position.isOpen(), False)
self.assertEqual(strat.enterOkCalls, 0)
self.assertEqual(strat.enterCanceledCalls, 1)
self.assertEqual(strat.exitOkCalls, 0)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(strat.orderUpdatedCalls, 1)
self.assertEqual(len(strat.getActivePositions()), 0)
self.assertEqual(len(strat.getOrderToPosition()), 0)
self.assertEqual(strat.position.getAge().total_seconds(), 0)
def testExitEntryNotFilled(self):
instrument = "orcl"
barFeed = load_daily_barfeed(instrument)
strat = ExitEntryNotFilledStrategy(barFeed, instrument)
strat.run()
self.assertEqual(strat.position.isOpen(), False)
self.assertEqual(strat.enterOkCalls, 0)
self.assertEqual(strat.enterCanceledCalls, 1)
self.assertEqual(strat.exitOkCalls, 0)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(strat.orderUpdatedCalls, 1)
self.assertEqual(len(strat.getActivePositions()), 0)
self.assertEqual(len(strat.getOrderToPosition()), 0)
self.assertEqual(strat.position.getAge().total_seconds(), 0)
def testDoubleExitFails(self):
instrument = "orcl"
barFeed = load_daily_barfeed(instrument)
strat = DoubleExitStrategy(barFeed, instrument)
strat.run()
self.assertEqual(strat.position.isOpen(), False)
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(strat.orderUpdatedCalls, 4)
self.assertEqual(strat.doubleExit, True)
self.assertEqual(strat.doubleExitFailed, True)
self.assertEqual(len(strat.getActivePositions()), 0)
self.assertEqual(len(strat.getOrderToPosition()), 0)
self.assertEqual(strat.position.getAge().days, 1)
def testResubmitExit(self):
instrument = "orcl"
barFeed = load_daily_barfeed(instrument)
strat = ResubmitExitStrategy(barFeed, instrument)
strat.run()
self.assertEqual(strat.position.isOpen(), False)
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 1)
self.assertEqual(strat.orderUpdatedCalls, 5)
self.assertEqual(len(strat.getActivePositions()), 0)
self.assertEqual(len(strat.getOrderToPosition()), 0)
self.assertEqual(strat.position.getAge().days, 2)
def testLongPosition(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-08,27.37,27.50,24.50,24.81,63040000,24.26 - Sell
# 2000-11-07,28.37,28.44,26.50,26.56,58950800,25.97 - Exit long
# 2000-11-06,30.69,30.69,27.50,27.94,75552300,27.32 - Buy
# 2000-11-03,31.50,31.75,29.50,30.31,65020900,29.64 - Enter long
strat.addPosEntry(datetime.datetime(2000, 11, 3), strat.enterLong, BaseTestCase.TestInstrument, 1, False)
strat.addPosExitMarket(datetime.datetime(2000, 11, 7))
strat.run()
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.orderUpdatedCalls, 4)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + 27.37 - 30.69, 2))
self.assertTrue(round(strat.getResult(), 3) == -0.108)
self.assertTrue(round(strat.getNetProfit(), 2) == round(27.37 - 30.69, 2))
self.assertEqual(strat.positions[0].getAge().days, 2)
def testLongPositionAdjClose(self):
strat = self.createStrategy()
strat.setUseAdjustedValues(True)
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-10-13,31.00,35.75,31.00,35.63,38516200,34.84
# 2000-10-12,63.81,64.87,61.75,63.00,50892400,30.80
# 2000-01-19,56.13,58.25,54.00,57.13,49208800,27.93
# 2000-01-18,107.87,114.50,105.62,111.25,66791200,27.19
strat.addPosEntry(datetime.datetime(2000, 1, 18), strat.enterLong, BaseTestCase.TestInstrument, 1, False)
strat.addPosExitMarket(datetime.datetime(2000, 10, 12))
strat.run()
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + 30.31 - 27.44, 2))
self.assertTrue(round(strat.getResult(), 3) == 0.105)
self.assertTrue(round(strat.getNetProfit(), 2) == round(30.31 - 27.44, 2))
self.assertEqual(strat.positions[0].getAge().days, 268)
def testLongPositionGTC(self):
strat = self.createStrategy()
strat.getBroker().setCash(48)
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-02-07,59.31,60.00,58.42,59.94,44697200,29.30
# 2000-02-04,57.63,58.25,56.81,57.81,40925000,28.26 - sell succeeds
# 2000-02-03,55.38,57.00,54.25,56.69,55540600,27.71 - exit
# 2000-02-02,54.94,56.00,54.00,54.31,63940400,26.55
# 2000-02-01,51.25,54.31,50.00,54.00,57108800,26.40
# 2000-01-31,47.94,50.13,47.06,49.95,68152400,24.42 - buy succeeds
# 2000-01-28,51.50,51.94,46.63,47.38,86400600,23.16 - buy fails
# 2000-01-27,55.81,56.69,50.00,51.81,61061800,25.33 - enterLong
strat.addPosEntry(datetime.datetime(2000, 1, 27), strat.enterLong, BaseTestCase.TestInstrument, 1, True)
strat.addPosExitMarket(datetime.datetime(2000, 2, 3))
strat.run()
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(48 + 57.63 - 47.94, 2))
self.assertTrue(round(strat.getNetProfit(), 2) == round(57.63 - 47.94, 2))
self.assertEqual(strat.positions[0].getAge().days, 4)
def testEntryCanceled(self):
strat = self.createStrategy()
strat.getBroker().setCash(10)
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-01-28,51.50,51.94,46.63,47.38,86400600,23.16 - buy fails
# 2000-01-27,55.81,56.69,50.00,51.81,61061800,25.33 - enterLong
strat.addPosEntry(datetime.datetime(2000, 1, 27), strat.enterLong, BaseTestCase.TestInstrument, 1, False)
strat.run()
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.enterOkCalls, 0)
self.assertEqual(strat.enterCanceledCalls, 1)
self.assertEqual(strat.exitOkCalls, 0)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(strat.getBroker().getCash() == 10)
self.assertTrue(strat.getNetProfit() == 0)
def testUnrealized1(self):
strat = self.createStrategy(True)
# 3/Jan/2011 205300 - Enter long
# 3/Jan/2011 205400 - entry gets filled at 127.21
# 3/Jan/2011 210000 - last bar
strat.addPosEntry(dt.localize(datetime.datetime(2011, 1, 3, 20, 53), pytz.utc), strat.enterLong, BaseTestCase.TestInstrument, 1, True)
strat.run()
self.assertEqual(strat.positions[0].isOpen(), True)
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.exitOkCalls, 0)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertTrue(strat.exitCanceledCalls == 0)
entryPrice = 127.21
lastPrice = strat.getFeed().getCurrentBars()[BaseTestCase.TestInstrument].getClose()
self.assertEqual(strat.getActivePosition().getReturn(), (lastPrice - entryPrice) / entryPrice)
self.assertEqual(strat.getActivePosition().getPnL(), lastPrice - entryPrice)
def testUnrealized2(self):
instrument = "orcl"
barFeed = load_daily_barfeed(instrument)
strat = TestStrategy(barFeed, instrument, 1000)
strat.addPosEntry(datetime.date(2000, 12, 13), strat.enterLong, instrument, 1, False) # Filled on 2000-12-14 at 29.25.
strat.run()
self.assertEqual(strat.positions[0].isOpen(), True)
self.assertEqual(strat.getActivePosition().getPnL(), 29.06 - 29.25)
self.assertEqual(strat.getActivePosition().getReturn(), (29.06 - 29.25) / 29.25)
def testUnrealizedAdjusted(self):
instrument = "orcl"
barFeed = load_daily_barfeed(instrument)
strat = TestStrategy(barFeed, instrument, 1000)
strat.setUseAdjustedValues(True)
strat.addPosEntry(datetime.date(2000, 12, 13), strat.enterLong, instrument, 1, False) # Filled on 2000-12-14 at 28.60
strat.run()
self.assertEqual(strat.positions[0].isOpen(), True)
self.assertEqual(round(strat.getActivePosition().getPnL(), 2), round(28.41 - 28.60, 2))
self.assertEqual(round(strat.getActivePosition().getReturn(), 2), round((28.41 - 28.60) / 28.60, 2))
def testActiveOrdersAndSharesLong(self):
instrument = "orcl"
testCase = self
class Strategy(strategy.BacktestingStrategy):
def __init__(self, barFeed, cash):
strategy.BacktestingStrategy.__init__(self, barFeed, cash)
self.pos = None
def onBars(self, bars):
if self.pos is None:
self.pos = self.enterLong(instrument, 1, True)
# The entry order should be active.
testCase.assertEqual(len(self.pos.getActiveOrders()), 1)
testCase.assertEqual(self.pos.getShares(), 0)
elif self.pos.isOpen():
# At this point the entry order should have been filled.
testCase.assertEqual(len(self.pos.getActiveOrders()), 0)
testCase.assertEqual(self.pos.getShares(), 1)
self.pos.exitMarket()
testCase.assertEqual(len(self.pos.getActiveOrders()), 1)
testCase.assertEqual(self.pos.getShares(), 1)
else:
# The position was closed.
testCase.assertEqual(len(self.pos.getActiveOrders()), 0)
testCase.assertEqual(self.pos.getShares(), 0)
barFeed = load_daily_barfeed(instrument)
strat = Strategy(barFeed, 1000)
strat.run()
self.assertNotEqual(strat.pos, None)
self.assertEqual(strat.pos.isOpen(), False)
# Entered on 2000-01-04 at 115.50
# Exit on 2000-01-05 at 101.62
self.assertEqual(strat.pos.getPnL(), 101.62 - 115.50)
def testIsOpen_NotClosed(self):
strat = self.createStrategy()
strat.addPosEntry(datetime.datetime(2000, 11, 3), strat.enterLong, BaseTestCase.TestInstrument, 1, False)
strat.run()
self.assertTrue(strat.getActivePosition().isOpen())
def testPartialFillGTC1(self):
# Open and close after entry has been fully filled.
instrument = "orcl"
bf = TestBarFeed(bar.Frequency.DAY)
bars = [
bar.BasicBar(datetime.datetime(2000, 1, 1), 10, 10, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 2), 11, 11, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 3), 12, 12, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 4), 13, 13, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 5), 14, 14, 10, 10, 10, 10, bar.Frequency.DAY),
]
bf.addBarsFromSequence(instrument, bars)
strat = TestStrategy(bf, instrument, 1000)
strat.addPosEntry(datetime.datetime(2000, 1, 1), strat.enterLong, instrument, 4, True)
strat.addPosExitMarket(datetime.datetime(2000, 1, 3))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(len(strat.posExecutionInfo), 2)
self.assertEqual(strat.posExecutionInfo[0].getPrice(), 11)
self.assertEqual(strat.posExecutionInfo[0].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[0].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[0].getDateTime(), datetime.datetime(2000, 1, 2))
self.assertEqual(strat.posExecutionInfo[1].getPrice(), 14)
self.assertEqual(strat.posExecutionInfo[1].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[1].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[1].getDateTime(), datetime.datetime(2000, 1, 5))
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.positions[0].getShares(), 0)
self.assertTrue(strat.positions[0].getEntryOrder().isFilled())
self.assertEqual(strat.positions[0].getEntryOrder().getFilled(), 4)
self.assertEqual(strat.positions[0].getEntryOrder().getRemaining(), 0)
self.assertTrue(strat.positions[0].getExitOrder().isFilled())
self.assertEqual(strat.positions[0].getExitOrder().getFilled(), 4)
self.assertEqual(strat.positions[0].getExitOrder().getRemaining(), 0)
def testPartialFillGTC2(self):
# Open and close after entry has been partially filled.
instrument = "orcl"
bf = TestBarFeed(bar.Frequency.DAY)
bars = [
bar.BasicBar(datetime.datetime(2000, 1, 1), 10, 10, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 2), 11, 11, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 3), 12, 12, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 4), 13, 13, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 5), 14, 14, 10, 10, 10, 10, bar.Frequency.DAY),
]
bf.addBarsFromSequence(instrument, bars)
strat = TestStrategy(bf, instrument, 1000)
strat.addPosEntry(datetime.datetime(2000, 1, 1), strat.enterLong, instrument, 4, True)
# Exit the position before the entry order gets completely filled.
strat.addPosExitMarket(datetime.datetime(2000, 1, 2))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(len(strat.posExecutionInfo), 2)
self.assertEqual(strat.posExecutionInfo[0].getPrice(), 11)
self.assertEqual(strat.posExecutionInfo[0].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[0].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[0].getDateTime(), datetime.datetime(2000, 1, 2))
self.assertEqual(strat.posExecutionInfo[1].getPrice(), 12)
self.assertEqual(strat.posExecutionInfo[1].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[1].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[1].getDateTime(), datetime.datetime(2000, 1, 3))
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.positions[0].getShares(), 0)
self.assertTrue(strat.positions[0].getEntryOrder().isCanceled())
self.assertEqual(strat.positions[0].getEntryOrder().getFilled(), 2)
self.assertEqual(strat.positions[0].getEntryOrder().getRemaining(), 2)
self.assertTrue(strat.positions[0].getExitOrder().isFilled())
self.assertEqual(strat.positions[0].getExitOrder().getFilled(), 2)
self.assertEqual(strat.positions[0].getExitOrder().getRemaining(), 0)
def testPartialFillGTC3(self):
class SkipCancelBroker(object):
def __init__(self, decorated):
self.__decorated = decorated
def __getattr__(self, name):
return getattr(self.__decorated, name)
def cancelOrder(self, order):
return
# Open and close after entry has been partially filled.
        # Cancellations get skipped and the position is left open.
        # The idea is to simulate a real scenario where a cancellation gets submitted but the order gets
        # filled before the cancellation gets processed.
instrument = "orcl"
bf = TestBarFeed(bar.Frequency.DAY)
bars = [
bar.BasicBar(datetime.datetime(2000, 1, 1), 10, 10, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 2), 11, 11, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 3), 12, 12, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 4), 13, 13, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 5), 14, 14, 10, 10, 10, 10, bar.Frequency.DAY),
]
bf.addBarsFromSequence(instrument, bars)
strat = TestStrategy(bf, instrument, 1000)
strat._setBroker(SkipCancelBroker(strat.getBroker()))
strat.addPosEntry(datetime.datetime(2000, 1, 1), strat.enterLong, instrument, 4, True)
# Exit the position before the entry order gets completely filled.
strat.addPosExitMarket(datetime.datetime(2000, 1, 2))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 0)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(len(strat.posExecutionInfo), 1)
self.assertEqual(strat.posExecutionInfo[0].getPrice(), 11)
self.assertEqual(strat.posExecutionInfo[0].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[0].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[0].getDateTime(), datetime.datetime(2000, 1, 2))
self.assertEqual(strat.positions[0].isOpen(), True)
self.assertEqual(strat.positions[0].getShares(), 2)
self.assertTrue(strat.positions[0].getEntryOrder().isFilled())
self.assertEqual(strat.positions[0].getEntryOrder().getFilled(), 4)
self.assertEqual(strat.positions[0].getEntryOrder().getRemaining(), 0)
self.assertTrue(strat.positions[0].getExitOrder().isFilled())
self.assertEqual(strat.positions[0].getExitOrder().getFilled(), 2)
self.assertEqual(strat.positions[0].getExitOrder().getRemaining(), 0)
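    # Note (sketch): SkipCancelBroker above works via __getattr__ delegation, so
    # every attribute not defined on the wrapper falls through to the real broker
    # and only cancelOrder is intercepted. The same pattern in isolation:
    #
    #   class NoOpCancelWrapper(object):
    #       def __init__(self, decorated):
    #           self._decorated = decorated
    #       def __getattr__(self, name):
    #           return getattr(self._decorated, name)
    #       def cancelOrder(self, order):
    #           pass  # deliberately ignore cancellations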
def testPartialFillGTC4(self):
class SkipFirstCancelBroker(object):
def __init__(self, decorated):
self.__decorated = decorated
self.__cancelSkipped = False
def __getattr__(self, name):
return getattr(self.__decorated, name)
def cancelOrder(self, order):
if not self.__cancelSkipped:
self.__cancelSkipped = True
return
self.__decorated.cancelOrder(order)
# Open and close after entry has been partially filled.
        # The first cancellation gets skipped and a second exit has to be requested to close the position.
        # The idea is to simulate a real scenario where a cancellation gets submitted but the order gets
        # filled before the cancellation gets processed.
instrument = "orcl"
bf = TestBarFeed(bar.Frequency.DAY)
bars = [
bar.BasicBar(datetime.datetime(2000, 1, 1), 10, 10, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 2), 11, 11, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 3), 12, 12, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 4), 13, 13, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 5), 14, 14, 10, 10, 10, 10, bar.Frequency.DAY),
]
bf.addBarsFromSequence(instrument, bars)
strat = TestStrategy(bf, instrument, 1000)
strat._setBroker(SkipFirstCancelBroker(strat.getBroker()))
strat.addPosEntry(datetime.datetime(2000, 1, 1), strat.enterLong, instrument, 4, True)
# Exit the position before the entry order gets completely filled.
strat.addPosExitMarket(datetime.datetime(2000, 1, 2))
# Retry exit.
strat.addPosExitMarket(datetime.datetime(2000, 1, 4))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(len(strat.posExecutionInfo), 2)
self.assertEqual(strat.posExecutionInfo[0].getPrice(), 11)
self.assertEqual(strat.posExecutionInfo[0].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[0].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[0].getDateTime(), datetime.datetime(2000, 1, 2))
self.assertEqual(strat.posExecutionInfo[1].getPrice(), 14)
self.assertEqual(strat.posExecutionInfo[1].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[1].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[1].getDateTime(), datetime.datetime(2000, 1, 5))
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.positions[0].getShares(), 0)
self.assertTrue(strat.positions[0].getEntryOrder().isFilled())
self.assertEqual(strat.positions[0].getEntryOrder().getFilled(), 4)
self.assertEqual(strat.positions[0].getEntryOrder().getRemaining(), 0)
self.assertTrue(strat.positions[0].getExitOrder().isFilled())
self.assertEqual(strat.positions[0].getExitOrder().getFilled(), 2)
self.assertEqual(strat.positions[0].getExitOrder().getRemaining(), 0)
class ShortPosTestCase(BaseTestCase):
def testActiveOrdersAndSharesShort(self):
instrument = "orcl"
testCase = self
class Strategy(strategy.BacktestingStrategy):
def __init__(self, barFeed, cash):
strategy.BacktestingStrategy.__init__(self, barFeed, cash)
self.pos = None
def onBars(self, bars):
if self.pos is None:
self.pos = self.enterShort(instrument, 1, True)
# The entry order should be active.
testCase.assertEqual(len(self.pos.getActiveOrders()), 1)
testCase.assertEqual(self.pos.getShares(), 0)
elif self.pos.isOpen():
# At this point the entry order should have been filled.
testCase.assertEqual(len(self.pos.getActiveOrders()), 0)
testCase.assertEqual(self.pos.getShares(), -1)
self.pos.exitMarket()
testCase.assertEqual(len(self.pos.getActiveOrders()), 1)
testCase.assertEqual(self.pos.getShares(), -1)
else:
# The position was closed.
testCase.assertEqual(len(self.pos.getActiveOrders()), 0)
testCase.assertEqual(self.pos.getShares(), 0)
barFeed = load_daily_barfeed(instrument)
strat = Strategy(barFeed, 1000)
strat.run()
self.assertNotEqual(strat.pos, None)
self.assertEqual(strat.pos.isOpen(), False)
# Entered on 2000-01-04 at 115.50
# Exit on 2000-01-05 at 101.62
self.assertEqual(strat.pos.getPnL(), 115.50 - 101.62)
def testShortPosition(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-08,27.37,27.50,24.50,24.81,63040000,24.26
# 2000-11-07,28.37,28.44,26.50,26.56,58950800,25.97
# 2000-11-06,30.69,30.69,27.50,27.94,75552300,27.32
# 2000-11-03,31.50,31.75,29.50,30.31,65020900,29.64
strat.addPosEntry(datetime.datetime(2000, 11, 3), strat.enterShort, BaseTestCase.TestInstrument, 1, False)
strat.addPosExitMarket(datetime.datetime(2000, 11, 7))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + 30.69 - 27.37, 2))
self.assertTrue(round(strat.getResult(), 3) == round(0.10817856, 3))
self.assertTrue(round(strat.getNetProfit(), 2) == round(30.69 - 27.37, 2))
def testShortPositionAdjClose(self):
strat = self.createStrategy()
strat.setUseAdjustedValues(True)
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-10-13,31.00,35.75,31.00,35.63,38516200,34.84
# 2000-10-12,63.81,64.87,61.75,63.00,50892400,30.80
# 2000-01-19,56.13,58.25,54.00,57.13,49208800,27.93
# 2000-01-18,107.87,114.50,105.62,111.25,66791200,27.19
strat.addPosEntry(datetime.datetime(2000, 1, 18), strat.enterShort, BaseTestCase.TestInstrument, 1, False)
strat.addPosExitMarket(datetime.datetime(2000, 10, 12))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + 27.44 - 30.31, 2))
self.assertTrue(round(strat.getResult(), 3) == round(-0.104591837, 3))
self.assertTrue(round(strat.getNetProfit(), 2) == round(27.44 - 30.31, 2))
def testShortPositionExitCanceled(self):
strat = self.createStrategy()
strat.getBroker().setCash(0)
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-12-08,30.06,30.62,29.25,30.06,40054100,29.39
# 2000-12-07,29.62,29.94,28.12,28.31,41093000,27.68
# .
# 2000-11-29,23.19,23.62,21.81,22.87,75408100,22.36
# 2000-11-28,23.50,23.81,22.25,22.66,43078300,22.16
strat.addPosEntry(datetime.datetime(2000, 11, 28), strat.enterShort, BaseTestCase.TestInstrument, 1, False)
strat.addPosExitMarket(datetime.datetime(2000, 12, 7))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 1)
self.assertTrue(round(strat.getBroker().getCash(), 2) == 23.19)
self.assertTrue(strat.getNetProfit() == 0)
def testShortPositionExitCanceledAndReSubmitted(self):
strat = self.createStrategy()
strat.getBroker().setCash(0)
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-24,23.31,24.25,23.12,24.12,22446100,23.58
# 2000-11-22,23.62,24.06,22.06,22.31,53317000,21.81 - exitShort that gets filled
# 2000-11-21,24.81,25.62,23.50,23.87,58651900,23.34
# 2000-11-20,24.31,25.87,24.00,24.75,89783100,24.20
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76
# 2000-11-15,28.81,29.44,27.70,28.87,50655200,28.23
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74 - exitShort that gets canceled
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87 - enterShort
strat.addPosEntry(datetime.datetime(2000, 11, 10), strat.enterShort, BaseTestCase.TestInstrument, 1)
strat.addPosExitMarket(datetime.datetime(2000, 11, 14))
strat.addPosExitMarket(datetime.datetime(2000, 11, 22))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 1)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(25.12 - 23.31, 2))
def testUnrealized(self):
strat = self.createStrategy(True)
        # 3/Jan/2011 205300 - Enter short
# 3/Jan/2011 205400 - entry gets filled at 127.21
# 3/Jan/2011 210000 - last bar
strat.addPosEntry(dt.localize(datetime.datetime(2011, 1, 3, 20, 53), pytz.utc), strat.enterShort, BaseTestCase.TestInstrument, 1, True)
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.exitOkCalls, 0)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertTrue(strat.exitCanceledCalls == 0)
entryPrice = 127.21
lastPrice = strat.getFeed().getCurrentBars()[BaseTestCase.TestInstrument].getClose()
self.assertEqual(strat.getActivePosition().getReturn(), (entryPrice - lastPrice) / entryPrice)
self.assertEqual(strat.getActivePosition().getPnL(), entryPrice - lastPrice)
class LimitPosTestCase(BaseTestCase):
def testLong(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17 - exit filled
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76 - exitPosition
# 2000-11-15,28.81,29.44,27.70,28.87,50655200,28.23
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20 - entry filled
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87 - enterLongLimit
strat.addPosEntry(datetime.datetime(2000, 11, 10), strat.enterLongLimit, BaseTestCase.TestInstrument, 25, 1)
strat.addPosExitLimit(datetime.datetime(2000, 11, 16), 29)
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(round(strat.getBroker().getCash(), 2) == 1004)
def testShort(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-24,23.31,24.25,23.12,24.12,22446100,23.58 - exit filled
# 2000-11-22,23.62,24.06,22.06,22.31,53317000,21.81 - exitPosition
# 2000-11-21,24.81,25.62,23.50,23.87,58651900,23.34
# 2000-11-20,24.31,25.87,24.00,24.75,89783100,24.20
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17 - entry filled
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76 - enterShortLimit
strat.addPosEntry(datetime.datetime(2000, 11, 16), strat.enterShortLimit, BaseTestCase.TestInstrument, 29, 1)
strat.addPosExitLimit(datetime.datetime(2000, 11, 22), 24)
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + (29 - 23.31), 2))
def testExitOnEntryNotFilled(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17 - entry canceled
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76 - exitPosition
# 2000-11-15,28.81,29.44,27.70,28.87,50655200,28.23
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87 - enterLongLimit
strat.addPosEntry(datetime.datetime(2000, 11, 10), strat.enterLongLimit, BaseTestCase.TestInstrument, 5, 1, True)
strat.addPosExitLimit(datetime.datetime(2000, 11, 16), 29)
strat.run()
self.assertEqual(strat.enterOkCalls, 0)
self.assertEqual(strat.enterCanceledCalls, 1)
self.assertEqual(strat.exitOkCalls, 0)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(round(strat.getBroker().getCash(), 2) == 1000)
def testExitTwice(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17 - exit filled
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76 - exitPosition using a market order (cancels the previous one).
# 2000-11-15,28.81,29.44,27.70,28.87,50655200,28.23
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74 - exitPosition
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20 - entry filled
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87 - enterLongLimit
strat.addPosEntry(datetime.datetime(2000, 11, 10), strat.enterLongLimit, BaseTestCase.TestInstrument, 25, 1)
strat.addPosExitLimit(datetime.datetime(2000, 11, 14), 100)
strat.addPosExitMarket(datetime.datetime(2000, 11, 16))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 1)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + (26.94 - 25), 2))
def testExitCancelsEntry(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74 - exitPosition (cancels the entry).
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20 -
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87 - enterLongLimit
strat.addPosEntry(datetime.datetime(2000, 11, 10), strat.enterLongLimit, BaseTestCase.TestInstrument, 5, 1, True)
strat.addPosExitLimit(datetime.datetime(2000, 11, 14), 100)
strat.run()
self.assertEqual(strat.enterOkCalls, 0)
self.assertEqual(strat.enterCanceledCalls, 1)
self.assertEqual(strat.exitOkCalls, 0)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(round(strat.getBroker().getCash(), 2) == 1000)
def testEntryGTCExitNotGTC(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-15,28.81,29.44,27.70,28.87,50655200,28.23 - GTC exitPosition (never filled)
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74 -
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20 - entry filled
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87 - enterLongLimit
strat.addPosEntry(datetime.datetime(2000, 11, 10), strat.enterLongLimit, BaseTestCase.TestInstrument, 25, 1, True)
strat.addPosExitLimit(datetime.datetime(2000, 11, 15), 100, False)
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 0)
self.assertTrue(strat.exitCanceledCalls == 1)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 - 25, 2))
class StopPosTestCase(BaseTestCase):
def testLong(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17 - exit filled
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76 - exitPosition
# 2000-11-15,28.81,29.44,27.70,28.87,50655200,28.23
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20 - entry filled
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87 - enterLongStop
strat.addPosEntry(datetime.datetime(2000, 11, 10), strat.enterLongStop, BaseTestCase.TestInstrument, 25, 1)
strat.addPosExitStop(datetime.datetime(2000, 11, 16), 26)
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + (26 - 25.12), 2))
def testShort(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-24,23.31,24.25,23.12,24.12,22446100,23.58 - exit filled
# 2000-11-22,23.62,24.06,22.06,22.31,53317000,21.81 - exitPosition
# 2000-11-21,24.81,25.62,23.50,23.87,58651900,23.34
# 2000-11-20,24.31,25.87,24.00,24.75,89783100,24.20
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17 - entry filled
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76 - enterShortStop
strat.addPosEntry(datetime.datetime(2000, 11, 16), strat.enterShortStop, BaseTestCase.TestInstrument, 27, 1)
strat.addPosExitStop(datetime.datetime(2000, 11, 22), 23)
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + (26.94 - 23.31), 2))
def testPartialFillGTC1(self):
# Open and close after entry has been fully filled.
instrument = "orcl"
bf = TestBarFeed(bar.Frequency.DAY)
bars = [
bar.BasicBar(datetime.datetime(2000, 1, 1), 10, 10, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 2), 11, 11, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 3), 12, 12, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 4), 13, 13, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 5), 14, 14, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 6), 15, 15, 10, 10, 10, 10, bar.Frequency.DAY),
]
bf.addBarsFromSequence(instrument, bars)
strat = TestStrategy(bf, instrument, 1000)
strat.addPosEntry(datetime.datetime(2000, 1, 1), strat.enterLongStop, instrument, 12, 4, True)
strat.addPosExitMarket(datetime.datetime(2000, 1, 4))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(len(strat.posExecutionInfo), 2)
self.assertEqual(strat.posExecutionInfo[0].getPrice(), 12)
self.assertEqual(strat.posExecutionInfo[0].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[0].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[0].getDateTime(), datetime.datetime(2000, 1, 3))
self.assertEqual(strat.posExecutionInfo[1].getPrice(), 15)
self.assertEqual(strat.posExecutionInfo[1].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[1].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[1].getDateTime(), datetime.datetime(2000, 1, 6))
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.positions[0].getShares(), 0)
self.assertTrue(strat.positions[0].getEntryOrder().isFilled())
self.assertEqual(strat.positions[0].getEntryOrder().getFilled(), 4)
self.assertEqual(strat.positions[0].getEntryOrder().getRemaining(), 0)
self.assertTrue(strat.positions[0].getExitOrder().isFilled())
self.assertEqual(strat.positions[0].getExitOrder().getFilled(), 4)
self.assertEqual(strat.positions[0].getExitOrder().getRemaining(), 0)
def testPartialFillGTC2(self):
# Open and close after entry has been partially filled.
instrument = "orcl"
bf = TestBarFeed(bar.Frequency.DAY)
bars = [
bar.BasicBar(datetime.datetime(2000, 1, 1), 10, 10, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 2), 11, 11, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 3), 12, 12, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 4), 13, 13, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 5), 14, 14, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 6), 15, 15, 10, 10, 10, 10, bar.Frequency.DAY),
]
bf.addBarsFromSequence(instrument, bars)
strat = TestStrategy(bf, instrument, 1000)
strat.addPosEntry(datetime.datetime(2000, 1, 1), strat.enterLongStop, instrument, 12, 4, True)
# Exit the position before the entry order gets completely filled.
strat.addPosExitMarket(datetime.datetime(2000, 1, 3))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(len(strat.posExecutionInfo), 2)
self.assertEqual(strat.posExecutionInfo[0].getPrice(), 12)
self.assertEqual(strat.posExecutionInfo[0].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[0].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[0].getDateTime(), datetime.datetime(2000, 1, 3))
self.assertEqual(strat.posExecutionInfo[1].getPrice(), 13)
self.assertEqual(strat.posExecutionInfo[1].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[1].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[1].getDateTime(), datetime.datetime(2000, 1, 4))
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.positions[0].getShares(), 0)
self.assertTrue(strat.positions[0].getEntryOrder().isCanceled())
self.assertEqual(strat.positions[0].getEntryOrder().getFilled(), 2)
self.assertEqual(strat.positions[0].getEntryOrder().getRemaining(), 2)
self.assertTrue(strat.positions[0].getExitOrder().isFilled())
self.assertEqual(strat.positions[0].getExitOrder().getFilled(), 2)
self.assertEqual(strat.positions[0].getExitOrder().getRemaining(), 0)
def testPartialFillGTC3(self):
class SkipCancelBroker(object):
def __init__(self, decorated):
self.__decorated = decorated
def __getattr__(self, name):
return getattr(self.__decorated, name)
def cancelOrder(self, order):
return
# Open and close after entry has been partially filled.
        # Cancellations get skipped and the position is left open.
        # The idea is to simulate a real scenario where a cancellation gets submitted but the order gets
        # filled before the cancellation gets processed.
instrument = "orcl"
bf = TestBarFeed(bar.Frequency.DAY)
bars = [
bar.BasicBar(datetime.datetime(2000, 1, 1), 10, 10, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 2), 11, 11, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 3), 12, 12, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 4), 13, 13, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 5), 14, 14, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 6), 15, 15, 10, 10, 10, 10, bar.Frequency.DAY),
]
bf.addBarsFromSequence(instrument, bars)
strat = TestStrategy(bf, instrument, 1000)
strat._setBroker(SkipCancelBroker(strat.getBroker()))
strat.addPosEntry(datetime.datetime(2000, 1, 1), strat.enterLongStop, instrument, 12, 4, True)
# Exit the position before the entry order gets completely filled.
strat.addPosExitMarket(datetime.datetime(2000, 1, 3))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 0)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(len(strat.posExecutionInfo), 1)
self.assertEqual(strat.posExecutionInfo[0].getPrice(), 12)
self.assertEqual(strat.posExecutionInfo[0].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[0].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[0].getDateTime(), datetime.datetime(2000, 1, 3))
self.assertEqual(strat.positions[0].isOpen(), True)
self.assertEqual(strat.positions[0].getShares(), 2)
self.assertTrue(strat.positions[0].getEntryOrder().isFilled())
self.assertEqual(strat.positions[0].getEntryOrder().getFilled(), 4)
self.assertEqual(strat.positions[0].getEntryOrder().getRemaining(), 0)
self.assertTrue(strat.positions[0].getExitOrder().isFilled())
self.assertEqual(strat.positions[0].getExitOrder().getFilled(), 2)
self.assertEqual(strat.positions[0].getExitOrder().getRemaining(), 0)
def testPartialFillGTC4(self):
class SkipFirstCancelBroker(object):
def __init__(self, decorated):
self.__decorated = decorated
self.__cancelSkipped = False
def __getattr__(self, name):
return getattr(self.__decorated, name)
def cancelOrder(self, order):
if not self.__cancelSkipped:
self.__cancelSkipped = True
return
self.__decorated.cancelOrder(order)
# Open and close after entry has been partially filled.
        # The first cancellation gets skipped and a second exit has to be requested to close the position.
        # The idea is to simulate a real scenario where a cancellation gets submitted but the order gets
        # filled before the cancellation gets processed.
instrument = "orcl"
bf = TestBarFeed(bar.Frequency.DAY)
bars = [
bar.BasicBar(datetime.datetime(2000, 1, 1), 10, 10, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 2), 11, 11, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 3), 12, 12, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 4), 13, 13, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 5), 14, 14, 10, 10, 10, 10, bar.Frequency.DAY),
bar.BasicBar(datetime.datetime(2000, 1, 6), 15, 15, 10, 10, 10, 10, bar.Frequency.DAY),
]
bf.addBarsFromSequence(instrument, bars)
strat = TestStrategy(bf, instrument, 1000)
strat._setBroker(SkipFirstCancelBroker(strat.getBroker()))
strat.addPosEntry(datetime.datetime(2000, 1, 1), strat.enterLongStop, instrument, 12, 4, True)
# Exit the position before the entry order gets completely filled.
strat.addPosExitMarket(datetime.datetime(2000, 1, 3))
# Retry exit.
strat.addPosExitMarket(datetime.datetime(2000, 1, 5))
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertEqual(strat.exitCanceledCalls, 0)
self.assertEqual(len(strat.posExecutionInfo), 2)
self.assertEqual(strat.posExecutionInfo[0].getPrice(), 12)
self.assertEqual(strat.posExecutionInfo[0].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[0].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[0].getDateTime(), datetime.datetime(2000, 1, 3))
self.assertEqual(strat.posExecutionInfo[1].getPrice(), 15)
self.assertEqual(strat.posExecutionInfo[1].getQuantity(), 2)
self.assertEqual(strat.posExecutionInfo[1].getCommission(), 0)
self.assertEqual(strat.posExecutionInfo[1].getDateTime(), datetime.datetime(2000, 1, 6))
self.assertEqual(strat.positions[0].isOpen(), False)
self.assertEqual(strat.positions[0].getShares(), 0)
self.assertTrue(strat.positions[0].getEntryOrder().isFilled())
self.assertEqual(strat.positions[0].getEntryOrder().getFilled(), 4)
self.assertEqual(strat.positions[0].getEntryOrder().getRemaining(), 0)
self.assertTrue(strat.positions[0].getExitOrder().isFilled())
self.assertEqual(strat.positions[0].getExitOrder().getFilled(), 2)
self.assertEqual(strat.positions[0].getExitOrder().getRemaining(), 0)
class StopLimitPosTestCase(BaseTestCase):
def testLong(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17 - exit filled
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76 - exitPosition
# 2000-11-15,28.81,29.44,27.70,28.87,50655200,28.23
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20 - entry filled
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87 - enterLongStopLimit
strat.addPosEntry(datetime.datetime(2000, 11, 10), strat.enterLongStopLimit, BaseTestCase.TestInstrument, 25.5, 24, 1)
strat.addPosExitStopLimit(datetime.datetime(2000, 11, 16), 27, 28)
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + (28 - 24), 2))
def testShort(self):
strat = self.createStrategy()
# Date,Open,High,Low,Close,Volume,Adj Close
# 2000-11-24,23.31,24.25,23.12,24.12,22446100,23.58 - exit filled
# 2000-11-22,23.62,24.06,22.06,22.31,53317000,21.81 - exitPosition
# 2000-11-21,24.81,25.62,23.50,23.87,58651900,23.34
# 2000-11-20,24.31,25.87,24.00,24.75,89783100,24.20
# 2000-11-17,26.94,29.25,25.25,28.81,59639400,28.17 - entry filled
# 2000-11-16,28.75,29.81,27.25,27.37,37990000,26.76 - enterShortStopLimit
# 2000-11-15,28.81,29.44,27.70,28.87,50655200,28.23
# 2000-11-14,27.37,28.50,26.50,28.37,77496700,27.74
# 2000-11-13,25.12,25.87,23.50,24.75,61651900,24.20
# 2000-11-10,26.44,26.94,24.87,25.44,54614100,24.87
strat.addPosEntry(datetime.datetime(2000, 11, 16), strat.enterShortStopLimit, BaseTestCase.TestInstrument, 27, 29, 1)
strat.addPosExitStopLimit(datetime.datetime(2000, 11, 22), 24, 25)
strat.run()
self.assertEqual(strat.enterOkCalls, 1)
self.assertEqual(strat.enterCanceledCalls, 0)
self.assertEqual(strat.exitOkCalls, 1)
self.assertTrue(strat.exitCanceledCalls == 0)
self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + (29 - 24), 2))
| 47.591932
| 149
| 0.65706
| 7,473
| 61,346
| 5.365583
| 0.066506
| 0.099509
| 0.122702
| 0.041898
| 0.859615
| 0.841459
| 0.826596
| 0.812804
| 0.786917
| 0.77098
| 0
| 0.12384
| 0.209826
| 61,346
| 1,288
| 150
| 47.628882
| 0.703346
| 0.162358
| 0
| 0.732807
| 0
| 0
| 0.0042
| 0.001446
| 0
| 0
| 0
| 0
| 0.40248
| 1
| 0.101466
| false
| 0
| 0.015784
| 0.010147
| 0.158963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
73f736a7336a7fecd9c743a7ebf8ef3b3b2ab2db
| 1,050
|
py
|
Python
|
config/settings/__init__.py
|
django-daiquiri/app
|
9c9636d1fc4224c8daa8cdaab3620036d0f09fad
|
[
"Apache-2.0"
] | null | null | null |
config/settings/__init__.py
|
django-daiquiri/app
|
9c9636d1fc4224c8daa8cdaab3620036d0f09fad
|
[
"Apache-2.0"
] | null | null | null |
config/settings/__init__.py
|
django-daiquiri/app
|
9c9636d1fc4224c8daa8cdaab3620036d0f09fad
|
[
"Apache-2.0"
] | 1
|
2017-08-06T12:46:47.000Z
|
2017-08-06T12:46:47.000Z
|
# include settings from daiquiri
from daiquiri.core.settings.django import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.datalink.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.registry.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
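# Example (sketch): a hypothetical local.py that the second try/except above
# would pick up; any standard Django setting can be overridden here:
#
#   DEBUG = True
#   ALLOWED_HOSTS = ['localhost']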
| 30.882353
| 66
| 0.793333
| 140
| 1,050
| 5.95
| 0.285714
| 0.288115
| 0.388956
| 0.436975
| 0.158463
| 0.05042
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132381
| 1,050
| 33
| 67
| 31.818182
| 0.91438
| 0.147619
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.074074
| 0.851852
| 0
| 0.851852
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
fb7b5e46baef50e29a39b33e81fc6d607485204c
| 201
|
py
|
Python
|
imports.py
|
Convergant/TownOfSalemRolelistSimulator
|
d4ef6da4d386fff4ca840a05617ae0b806e6d0e6
|
[
"Unlicense"
] | null | null | null |
imports.py
|
Convergant/TownOfSalemRolelistSimulator
|
d4ef6da4d386fff4ca840a05617ae0b806e6d0e6
|
[
"Unlicense"
] | null | null | null |
imports.py
|
Convergant/TownOfSalemRolelistSimulator
|
d4ef6da4d386fff4ca840a05617ae0b806e6d0e6
|
[
"Unlicense"
] | null | null | null |
from TownOfSalem.RoleList import *
from TownOfSalem.Role import *
from TownOfSalem.Faction import *
from TownOfSalem.Alignment import *
from random import choice, shuffle
from time import time
| 25.125
| 36
| 0.791045
| 25
| 201
| 6.36
| 0.44
| 0.377358
| 0.396226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164179
| 201
| 7
| 37
| 28.714286
| 0.946429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fbc81e4b207e0ff213be178b7351bbc6e08f7402
| 240
|
py
|
Python
|
python/pattern.py
|
iamashiq/Hacktoberfest2021-2
|
9823996e9e97a25fcf70abc6fd6c55e4b60da568
|
[
"MIT"
] | 6
|
2021-10-04T07:57:24.000Z
|
2021-11-15T13:35:21.000Z
|
python/pattern.py
|
iamashiq/Hacktoberfest2021-2
|
9823996e9e97a25fcf70abc6fd6c55e4b60da568
|
[
"MIT"
] | 2
|
2021-10-14T16:55:50.000Z
|
2021-10-31T12:17:20.000Z
|
python/pattern.py
|
iamashiq/Hacktoberfest2021-2
|
9823996e9e97a25fcf70abc6fd6c55e4b60da568
|
[
"MIT"
] | 33
|
2021-10-03T05:00:58.000Z
|
2021-11-05T19:49:19.000Z
|
# *
# **
# ***
# ****
# *****
# ****
# ***
# **
# *
num = 5
for i in range(1, num + 1):
    for j in range(i):
        print("*", end="")
    print()
for i in range(num - 1, 0, -1):
    for j in range(i):
        print("*", end="")
    print()
| 12.631579
| 27
| 0.366667
| 34
| 240
| 2.588235
| 0.352941
| 0.318182
| 0.136364
| 0.25
| 0.943182
| 0.943182
| 0.613636
| 0.613636
| 0.613636
| 0.613636
| 0
| 0.029762
| 0.3
| 240
| 18
| 28
| 13.333333
| 0.494048
| 0.1375
| 0
| 0.666667
| 0
| 0
| 0.030457
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.444444
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
fbc9eaae90c116daa229db7fb496adb30431fcdb
| 1,391
|
py
|
Python
|
program/testy/test_MyEval.py
|
peter2141/IBT
|
8e6b1ac68680152ad744007aaf2b9e0a6d070d80
|
[
"Apache-2.0"
] | null | null | null |
program/testy/test_MyEval.py
|
peter2141/IBT
|
8e6b1ac68680152ad744007aaf2b9e0a6d070d80
|
[
"Apache-2.0"
] | null | null | null |
program/testy/test_MyEval.py
|
peter2141/IBT
|
8e6b1ac68680152ad744007aaf2b9e0a6d070d80
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import sys
sys.path.append('..')
import evaluation
import global_var
class TestMyEval(unittest.TestCase):
def test_myeval_str_right_eq_true(self):
global_var.flags = [False]
evaluation.myEval([['asd', 'test1']], '{} == "test1"', 0)
self.assertEqual(global_var.flags[0], True)
def test_myeval_str_left_eq_true(self):
global_var.flags = [False]
evaluation.myEval([['asd', 'test1']], '"test1" == {}', 0)
self.assertEqual(global_var.flags[0], True)
def test_myeval_str_right_eq_false(self):
global_var.flags = [False]
evaluation.myEval([['asd', 'test2']], '{} == "test1"', 0)
self.assertEqual(global_var.flags[0], False)
def test_myeval_str_left_eq_false(self):
global_var.flags = [False]
evaluation.myEval([['asd', 'test2']], '"test1" == {}', 0)
self.assertEqual(global_var.flags[0], False)
def test_myeval_not_eq_true(self):
global_var.flags = [False]
evaluation.myEval([['1', '2', '3', '4'], ['10', '0', '6']], '{} != {}', 0)
self.assertEqual(global_var.flags[0], True)
def test_myeval_not_eq_false(self):
global_var.flags = [False]
evaluation.myEval([['1', '2', '3', '4'], ['10', '0', '3']], '{} != {}', 0)
self.assertEqual(global_var.flags[0], False)
if __name__ == '__main__':
unittest.main()
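# evaluation.py itself is not shown, so myEval's semantics can only be
# inferred from the assertions above: each input list looks like a
# [name, value1, value2, ...] record, the template is a condition with {}
# placeholders, and the flag records whether the condition holds across all
# combinations of values. A speculative reconstruction consistent with these
# six tests (a guess, not the real implementation):
from itertools import product

def my_eval_sketch(records, template, flag_index, flags):
    # Drop the leading name from each record, then test the condition for
    # every combination of the remaining values.
    value_lists = [record[1:] for record in records]
    flags[flag_index] = all(
        eval(template.format(*(repr(v) for v in combo)))
        for combo in product(*value_lists)
    )

flags = [False]
my_eval_sketch([['asd', 'test1']], '{} == "test1"', 0, flags)
assert flags[0] is True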
| 33.926829
| 82
| 0.601725
| 177
| 1,391
| 4.451977
| 0.20904
| 0.148477
| 0.213198
| 0.137056
| 0.845178
| 0.837563
| 0.790609
| 0.790609
| 0.744924
| 0.728426
| 0
| 0.03252
| 0.20417
| 1,391
| 40
| 83
| 34.775
| 0.679313
| 0
| 0
| 0.375
| 0
| 0
| 0.090582
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 1
| 0.1875
| false
| 0
| 0.125
| 0
| 0.34375
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fbd4274f4a0bdc4a8df04a993523820eb953a6b3
| 5,557
|
py
|
Python
|
tests/test_sns/test_subscriptions.py
|
aimannajjar/moto
|
0f67a74d254127d4a64570145966679ab68d9f3d
|
[
"Apache-2.0"
] | 1
|
2021-08-14T05:58:23.000Z
|
2021-08-14T05:58:23.000Z
|
tests/test_sns/test_subscriptions.py
|
aimannajjar/moto
|
0f67a74d254127d4a64570145966679ab68d9f3d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_sns/test_subscriptions.py
|
aimannajjar/moto
|
0f67a74d254127d4a64570145966679ab68d9f3d
|
[
"Apache-2.0"
] | 1
|
2021-03-01T08:48:09.000Z
|
2021-03-01T08:48:09.000Z
|
from __future__ import unicode_literals
import boto
import sure # noqa
from moto import mock_sns_deprecated
from moto.sns.models import DEFAULT_PAGE_SIZE
@mock_sns_deprecated
def test_creating_subscription():
conn = boto.connect_sns()
conn.create_topic("some-topic")
topics_json = conn.get_all_topics()
topic_arn = topics_json["ListTopicsResponse"]["ListTopicsResult"]["Topics"][0][
"TopicArn"
]
conn.subscribe(topic_arn, "http", "http://example.com/")
subscriptions = conn.get_all_subscriptions()["ListSubscriptionsResponse"][
"ListSubscriptionsResult"
]["Subscriptions"]
subscriptions.should.have.length_of(1)
subscription = subscriptions[0]
subscription["TopicArn"].should.equal(topic_arn)
subscription["Protocol"].should.equal("http")
subscription["SubscriptionArn"].should.contain(topic_arn)
subscription["Endpoint"].should.equal("http://example.com/")
# Now unsubscribe the subscription
conn.unsubscribe(subscription["SubscriptionArn"])
# And there should be zero subscriptions left
subscriptions = conn.get_all_subscriptions()["ListSubscriptionsResponse"][
"ListSubscriptionsResult"
]["Subscriptions"]
subscriptions.should.have.length_of(0)
@mock_sns_deprecated
def test_deleting_subscriptions_by_deleting_topic():
conn = boto.connect_sns()
conn.create_topic("some-topic")
topics_json = conn.get_all_topics()
topic_arn = topics_json["ListTopicsResponse"]["ListTopicsResult"]["Topics"][0][
"TopicArn"
]
conn.subscribe(topic_arn, "http", "http://example.com/")
subscriptions = conn.get_all_subscriptions()["ListSubscriptionsResponse"][
"ListSubscriptionsResult"
]["Subscriptions"]
subscriptions.should.have.length_of(1)
subscription = subscriptions[0]
subscription["TopicArn"].should.equal(topic_arn)
subscription["Protocol"].should.equal("http")
subscription["SubscriptionArn"].should.contain(topic_arn)
subscription["Endpoint"].should.equal("http://example.com/")
# Now delete the topic
conn.delete_topic(topic_arn)
# And there should now be 0 topics
topics_json = conn.get_all_topics()
topics = topics_json["ListTopicsResponse"]["ListTopicsResult"]["Topics"]
topics.should.have.length_of(0)
# And there should be zero subscriptions left
subscriptions = conn.get_all_subscriptions()["ListSubscriptionsResponse"][
"ListSubscriptionsResult"
]["Subscriptions"]
subscriptions.should.have.length_of(0)
@mock_sns_deprecated
def test_getting_subscriptions_by_topic():
conn = boto.connect_sns()
conn.create_topic("topic1")
conn.create_topic("topic2")
topics_json = conn.get_all_topics()
topics = topics_json["ListTopicsResponse"]["ListTopicsResult"]["Topics"]
topic1_arn = topics[0]["TopicArn"]
topic2_arn = topics[1]["TopicArn"]
conn.subscribe(topic1_arn, "http", "http://example1.com/")
conn.subscribe(topic2_arn, "http", "http://example2.com/")
topic1_subscriptions = conn.get_all_subscriptions_by_topic(topic1_arn)[
"ListSubscriptionsByTopicResponse"
]["ListSubscriptionsByTopicResult"]["Subscriptions"]
topic1_subscriptions.should.have.length_of(1)
topic1_subscriptions[0]["Endpoint"].should.equal("http://example1.com/")
@mock_sns_deprecated
def test_subscription_paging():
conn = boto.connect_sns()
conn.create_topic("topic1")
conn.create_topic("topic2")
topics_json = conn.get_all_topics()
topics = topics_json["ListTopicsResponse"]["ListTopicsResult"]["Topics"]
topic1_arn = topics[0]["TopicArn"]
topic2_arn = topics[1]["TopicArn"]
for index in range(DEFAULT_PAGE_SIZE + int(DEFAULT_PAGE_SIZE / 3)):
conn.subscribe(topic1_arn, "email", "email_" + str(index) + "@test.com")
conn.subscribe(topic2_arn, "email", "email_" + str(index) + "@test.com")
all_subscriptions = conn.get_all_subscriptions()
all_subscriptions["ListSubscriptionsResponse"]["ListSubscriptionsResult"][
"Subscriptions"
].should.have.length_of(DEFAULT_PAGE_SIZE)
next_token = all_subscriptions["ListSubscriptionsResponse"][
"ListSubscriptionsResult"
]["NextToken"]
next_token.should.equal(DEFAULT_PAGE_SIZE)
all_subscriptions = conn.get_all_subscriptions(next_token=next_token * 2)
all_subscriptions["ListSubscriptionsResponse"]["ListSubscriptionsResult"][
"Subscriptions"
].should.have.length_of(int(DEFAULT_PAGE_SIZE * 2 / 3))
next_token = all_subscriptions["ListSubscriptionsResponse"][
"ListSubscriptionsResult"
]["NextToken"]
next_token.should.equal(None)
topic1_subscriptions = conn.get_all_subscriptions_by_topic(topic1_arn)
topic1_subscriptions["ListSubscriptionsByTopicResponse"][
"ListSubscriptionsByTopicResult"
]["Subscriptions"].should.have.length_of(DEFAULT_PAGE_SIZE)
next_token = topic1_subscriptions["ListSubscriptionsByTopicResponse"][
"ListSubscriptionsByTopicResult"
]["NextToken"]
next_token.should.equal(DEFAULT_PAGE_SIZE)
topic1_subscriptions = conn.get_all_subscriptions_by_topic(
topic1_arn, next_token=next_token
)
topic1_subscriptions["ListSubscriptionsByTopicResponse"][
"ListSubscriptionsByTopicResult"
]["Subscriptions"].should.have.length_of(int(DEFAULT_PAGE_SIZE / 3))
next_token = topic1_subscriptions["ListSubscriptionsByTopicResponse"][
"ListSubscriptionsByTopicResult"
]["NextToken"]
next_token.should.equal(None)
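# test_subscription_paging depends on NextToken being a plain numeric offset:
# after the first page it equals DEFAULT_PAGE_SIZE, and a None token marks
# the end. A minimal sketch of that offset-based scheme (illustrative; not
# moto's actual implementation):
PAGE_SIZE = 100

def list_page(items, next_token=0):
    page = items[next_token:next_token + PAGE_SIZE]
    new_token = next_token + PAGE_SIZE
    return page, (new_token if new_token < len(items) else None)

items = list(range(PAGE_SIZE + PAGE_SIZE // 3))
page, token = list_page(items)
assert len(page) == PAGE_SIZE and token == PAGE_SIZE
page, token = list_page(items, token)
assert len(page) == PAGE_SIZE // 3 and token is None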
| 37.046667
| 83
| 0.732949
| 578
| 5,557
| 6.769896
| 0.143599
| 0.061334
| 0.035778
| 0.046001
| 0.840787
| 0.807309
| 0.774853
| 0.760542
| 0.750319
| 0.651163
| 0
| 0.010095
| 0.144322
| 5,557
| 149
| 84
| 37.295302
| 0.812829
| 0.032212
| 0
| 0.706897
| 0
| 0
| 0.268107
| 0.129212
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.043103
| 0
| 0.077586
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8370f089fa367b24c0820095fc09424c7dccd272
| 10,621
|
py
|
Python
|
xcape/test/test_core.py
|
rabernat/xcape
|
bb19a68917c70f7ce9e7564f2181699b2fee9d56
|
[
"MIT"
] | null | null | null |
xcape/test/test_core.py
|
rabernat/xcape
|
bb19a68917c70f7ce9e7564f2181699b2fee9d56
|
[
"MIT"
] | null | null | null |
xcape/test/test_core.py
|
rabernat/xcape
|
bb19a68917c70f7ce9e7564f2181699b2fee9d56
|
[
"MIT"
] | null | null | null |
import numpy as np
import xarray as xr
from itertools import combinations
import dask.array as dsa
from ..core import calc_cape
from ..core import calc_srh
from .fixtures import empty_dask_array, dataset_soundings
import pytest
@pytest.fixture(scope='module')
def p_t_td_1d(nlevs=20):
p = np.random.rand(nlevs)
t = np.random.rand(nlevs)
td = np.random.rand(nlevs)
return p, t, td
@pytest.fixture(scope='module')
def p_t_td_3d(nlevs=20, nx=10, ny=5):
p = np.random.rand(nlevs, ny, nx)
t = np.random.rand(nlevs, ny, nx)
td = np.random.rand(nlevs, ny, nx)
return p, t, td
@pytest.fixture(scope='module')
def p_t_td_surface(nx=10, ny=5):
ps = np.random.rand(ny, nx)
ts = np.random.rand(ny, nx)
tds = np.random.rand(ny, nx)
return ps, ts, tds
# surface mode returns cape, cin
# most-unstable mode returns cape, cin, mulev, zmulev
@pytest.mark.parametrize('sourcein,n_returns',
[('surface', 2), ('most-unstable', 4)])
def test_calc_cape_shape_3d(p_t_td_3d, p_t_td_surface, sourcein, n_returns):
p, t, td = p_t_td_3d
ps, ts, tds = p_t_td_surface
result = calc_cape(p, t, td, ps, ts, tds, source=sourcein, method='dummy')
assert len(result) == n_returns
for data in result:
assert data.shape == (1, p.shape[1], p.shape[2])
# tolerance for tests
decimal_cape = 0
decimal_cin = 0
decimal_mulv = 0
decimal_zmulv = 0
def test_calc_surface_cape_model_lev(dataset_soundings):
"""Test Surface Cape based on previously calculated using George Bryans code"""
ds = dataset_soundings
cape, cin = calc_cape(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
source='surface', ml_depth=500., adiabat='pseudo-liquid',
pinc=100.,
method='fortran', vertical_lev='sigma', pres_lev_pos=1)
np.testing.assert_almost_equal(cape[0], ds.SB_CAPE_pinc100.values, decimal_cape)
np.testing.assert_almost_equal(cin[0], ds.SB_CIN_pinc100.values, decimal_cin)
def test_calc_most_unstable_cape_model_lev(dataset_soundings):
"""Test Surface Cape based on previously calculated using George Bryans code"""
ds = dataset_soundings
# in real data, the surface values will come in separate variables
cape, cin, mulv, zmulv = calc_cape(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
source='most-unstable', ml_depth=500., adiabat='pseudo-liquid',
pinc=100.,
method='fortran', vertical_lev='sigma', pres_lev_pos=1)
np.testing.assert_almost_equal(cape[0], ds.MU_CAPE_pinc100.values, decimal_cape)
np.testing.assert_almost_equal(cin[0], ds.MU_CIN_pinc100.values, decimal_cin)
np.testing.assert_almost_equal(mulv[0], ds.MU_lv_pinc100.values.astype('int32'), decimal_mulv)
np.testing.assert_almost_equal(zmulv[0], ds.MU_z_pinc100.values, decimal_zmulv)
def test_calc_mixed_layer_cape_model_lev(dataset_soundings):
"""Test Surface Cape based on previously calculated using George Bryans code"""
ds = dataset_soundings
cape, cin = calc_cape(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
source='mixed-layer', ml_depth=500., adiabat='pseudo-liquid',
pinc=1000.,
method='fortran', vertical_lev='sigma', pres_lev_pos=1)
np.testing.assert_almost_equal(cape[0], ds.ML_CAPE_pinc1000_mldepth500.values, decimal_cape)
np.testing.assert_almost_equal(cin[0], ds.ML_CIN_pinc1000_mldepth500.values, decimal_cin)
def test_calc_surface_cape_pressure_lev(dataset_soundings):
"""Test Surface Cape based on previously calculated using George Bryans code"""
ds = dataset_soundings
cape, cin = calc_cape(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
source='surface', ml_depth=500., adiabat='pseudo-liquid',
pinc=100.,
method='fortran', vertical_lev='pressure',
pres_lev_pos=ds.pressure.values[0]*0+1)
np.testing.assert_almost_equal(cape[0], ds.SB_CAPE_pinc100.values, decimal_cape)
np.testing.assert_almost_equal(cin[0], ds.SB_CIN_pinc100.values, decimal_cin)
def test_calc_most_unstable_cape_pressure_lev(dataset_soundings):
"""Test Surface Cape based on previously calculated using George Bryans code"""
ds = dataset_soundings
# in real data, the surface values will come in separate variables
cape, cin, mulv, zmulv = calc_cape(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
source='most-unstable', ml_depth=500., adiabat='pseudo-liquid',
pinc=100.,
method='fortran', vertical_lev='pressure',
pres_lev_pos=ds.pressure.values[0]*0+1)
np.testing.assert_almost_equal(cape[0], ds.MU_CAPE_pinc100.values, decimal_cape)
np.testing.assert_almost_equal(cin[0], ds.MU_CIN_pinc100.values, decimal_cin)
np.testing.assert_almost_equal(mulv[0], ds.MU_lv_pinc100.values.astype('int32'), decimal_mulv)
np.testing.assert_almost_equal(zmulv[0], ds.MU_z_pinc100.values, decimal_zmulv)
def test_calc_mixed_layer_cape_pressure_lev(dataset_soundings):
"""Test Surface Cape based on previously calculated using George Bryans code"""
ds = dataset_soundings
cape, cin = calc_cape(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
source='mixed-layer', ml_depth=500., adiabat='pseudo-liquid',
pinc=1000.,
method='fortran', vertical_lev='pressure',
pres_lev_pos=ds.pressure.values[0]*0+1)
np.testing.assert_almost_equal(cape[0], ds.ML_CAPE_pinc1000_mldepth500.values, decimal_cape)
np.testing.assert_almost_equal(cin[0], ds.ML_CIN_pinc1000_mldepth500.values, decimal_cin)
def test_calc_srh_model_lev(dataset_soundings):
"""Test SRH code"""
ds = dataset_soundings
srh, rm, lm, mean_6km = calc_srh(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.u_wind_ms.values[1:],
ds.v_wind_ms.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
ds.u_wind_ms.values[0],
ds.v_wind_ms.values[0],
depth = 3000,
vertical_lev='sigma', pres_lev_pos=1,
output_var='all')
srh2 = calc_srh(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.u_wind_ms.values[1:],
ds.v_wind_ms.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
ds.u_wind_ms.values[0],
ds.v_wind_ms.values[0],
depth = 3000,
vertical_lev='sigma', pres_lev_pos=1,
output_var='srh')
np.testing.assert_almost_equal(srh[0], ds.SRH03_model_lev.values, 5)
np.testing.assert_almost_equal(srh2[0], ds.SRH03_model_lev.values, 5)
def test_calc_srh_pressure_lev(dataset_soundings):
"""Test SRH code"""
ds = dataset_soundings
srh, rm, lm, mean_6km = calc_srh(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.u_wind_ms.values[1:],
ds.v_wind_ms.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
ds.u_wind_ms.values[0],
ds.v_wind_ms.values[0],
depth = 3000,
vertical_lev='pressure',
pres_lev_pos=ds.pressure.values[0]*0+1,
output_var='all')
srh2 = calc_srh(ds.pressure.values[1:],
ds.temperature.values[1:],
ds.dewpoint.values[1:],
ds.u_wind_ms.values[1:],
ds.v_wind_ms.values[1:],
ds.pressure.values[0],
ds.temperature.values[0],
ds.dewpoint.values[0],
ds.u_wind_ms.values[0],
ds.v_wind_ms.values[0],
depth = 3000,
vertical_lev='pressure',
pres_lev_pos=ds.pressure.values[0]*0+1,
output_var='srh')
np.testing.assert_almost_equal(srh[0], ds.SRH03_pressure_lev.values, 5)
np.testing.assert_almost_equal(srh2[0], ds.SRH03_pressure_lev.values, 5)
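# Outside the suite, the shape test above translates into a minimal calc_cape
# call. A sketch assuming xcape is installed and that method='dummy' returns
# placeholder arrays of shape (1, ny, nx), as test_calc_cape_shape_3d implies:
import numpy as np
from xcape.core import calc_cape

nlevs, ny, nx = 20, 5, 10
p, t, td = (np.random.rand(nlevs, ny, nx) for _ in range(3))
ps, ts, tds = (np.random.rand(ny, nx) for _ in range(3))
cape, cin = calc_cape(p, t, td, ps, ts, tds, source='surface', method='dummy')
assert cape.shape == (1, ny, nx)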
| 45.780172
| 98
| 0.550796
| 1,295
| 10,621
| 4.301158
| 0.105792
| 0.025853
| 0.0614
| 0.075404
| 0.883124
| 0.857092
| 0.845781
| 0.837522
| 0.831957
| 0.831957
| 0
| 0.039914
| 0.339516
| 10,621
| 231
| 99
| 45.978355
| 0.754098
| 0.066378
| 0
| 0.752688
| 0
| 0
| 0.033431
| 0
| 0
| 0
| 0
| 0
| 0.11828
| 1
| 0.064516
| false
| 0
| 0.043011
| 0
| 0.123656
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83c73f7684d39233cdaa48520318b8aef11d02ea
| 177
|
py
|
Python
|
dislog/util/__init__.py
|
daberg/dislog
|
29c9c980ebf7c1f40e79de946ec1a0ab6ec10130
|
[
"MIT"
] | null | null | null |
dislog/util/__init__.py
|
daberg/dislog
|
29c9c980ebf7c1f40e79de946ec1a0ab6ec10130
|
[
"MIT"
] | null | null | null |
dislog/util/__init__.py
|
daberg/dislog
|
29c9c980ebf7c1f40e79de946ec1a0ab6ec10130
|
[
"MIT"
] | null | null | null |
from dislog.util.debug import debug
from dislog.util.generator import isgenerator
from dislog.util.rand import rand_cyclic_zstar
from dislog.util.rand import rand_zstar_element
| 35.4
| 47
| 0.864407
| 28
| 177
| 5.321429
| 0.392857
| 0.268456
| 0.375839
| 0.241611
| 0.375839
| 0.375839
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090395
| 177
| 4
| 48
| 44.25
| 0.925466
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
83c8982d575994c7a5d63b6c27959b5d3ea221cd
| 151,753
|
py
|
Python
|
testing/buildbot/generate_buildbot_json_unittest.py
|
Ron423c/chromium
|
2edf7b980065b648f8b2a6e52193d83832fe36b7
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 575
|
2015-06-18T23:58:20.000Z
|
2022-03-23T09:32:39.000Z
|
testing/buildbot/generate_buildbot_json_unittest.py
|
Ron423c/chromium
|
2edf7b980065b648f8b2a6e52193d83832fe36b7
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 113
|
2015-05-04T09:58:14.000Z
|
2022-01-31T19:35:03.000Z
|
testing/buildbot/generate_buildbot_json_unittest.py
|
iridium-browser/iridium-browser
|
907e31cf5ce5ad14d832796e3a7c11e496828959
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 52
|
2015-07-14T10:40:50.000Z
|
2022-03-15T01:11:49.000Z
|
#!/usr/bin/env vpython
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for generate_buildbot_json.py."""
import argparse
import contextlib
import json
import os
import unittest
import generate_buildbot_json
from pyfakefs import fake_filesystem_unittest
EMPTY_PYL_FILE = """\
{
}
"""
# Use this value to refer to the directory containing this code.
# The tests use a fake filesystem; Python filesystem calls are monkey-patched
# to use it, which affects os.path.abspath.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
class TestCase(fake_filesystem_unittest.TestCase):
def setUp(self):
self.setUpPyfakefs()
self.fs.cwd = THIS_DIR
self.args = generate_buildbot_json.BBJSONGenerator.parse_args([])
def override_args(self, **kwargs):
for k, v in kwargs.iteritems():
setattr(self.args, k, v)
def create_testing_buildbot_json_file(self, path, contents):
return self.fs.create_file(os.path.join(THIS_DIR, path), contents=contents)
@contextlib.contextmanager
def dump_on_failure(fbb, dump=True):
try:
yield
except:
if dump:
for l in fbb.printed_lines:
print l
raise
class FakeBBGen(generate_buildbot_json.BBJSONGenerator):
def __init__(self,
args,
waterfalls,
test_suites,
luci_milo_cfg,
project_pyl='{"validate_source_side_specs_have_builder": True}',
exceptions=EMPTY_PYL_FILE,
mixins=EMPTY_PYL_FILE,
gn_isolate_map=EMPTY_PYL_FILE,
variants=EMPTY_PYL_FILE):
super(FakeBBGen, self).__init__(args)
pyl_files_dir = args.pyl_files_dir or THIS_DIR
infra_config_dir = args.infra_config_dir
files = {
(pyl_files_dir, 'waterfalls.pyl'): waterfalls,
(pyl_files_dir, 'test_suites.pyl'): test_suites,
(pyl_files_dir, 'test_suite_exceptions.pyl'): exceptions,
(pyl_files_dir, 'mixins.pyl'): mixins,
(pyl_files_dir, 'gn_isolate_map.pyl'): gn_isolate_map,
(pyl_files_dir, 'gn_isolate_map2.pyl'): GPU_TELEMETRY_GN_ISOLATE_MAP,
(pyl_files_dir, 'variants.pyl'): variants,
(infra_config_dir, 'generated/project.pyl'): project_pyl,
(infra_config_dir, 'generated/luci-milo.cfg'): luci_milo_cfg,
(infra_config_dir, 'generated/luci-milo-dev.cfg'): '',
}
for (d, filename), content in files.iteritems():
if content is None:
continue
path = os.path.join(d, filename)
parent = os.path.abspath(os.path.dirname(path))
if not os.path.exists(parent):
os.makedirs(parent)
with open(path, 'w') as f:
f.write(content)
self.printed_lines = []
def print_line(self, line):
self.printed_lines.append(line)
# pragma pylint: disable=arguments-differ
def check_output_file_consistency(self, verbose=False, dump=True):
with dump_on_failure(self, dump=verbose and dump):
super(FakeBBGen, self).check_output_file_consistency(verbose)
# pragma pylint: enable=arguments-differ
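# FakeBBGen leans entirely on pyfakefs: the .pyl and .cfg files written in
# __init__ exist only in the fake filesystem created in TestCase.setUp. A
# minimal standalone sketch of that pattern (illustrative, not Chromium code):
from pyfakefs import fake_filesystem_unittest

class FakeFsDemo(fake_filesystem_unittest.TestCase):
  def setUp(self):
    self.setUpPyfakefs()

  def test_roundtrip(self):
    # create_file builds parent directories in the in-memory filesystem;
    # nothing here touches the real disk.
    self.fs.create_file('/cfg/waterfalls.pyl', contents='[]')
    with open('/cfg/waterfalls.pyl') as f:
      self.assertEqual(f.read(), '[]')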
FOO_GTESTS_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'swarming': {
'dimension_sets': [
{
'kvm': '1',
},
],
},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_WITH_ENABLE_FEATURES_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'foo_tests',
},
'args': [
'--enable-features=Baz',
],
},
},
},
]
"""
FOO_CHROMEOS_TRIGGER_SCRIPT_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'swarming': {
'dimension_sets': [
{
"device_type": "foo_device",
},
],
},
'test_suites': {
'gtest_tests': 'foo_tests',
},
'os_type': 'chromeos',
},
},
},
]
"""
FOO_LINUX_GTESTS_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'os_type': 'linux',
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
COMPOSITION_GTEST_SUITE_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'composition_tests',
},
},
},
},
]
"""
COMPOSITION_GTEST_SUITE_WITH_ARGS_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'composition_tests',
},
'args': [
'--this-is-an-argument',
],
},
},
},
]
"""
FOO_ISOLATED_SCRIPTS_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'isolated_scripts': 'composition_tests',
},
},
},
},
]
"""
FOO_ISOLATED_SCRIPTS_WATERFALL_ANDROID = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'os_type': 'android',
'test_suites': {
'isolated_scripts': 'composition_tests',
},
'use_android_presentation': True,
},
},
},
]
"""
FOO_SCRIPT_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'scripts': 'foo_scripts',
},
},
},
},
]
"""
FOO_SCRIPT_WATERFALL_MACHINE_FORBIDS_SCRIPT_TESTS = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'forbid_script_tests': True,
'test_suites': {
'scripts': 'foo_scripts',
},
},
},
},
]
"""
FOO_SCRIPT_WATERFALL_FORBID_SCRIPT_TESTS = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'forbid_script_tests': True,
'machines': {
'Fake Tester': {
'test_suites': {
'scripts': 'foo_scripts',
},
},
},
},
]
"""
FOO_JUNIT_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'junit_tests': 'composition_tests',
},
},
},
},
]
"""
FOO_GPU_TELEMETRY_TEST_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'os_type': 'win',
'browser_config': 'release',
'swarming': {
'dimension_sets': [
{
'gpu': '10de:1cb3',
},
],
},
'test_suites': {
'gpu_telemetry_tests': 'composition_tests',
},
},
},
},
]
"""
NVIDIA_GPU_TELEMETRY_TEST_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'os_type': 'win',
'browser_config': 'release',
'swarming': {
'dimension_sets': [
{
'gpu': '10de:1cb3-26.21.14.3102',
},
],
},
'test_suites': {
'gpu_telemetry_tests': 'composition_tests',
},
},
},
},
]
"""
INTEL_GPU_TELEMETRY_TEST_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'os_type': 'win',
'browser_config': 'release',
'swarming': {
'dimension_sets': [
{
'gpu': '8086:5912-24.20.100.6286',
},
],
},
'test_suites': {
'gpu_telemetry_tests': 'composition_tests',
},
},
},
},
]
"""
INTEL_UHD_GPU_TELEMETRY_TEST_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'os_type': 'win',
'browser_config': 'release',
'swarming': {
'dimension_sets': [
{
'gpu': '8086:3e92-24.20.100.6286',
},
],
},
'test_suites': {
'gpu_telemetry_tests': 'composition_tests',
},
},
},
},
]
"""
UNKNOWN_TEST_SUITE_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'baz_tests',
},
},
},
},
]
"""
UNKNOWN_TEST_SUITE_TYPE_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'foo_tests',
'foo_test_type': 'foo_tests',
},
},
},
},
]
"""
ANDROID_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Android Builder': {
'additional_compile_targets': [
'bar_test',
],
},
'Fake Android K Tester': {
'additional_compile_targets': [
'bar_test',
],
'swarming': {
'dimension_sets': [
{
'device_os': 'KTU84P',
'device_type': 'hammerhead',
'os': 'Android',
},
],
},
'os_type': 'android',
'skip_merge_script': True,
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
'Fake Android L Tester': {
'swarming': {
'dimension_sets': [
{
'device_os': 'LMY41U',
'device_os_type': 'user',
'device_type': 'hammerhead',
'os': 'Android',
},
],
},
'os_type': 'android',
'skip_merge_script': True,
'skip_output_links': True,
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
'Fake Android M Tester': {
'swarming': {
'dimension_sets': [
{
'device_os': 'MMB29Q',
'device_type': 'bullhead',
'os': 'Android',
},
],
},
'os_type': 'android',
'use_swarming': False,
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
UNKNOWN_BOT_GTESTS_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Unknown Bot': {
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
MATRIX_GTEST_SUITE_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'matrix_tests',
},
},
},
},
]
"""
MATRIX_GTEST_SUITE_WATERFALL_MIXINS = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'mixins': ['dimension_mixin'],
'test_suites': {
'gtest_tests': 'matrix_tests',
},
},
},
},
]
"""
FOO_TEST_SUITE = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'swarming': {
'dimension_sets': [
{
'integrity': 'high',
}
],
'expiration': 120,
},
},
},
},
}
"""
FOO_TEST_SUITE_NO_DIMENSIONS = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
},
},
},
}
"""
FOO_TEST_SUITE_NOT_SORTED = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {},
'a_test': {},
},
},
}
"""
FOO_TEST_SUITE_WITH_ARGS = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'args': [
'--c_arg',
],
},
},
},
}
"""
FOO_TEST_SUITE_WITH_LINUX_ARGS = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'linux_args': [
'--no-xvfb',
],
},
},
},
}
"""
FOO_TEST_SUITE_WITH_ENABLE_FEATURES = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'args': [
'--enable-features=Foo,Bar',
],
},
},
},
}
"""
FOO_TEST_SUITE_WITH_REMOVE_WATERFALL_MIXIN = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'remove_mixins': ['waterfall_mixin'],
'swarming': {
'dimension_sets': [
{
'integrity': 'high',
}
],
'expiration': 120,
},
},
},
},
}
"""
FOO_TEST_SUITE_WITH_REMOVE_BUILDER_MIXIN = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'remove_mixins': ['builder_mixin'],
'swarming': {
'dimension_sets': [
{
'integrity': 'high',
}
],
'expiration': 120,
},
},
},
},
}
"""
FOO_SCRIPT_SUITE = """\
{
'basic_suites': {
'foo_scripts': {
'foo_test': {
'script': 'foo.py',
},
'bar_test': {
'script': 'bar.py',
},
},
},
}
"""
GOOD_COMPOSITION_TEST_SUITES = """\
{
'basic_suites': {
'bar_tests': {
'bar_test': {},
},
'foo_tests': {
'foo_test': {},
},
},
'compound_suites': {
'composition_tests': [
'foo_tests',
'bar_tests',
],
},
}
"""
BAD_COMPOSITION_TEST_SUITES = """\
{
'basic_suites': {
'bar_tests': {},
'foo_tests': {},
},
'compound_suites': {
'buggy_composition_tests': [
'bar_tests',
],
'composition_tests': [
'foo_tests',
'buggy_composition_tests',
],
},
}
"""
CONFLICTING_COMPOSITION_TEST_SUITES = """\
{
'basic_suites': {
'bar_tests': {
'baz_tests': {
'args': [
'--bar',
],
}
},
'foo_tests': {
'baz_tests': {
'args': [
'--foo',
],
}
},
},
'compound_suites': {
'foobar_tests': [
'foo_tests',
'bar_tests',
],
},
}
"""
DUPLICATES_COMPOSITION_TEST_SUITES = """\
{
'basic_suites': {
'bar_tests': {},
'buggy_composition_tests': {},
'foo_tests': {},
},
'compound_suites': {
'bar_tests': [
'foo_tests',
],
'composition_tests': [
'foo_tests',
'buggy_composition_tests',
],
},
}
"""
SCRIPT_SUITE = """\
{
'basic_suites': {
'foo_scripts': {
'foo_test': {
'script': 'foo.py',
},
},
},
}
"""
UNREFED_TEST_SUITE = """\
{
'basic_suites': {
'bar_tests': {},
'foo_tests': {},
},
}
"""
REUSING_TEST_WITH_DIFFERENT_NAME = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {},
'variation_test': {
'args': [
'--variation',
],
'test': 'foo_test',
},
},
},
}
"""
COMPOSITION_SUITE_WITH_NAME_NOT_ENDING_IN_TEST = """\
{
'basic_suites': {
'foo_tests': {
'foo': {},
},
'bar_tests': {
'bar_test': {},
},
},
'compound_suites': {
'composition_tests': [
'foo_tests',
'bar_tests',
],
},
}
"""
COMPOSITION_SUITE_WITH_GPU_ARGS = """\
{
'basic_suites': {
'foo_tests': {
'foo': {
'args': [
'--gpu-vendor-id',
'${gpu_vendor_id}',
'--gpu-device-id',
'${gpu_device_id}',
],
},
},
'bar_tests': {
'bar_test': {},
},
},
'compound_suites': {
'composition_tests': [
'foo_tests',
'bar_tests',
],
},
}
"""
GTEST_AS_ISOLATED_SCRIPT_SUITE = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'script': 'foo.py',
'use_isolated_scripts_api': True,
},
},
},
}
"""
SCRIPT_WITH_ARGS_EXCEPTIONS = """\
{
'foo_test': {
'modifications': {
'Fake Tester': {
'args': ['--fake-arg'],
},
},
},
}
"""
SCRIPT_WITH_ARGS_SWARMING_EXCEPTIONS = """\
{
'foo_test': {
'modifications': {
'Fake Tester': {
'swarming': {
'value': 'exception',
},
},
},
},
}
"""
NO_BAR_TEST_EXCEPTIONS = """\
{
'bar_test': {
'remove_from': [
'Fake Tester',
]
}
}
"""
EMPTY_BAR_TEST_EXCEPTIONS = """\
{
'bar_test': {
}
}
"""
EXCEPTIONS_SORTED = """\
{
'suite_c': {
'modifications': {
'Fake Tester': {
'foo': 'bar',
},
},
},
'suite_d': {
'modifications': {
'Fake Tester': {
'foo': 'baz',
},
},
},
}
"""
EXCEPTIONS_UNSORTED = """\
{
'suite_d': {
'modifications': {
'Fake Tester': {
'foo': 'baz',
},
},
},
'suite_c': {
'modifications': {
'Fake Tester': {
'foo': 'bar',
},
},
},
}
"""
EXCEPTIONS_PER_TEST_UNSORTED = """\
{
'suite_d': {
'modifications': {
'Other Tester': {
'foo': 'baz',
},
'Fake Tester': {
'foo': 'baz',
},
},
},
}
"""
EXCEPTIONS_DUPS_REMOVE_FROM = """\
{
'suite_d': {
'remove_from': [
'Fake Tester',
'Fake Tester',
],
'modifications': {
'Fake Tester': {
'foo': 'baz',
},
},
},
}
"""
FOO_TEST_MODIFICATIONS = """\
{
'foo_test': {
'modifications': {
'Fake Tester': {
'args': [
'--bar',
],
'swarming': {
'hard_timeout': 600,
},
},
},
}
}
"""
FOO_TEST_EXPLICIT_NONE_EXCEPTIONS = """\
{
'foo_test': {
'modifications': {
'Fake Tester': {
'swarming': {
'dimension_sets': [
{
'integrity': None,
},
],
},
},
},
},
}
"""
NONEXISTENT_REMOVAL = """\
{
'foo_test': {
'remove_from': [
'Nonexistent Tester',
]
}
}
"""
NONEXISTENT_MODIFICATION = """\
{
'foo_test': {
'modifications': {
'Nonexistent Tester': {
'args': [],
},
},
}
}
"""
COMPOSITION_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "bar_test"
},
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "foo_test"
}
]
}
}
"""
COMPOSITION_WATERFALL_WITH_ARGS_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--this-is-an-argument"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "bar_test"
},
{
"args": [
"--this-is-an-argument"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "foo_test"
}
]
}
}
"""
VARIATION_GTEST_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
]
},
"test": "foo_test",
"test_id_prefix": "ninja://chrome/test:foo_test/"
},
{
"args": [
"--variation"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "variation_test",
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
]
},
"test": "foo_test",
"test_id_prefix": "ninja://chrome/test:foo_test/"
}
]
}
}
"""
FOO_WATERFALL_GTEST_ISOLATED_SCRIPT_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"script": "foo.py",
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
]
},
"test": "foo_test",
"test_id_prefix": "ninja://chrome/test:foo_test/",
"use_isolated_scripts_api": true
}
]
}
}
"""
COMPOSITION_WATERFALL_FILTERED_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "foo_test"
}
]
}
}
"""
MERGED_ARGS_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--c_arg",
"--bar"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
],
"hard_timeout": 600
},
"test": "foo_test"
}
]
}
}
"""
LINUX_ARGS_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--no-xvfb"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "foo_test"
}
]
}
}
"""
MERGED_ENABLE_FEATURES_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--enable-features=Foo,Bar,Baz"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "foo_test"
}
]
}
}
"""
MODIFIED_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--bar"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"integrity": "high",
"kvm": "1"
}
],
"expiration": 120,
"hard_timeout": 600
},
"test": "foo_test"
}
]
}
}
"""
EXPLICIT_NONE_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
],
"expiration": 120
},
"test": "foo_test"
}
]
}
}
"""
ISOLATED_SCRIPT_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"isolated_scripts": [
{
"isolate_name": "foo_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "foo_test",
"swarming": {
"can_use_on_swarming_builders": true
}
}
]
}
}
"""
ISOLATED_SCRIPT_OUTPUT_ANDROID = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"isolated_scripts": [
{
"args": [
"--gs-results-bucket=chromium-result-details"
],
"isolate_name": "foo_test",
"merge": {
"args": [
"--bucket",
"chromium-result-details",
"--test-name",
"foo_test"
],
"script": \
"//build/android/pylib/results/presentation/test_results_presentation.py"
},
"name": "foo_test",
"swarming": {
"can_use_on_swarming_builders": true,
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": \
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"output_links": [
{
"link": [
"https://luci-logdog.appspot.com/v/?s",
"=android%2Fswarming%2Flogcats%2F",
"${TASK_ID}%2F%2B%2Funified_logcats"
],
"name": "shard #${SHARD_INDEX} logcats"
}
]
}
}
]
}
}
"""
SCRIPT_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"scripts": [
{
"name": "foo_test",
"script": "foo.py"
}
]
}
}
"""
SCRIPT_WITH_ARGS_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"scripts": [
{
"args": [
"--fake-arg"
],
"name": "foo_test",
"script": "foo.py"
}
]
}
}
"""
JUNIT_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"junit_tests": [
{
"name": "foo_test",
"test": "foo_test"
}
]
}
}
"""
GPU_TELEMETRY_TEST_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"isolated_scripts": [
{
"args": [
"foo",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "foo_tests",
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:1cb3"
}
],
"idempotent": false
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
}
]
}
}
"""
NVIDIA_GPU_TELEMETRY_TEST_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"isolated_scripts": [
{
"args": [
"foo",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc",
"--gpu-vendor-id",
"10de",
"--gpu-device-id",
"1cb3"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "foo_tests",
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "10de:1cb3-26.21.14.3102"
}
],
"idempotent": false
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
}
]
}
}
"""
INTEL_GPU_TELEMETRY_TEST_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"isolated_scripts": [
{
"args": [
"foo",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc",
"--gpu-vendor-id",
"8086",
"--gpu-device-id",
"5912"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "foo_tests",
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "8086:5912-24.20.100.6286"
}
],
"idempotent": false
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
}
]
}
}
"""
INTEL_UHD_GPU_TELEMETRY_TEST_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"isolated_scripts": [
{
"args": [
"foo",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc",
"--gpu-vendor-id",
"8086",
"--gpu-device-id",
"3e92"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "foo_tests",
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"gpu": "8086:3e92-24.20.100.6286"
}
],
"idempotent": false
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
}
]
}
}
"""
ANDROID_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Android Builder": {
"additional_compile_targets": [
"bar_test"
]
},
"Fake Android K Tester": {
"additional_compile_targets": [
"bar_test"
],
"gtest_tests": [
{
"args": [
"--gs-results-bucket=chromium-result-details",
"--recover-devices"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": \
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets": [
{
"device_os": "KTU84P",
"device_os_type": "userdebug",
"device_type": "hammerhead",
"integrity": "high",
"os": "Android"
}
],
"expiration": 120,
"output_links": [
{
"link": [
"https://luci-logdog.appspot.com/v/?s",
"=android%2Fswarming%2Flogcats%2F",
"${TASK_ID}%2F%2B%2Funified_logcats"
],
"name": "shard #${SHARD_INDEX} logcats"
}
]
},
"test": "foo_test"
}
]
},
"Fake Android L Tester": {
"gtest_tests": [
{
"args": [
"--gs-results-bucket=chromium-result-details",
"--recover-devices"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": \
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets": [
{
"device_os": "LMY41U",
"device_os_type": "user",
"device_type": "hammerhead",
"integrity": "high",
"os": "Android"
}
],
"expiration": 120
},
"test": "foo_test"
}
]
},
"Fake Android M Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": false
},
"test": "foo_test"
}
]
}
}
"""
CHROMEOS_TRIGGER_SCRIPT_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"device_type": "foo_device",
"integrity": "high"
}
],
"expiration": 120
},
"test": "foo_test",
"trigger_script": {
"script": "//testing/trigger_scripts/chromeos_device_trigger.py"
}
}
]
}
}
"""
GPU_DIMENSIONS_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"isolated_scripts": [
{
"args": [
"foo_test",
"--show-stdout",
"--browser=release",
"--passthrough",
"-v",
"--extra-browser-args=--enable-logging=stderr --js-flags=--expose-gc"
],
"isolate_name": "telemetry_gpu_integration_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_isolated_script_merge.py"
},
"name": "foo_test",
"should_retry_with_patch": false,
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"iama": "mixin",
"integrity": "high"
}
],
"expiration": 120,
"idempotent": false,
"value": "test"
},
"test_id_prefix": "ninja://chrome/test:telemetry_gpu_integration_test/"
}
]
}
}
"""
LUCI_MILO_CFG = """\
consoles {
builders {
name: "buildbucket/luci.chromium.ci/Fake Tester"
}
}
"""
LUCI_MILO_CFG_WATERFALL_SORTING = """\
consoles {
builders {
name: "buildbucket/luci.chromium.ci/Fake Tester"
name: "buildbucket/luci.chromium.ci/Really Fake Tester"
}
}
"""
TEST_SUITE_SORTING_WATERFALL = """
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
},
},
]
"""
TEST_SUITE_SORTED_WATERFALL = """
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
'Really Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
},
},
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.zz.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
'Really Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
},
},
]
"""
TEST_SUITE_UNSORTED_WATERFALL_1 = """
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.zz.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
'Really Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
},
},
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
'Really Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
},
},
]
"""
TEST_SUITE_UNSORTED_WATERFALL_2 = """
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Really Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
'Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
},
},
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.zz.test',
'machines': {
'Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
'Really Fake Tester': {
'test_suites': {
'gtest_tests': 'suite_a',
'scripts': 'suite_b',
},
},
},
},
]
"""
# Note that the suites in basic_suites would be sorted after the suites in
# compound_suites. This is valid though, because each set of suites is sorted
# separately.
# suite_c is a 'gtest_tests' test
# suite_d is a 'scripts' test
TEST_SUITE_SORTED = """\
{
'basic_suites': {
'suite_c': {
'suite_c': {},
},
'suite_d': {
'script': {
'script': 'suite_d.py',
}
},
},
'compound_suites': {
'suite_a': [
'suite_c',
],
'suite_b': [
'suite_d',
],
},
}
"""
TEST_SUITE_UNSORTED_1 = """\
{
'basic_suites': {
'suite_d': {
'a': 'b',
},
'suite_c': {
'a': 'b',
},
},
'compound_suites': {
'suite_a': [
'suite_c',
],
'suite_b': [
'suite_d',
],
},
}
"""
TEST_SUITE_UNSORTED_2 = """\
{
'basic_suites': {
'suite_c': {
'a': 'b',
},
'suite_d': {
'a': 'b',
},
},
'compound_suites': {
'suite_b': [
'suite_c',
],
'suite_a': [
'suite_d',
],
},
}
"""
TEST_SUITE_UNSORTED_3 = """\
{
'basic_suites': {
'suite_d': {
'a': 'b',
},
'suite_c': {
'a': 'b',
},
},
'compound_suites': {
'suite_b': [
'suite_c',
],
'suite_a': [
'suite_d',
],
},
}
"""
TEST_SUITES_SYNTAX_ERROR = """\
{
'basic_suites': {
3: {
'suite_c': {},
},
},
'compound_suites': {},
}
"""
GN_ISOLATE_MAP="""\
{
'foo_test': {
'label': '//chrome/test:foo_test',
'type': 'windowed_test_launcher',
}
}
"""
GPU_TELEMETRY_GN_ISOLATE_MAP="""\
{
'telemetry_gpu_integration_test': {
'label': '//chrome/test:telemetry_gpu_integration_test',
'type': 'script',
}
}
"""
GN_ISOLATE_MAP_KEY_LABEL_MISMATCH="""\
{
'foo_test': {
'label': '//chrome/test:foo_test_tmp',
'type': 'windowed_test_launcher',
}
}
"""
GN_ISOLATE_MAP_USING_IMPLICIT_NAME="""\
{
'foo_test': {
'label': '//chrome/foo_test',
'type': 'windowed_test_launcher',
}
}
"""
class UnitTest(TestCase):
def test_base_generator(self):
# Only needed for complete code coverage.
self.assertRaises(NotImplementedError,
generate_buildbot_json.BaseGenerator(None).generate,
None, None, None, None)
self.assertRaises(NotImplementedError,
generate_buildbot_json.BaseGenerator(None).sort,
None)
def test_good_test_suites_are_ok(self):
fbb = FakeBBGen(self.args, FOO_GTESTS_WATERFALL, FOO_TEST_SUITE,
LUCI_MILO_CFG)
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_good_composition_test_suites_are_ok(self):
fbb = FakeBBGen(self.args, COMPOSITION_GTEST_SUITE_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES, LUCI_MILO_CFG)
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_bad_composition_test_suites_are_caught(self):
fbb = FakeBBGen(self.args, COMPOSITION_GTEST_SUITE_WATERFALL,
BAD_COMPOSITION_TEST_SUITES, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'compound_suites may not refer to.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_composition_test_suites_no_conflicts(self):
fbb = FakeBBGen(self.args, COMPOSITION_GTEST_SUITE_WATERFALL,
CONFLICTING_COMPOSITION_TEST_SUITES, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Conflicting test definitions.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_composition_test_suites_no_duplicate_names(self):
fbb = FakeBBGen(self.args, COMPOSITION_GTEST_SUITE_WATERFALL,
DUPLICATES_COMPOSITION_TEST_SUITES, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'.*may not duplicate basic test suite.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_unknown_test_suites_are_caught(self):
fbb = FakeBBGen(self.args, UNKNOWN_TEST_SUITE_WATERFALL, FOO_TEST_SUITE,
LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Test suite baz_tests from machine.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_unknown_test_suite_types_are_caught(self):
fbb = FakeBBGen(self.args, UNKNOWN_TEST_SUITE_TYPE_WATERFALL,
FOO_TEST_SUITE, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Unknown test suite type foo_test_type.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_unrefed_test_suite_caught(self):
fbb = FakeBBGen(self.args, FOO_GTESTS_WATERFALL, UNREFED_TEST_SUITE,
LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'.*unreferenced.*bar_tests.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_good_waterfall_output(self):
fbb = FakeBBGen(self.args, COMPOSITION_GTEST_SUITE_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES, LUCI_MILO_CFG)
self.create_testing_buildbot_json_file('chromium.test.json',
COMPOSITION_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_reusing_gtest_targets(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
REUSING_TEST_WITH_DIFFERENT_NAME,
LUCI_MILO_CFG,
gn_isolate_map=GN_ISOLATE_MAP)
self.create_testing_buildbot_json_file('chromium.test.json',
VARIATION_GTEST_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_load_multiple_isolate_map_files_with_duplicates(self):
self.args.isolate_map_files = ['gn_isolate_map.pyl']
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
REUSING_TEST_WITH_DIFFERENT_NAME,
LUCI_MILO_CFG,
gn_isolate_map=GN_ISOLATE_MAP)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Duplicate targets in isolate map files.*'):
fbb.load_configuration_files()
def test_load_multiple_isolate_map_files_without_duplicates(self):
self.args.isolate_map_files = ['gn_isolate_map2.pyl']
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
REUSING_TEST_WITH_DIFFERENT_NAME,
LUCI_MILO_CFG,
gn_isolate_map=GN_ISOLATE_MAP)
fbb.load_configuration_files()
isolate_dict = {}
isolate_map_1 = fbb.load_pyl_file('gn_isolate_map.pyl')
isolate_map_2 = fbb.load_pyl_file('gn_isolate_map2.pyl')
isolate_dict.update(isolate_map_1)
isolate_dict.update(isolate_map_2)
self.assertEqual(isolate_dict, fbb.gn_isolate_map)
def test_gn_isolate_map_with_label_mismatch(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
gn_isolate_map=GN_ISOLATE_MAP_KEY_LABEL_MISMATCH)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'key name.*foo_test.*label.*'
'foo_test_tmp.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_gn_isolate_map_using_implicit_gn_name(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
gn_isolate_map=GN_ISOLATE_MAP_USING_IMPLICIT_NAME)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Malformed.*//chrome/foo_test.*for key.*'
'foo_test.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_noop_exception_does_nothing(self):
fbb = FakeBBGen(self.args,
COMPOSITION_GTEST_SUITE_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
exceptions=EMPTY_BAR_TEST_EXCEPTIONS)
self.create_testing_buildbot_json_file('chromium.test.json',
COMPOSITION_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_test_arg_merges(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_ARGS,
LUCI_MILO_CFG,
exceptions=FOO_TEST_MODIFICATIONS)
self.create_testing_buildbot_json_file('chromium.test.json',
MERGED_ARGS_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_enable_features_arg_merges(self):
fbb = FakeBBGen(self.args, FOO_GTESTS_WITH_ENABLE_FEATURES_WATERFALL,
FOO_TEST_SUITE_WITH_ENABLE_FEATURES, LUCI_MILO_CFG)
self.create_testing_buildbot_json_file('chromium.test.json',
MERGED_ENABLE_FEATURES_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_linux_args(self):
fbb = FakeBBGen(self.args, FOO_LINUX_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_LINUX_ARGS, LUCI_MILO_CFG)
self.create_testing_buildbot_json_file('chromium.test.json',
LINUX_ARGS_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_test_filtering(self):
fbb = FakeBBGen(self.args,
COMPOSITION_GTEST_SUITE_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS)
self.create_testing_buildbot_json_file(
'chromium.test.json', COMPOSITION_WATERFALL_FILTERED_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_test_modifications(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
exceptions=FOO_TEST_MODIFICATIONS)
self.create_testing_buildbot_json_file('chromium.test.json',
MODIFIED_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json', MODIFIED_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_test_with_explicit_none(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
exceptions=FOO_TEST_EXPLICIT_NONE_EXCEPTIONS,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file('chromium.test.json',
EXPLICIT_NONE_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
EXPLICIT_NONE_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_isolated_script_tests(self):
fbb = FakeBBGen(self.args,
FOO_ISOLATED_SCRIPTS_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS)
self.create_testing_buildbot_json_file('chromium.test.json',
ISOLATED_SCRIPT_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
ISOLATED_SCRIPT_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_isolated_script_tests(self):
fbb = FakeBBGen(self.args,
FOO_ISOLATED_SCRIPTS_WATERFALL_ANDROID,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS)
self.create_testing_buildbot_json_file('chromium.test.json',
ISOLATED_SCRIPT_OUTPUT_ANDROID)
self.create_testing_buildbot_json_file('chromium.ci.json',
ISOLATED_SCRIPT_OUTPUT_ANDROID)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_script_with_args(self):
fbb = FakeBBGen(self.args,
FOO_SCRIPT_WATERFALL,
SCRIPT_SUITE,
LUCI_MILO_CFG,
exceptions=SCRIPT_WITH_ARGS_EXCEPTIONS)
self.create_testing_buildbot_json_file('chromium.test.json',
SCRIPT_WITH_ARGS_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
SCRIPT_WITH_ARGS_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_script(self):
fbb = FakeBBGen(self.args,
FOO_SCRIPT_WATERFALL,
FOO_SCRIPT_SUITE,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS)
self.create_testing_buildbot_json_file('chromium.test.json', SCRIPT_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json', SCRIPT_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_script_machine_forbids_scripts(self):
fbb = FakeBBGen(self.args,
FOO_SCRIPT_WATERFALL_MACHINE_FORBIDS_SCRIPT_TESTS,
FOO_SCRIPT_SUITE,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Attempted to generate a script test on tester.*'):
fbb.check_output_file_consistency(verbose=True)
def test_script_waterfall_forbids_scripts(self):
fbb = FakeBBGen(self.args,
FOO_SCRIPT_WATERFALL_FORBID_SCRIPT_TESTS,
FOO_SCRIPT_SUITE,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Attempted to generate a script test on tester.*'):
fbb.check_output_file_consistency(verbose=True)
def test_junit_tests(self):
fbb = FakeBBGen(self.args,
FOO_JUNIT_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS)
self.create_testing_buildbot_json_file('chromium.test.json', JUNIT_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json', JUNIT_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_gpu_telemetry_tests(self):
fbb = FakeBBGen(self.args,
FOO_GPU_TELEMETRY_TEST_WATERFALL,
COMPOSITION_SUITE_WITH_NAME_NOT_ENDING_IN_TEST,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS,
gn_isolate_map=GPU_TELEMETRY_GN_ISOLATE_MAP)
self.create_testing_buildbot_json_file('chromium.test.json',
GPU_TELEMETRY_TEST_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
GPU_TELEMETRY_TEST_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_nvidia_gpu_telemetry_tests(self):
fbb = FakeBBGen(self.args,
NVIDIA_GPU_TELEMETRY_TEST_WATERFALL,
COMPOSITION_SUITE_WITH_GPU_ARGS,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS,
gn_isolate_map=GPU_TELEMETRY_GN_ISOLATE_MAP)
self.create_testing_buildbot_json_file('chromium.test.json',
NVIDIA_GPU_TELEMETRY_TEST_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
NVIDIA_GPU_TELEMETRY_TEST_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_intel_gpu_telemetry_tests(self):
fbb = FakeBBGen(self.args,
INTEL_GPU_TELEMETRY_TEST_WATERFALL,
COMPOSITION_SUITE_WITH_GPU_ARGS,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS,
gn_isolate_map=GPU_TELEMETRY_GN_ISOLATE_MAP)
self.create_testing_buildbot_json_file('chromium.test.json',
INTEL_GPU_TELEMETRY_TEST_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
INTEL_GPU_TELEMETRY_TEST_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_intel_uhd_gpu_telemetry_tests(self):
fbb = FakeBBGen(self.args,
INTEL_UHD_GPU_TELEMETRY_TEST_WATERFALL,
COMPOSITION_SUITE_WITH_GPU_ARGS,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS,
gn_isolate_map=GPU_TELEMETRY_GN_ISOLATE_MAP)
self.create_testing_buildbot_json_file('chromium.test.json',
INTEL_UHD_GPU_TELEMETRY_TEST_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
INTEL_UHD_GPU_TELEMETRY_TEST_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_gtest_as_isolated_Script(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
GTEST_AS_ISOLATED_SCRIPT_SUITE,
LUCI_MILO_CFG,
gn_isolate_map=GN_ISOLATE_MAP)
self.create_testing_buildbot_json_file(
'chromium.test.json', FOO_WATERFALL_GTEST_ISOLATED_SCRIPT_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', FOO_WATERFALL_GTEST_ISOLATED_SCRIPT_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_ungenerated_output_files_are_caught(self):
fbb = FakeBBGen(self.args,
COMPOSITION_GTEST_SUITE_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
exceptions=NO_BAR_TEST_EXCEPTIONS)
self.create_testing_buildbot_json_file(
'chromium.test.json', '\n' + COMPOSITION_WATERFALL_FILTERED_OUTPUT)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_output_file_consistency(verbose=True, dump=False)
joined_lines = ' '.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, 'File chromium.test.json did not have the following'
' expected contents:.*')
self.assertRegexpMatches(joined_lines, '.*--- expected.*')
self.assertRegexpMatches(joined_lines, '.*\+\+\+ current.*')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
def test_android_output_options(self):
fbb = FakeBBGen(self.args, ANDROID_WATERFALL, FOO_TEST_SUITE, LUCI_MILO_CFG)
self.create_testing_buildbot_json_file('chromium.test.json',
ANDROID_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
ANDROID_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_nonexistent_removal_raises(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
exceptions=NONEXISTENT_REMOVAL)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'The following nonexistent machines.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_nonexistent_modification_raises(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
exceptions=NONEXISTENT_MODIFICATION)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'The following nonexistent machines.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_waterfall_args(self):
fbb = FakeBBGen(self.args, COMPOSITION_GTEST_SUITE_WITH_ARGS_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES, LUCI_MILO_CFG)
self.create_testing_buildbot_json_file(
'chromium.test.json', COMPOSITION_WATERFALL_WITH_ARGS_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', COMPOSITION_WATERFALL_WITH_ARGS_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_chromeos_trigger_script_output(self):
fbb = FakeBBGen(self.args, FOO_CHROMEOS_TRIGGER_SCRIPT_WATERFALL,
FOO_TEST_SUITE, LUCI_MILO_CFG)
self.create_testing_buildbot_json_file('chromium.test.json',
CHROMEOS_TRIGGER_SCRIPT_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
CHROMEOS_TRIGGER_SCRIPT_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_relative_pyl_file_dir(self):
self.override_args(pyl_files_dir='relative/path/', waterfall_filters=[])
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
REUSING_TEST_WITH_DIFFERENT_NAME,
LUCI_MILO_CFG,
gn_isolate_map=GN_ISOLATE_MAP)
fbb.check_input_file_consistency(verbose=True)
self.create_testing_buildbot_json_file('relative/path/chromium.test.json',
VARIATION_GTEST_OUTPUT)
self.create_testing_buildbot_json_file('relative/path/chromium.ci.json',
VARIATION_GTEST_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_nonexistent_bot_raises(self):
fbb = FakeBBGen(self.args, UNKNOWN_BOT_GTESTS_WATERFALL, FOO_TEST_SUITE,
LUCI_MILO_CFG)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_nonexistent_bot_raises_when_no_project_pyl_exists(self):
fbb = FakeBBGen(self.args,
UNKNOWN_BOT_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
project_pyl=None)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_nonexistent_bot_does_not_raise_when_validation_disabled(self):
fbb = FakeBBGen(
self.args,
UNKNOWN_BOT_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
project_pyl='{"validate_source_side_specs_have_builder": False}')
fbb.check_input_file_consistency(verbose=True)
def test_waterfalls_must_be_sorted(self):
fbb = FakeBBGen(self.args, TEST_SUITE_SORTED_WATERFALL, TEST_SUITE_SORTED,
LUCI_MILO_CFG_WATERFALL_SORTING)
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
fbb = FakeBBGen(self.args, TEST_SUITE_UNSORTED_WATERFALL_1,
TEST_SUITE_SORTED, LUCI_MILO_CFG_WATERFALL_SORTING)
with self.assertRaisesRegexp(
generate_buildbot_json.BBGenErr,
'The following files have invalid keys: waterfalls.pyl'):
fbb.check_input_file_consistency(verbose=True)
joined_lines = '\n'.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, '.*\+ chromium\..*test.*')
self.assertRegexpMatches(
joined_lines, '.*\- chromium\..*test.*')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
fbb = FakeBBGen(self.args, TEST_SUITE_UNSORTED_WATERFALL_2,
TEST_SUITE_SORTED, LUCI_MILO_CFG_WATERFALL_SORTING)
with self.assertRaisesRegexp(
generate_buildbot_json.BBGenErr,
'The following files have invalid keys: waterfalls.pyl'):
fbb.check_input_file_consistency(verbose=True)
joined_lines = ' '.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, '.*\+.*Fake Tester.*')
self.assertRegexpMatches(
joined_lines, '.*\-.*Fake Tester.*')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
def test_test_suite_exceptions_must_be_sorted(self):
fbb = FakeBBGen(self.args,
TEST_SUITE_SORTING_WATERFALL,
TEST_SUITE_SORTED,
LUCI_MILO_CFG,
exceptions=EXCEPTIONS_SORTED)
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
fbb = FakeBBGen(self.args,
TEST_SUITE_SORTING_WATERFALL,
TEST_SUITE_SORTED,
LUCI_MILO_CFG,
exceptions=EXCEPTIONS_DUPS_REMOVE_FROM)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_input_file_consistency(verbose=True)
joined_lines = ' '.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, '.*\- Fake Tester.*')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
def test_test_suite_exceptions_no_dups_remove_from(self):
fbb = FakeBBGen(self.args,
TEST_SUITE_SORTING_WATERFALL,
TEST_SUITE_SORTED,
LUCI_MILO_CFG,
exceptions=EXCEPTIONS_SORTED)
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
fbb = FakeBBGen(self.args,
TEST_SUITE_SORTING_WATERFALL,
TEST_SUITE_SORTED,
LUCI_MILO_CFG,
exceptions=EXCEPTIONS_PER_TEST_UNSORTED)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_input_file_consistency(verbose=True)
joined_lines = ' '.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, '.*\+ Fake Tester.*')
self.assertRegexpMatches(
joined_lines, '.*\- Fake Tester.*')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
def test_test_suite_exceptions_per_test_must_be_sorted(self):
fbb = FakeBBGen(self.args,
TEST_SUITE_SORTING_WATERFALL,
TEST_SUITE_SORTED,
LUCI_MILO_CFG,
exceptions=EXCEPTIONS_SORTED)
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
fbb = FakeBBGen(self.args,
TEST_SUITE_SORTING_WATERFALL,
TEST_SUITE_SORTED,
LUCI_MILO_CFG,
exceptions=EXCEPTIONS_UNSORTED)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_input_file_consistency(verbose=True)
joined_lines = ' '.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, '.*\+ suite_.*')
self.assertRegexpMatches(
joined_lines, '.*\- suite_.*')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
def test_test_suites_must_be_sorted(self):
fbb = FakeBBGen(self.args, TEST_SUITE_SORTING_WATERFALL, TEST_SUITE_SORTED,
LUCI_MILO_CFG)
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
for unsorted in (
TEST_SUITE_UNSORTED_1,
TEST_SUITE_UNSORTED_2,
TEST_SUITE_UNSORTED_3,
):
fbb = FakeBBGen(self.args, TEST_SUITE_SORTING_WATERFALL, unsorted,
LUCI_MILO_CFG)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_input_file_consistency(verbose=True)
joined_lines = ' '.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, '.*\+ suite_.*')
self.assertRegexpMatches(
joined_lines, '.*\- suite_.*')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
FOO_GTESTS_WATERFALL_MIXIN_WATERFALL = """\
[
{
'mixins': ['waterfall_mixin'],
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_BUILDER_MIXIN_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'mixins': ['builder_mixin'],
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_DIMENSION_SETS_MIXIN_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'mixins': [
'dimension_set_mixin_1',
'dimension_set_mixin_2',
'duplicate_dimension_set_mixin_1',
'dimension_mixin',
],
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_WATERFALL_MIXIN_BUILDER_REMOVE_MIXIN_WATERFALL = """\
[
{
'mixins': ['waterfall_mixin'],
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'remove_mixins': ['waterfall_mixin'],
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_BUILDER_MIXIN_NON_SWARMING_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'mixins': ['random_mixin'],
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_DIMENSIONS_MIXIN_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'mixins': ['dimension_mixin'],
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GPU_TELEMETRY_TEST_DIMENSIONS_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'mixins': ['dimension_mixin'],
'os_type': 'win',
'browser_config': 'release',
'test_suites': {
'gpu_telemetry_tests': 'foo_tests',
},
},
},
},
]
"""
# Swarming mixins must be a list, a single string is not allowed.
FOO_GTESTS_INVALID_LIST_MIXIN_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'mixins': 'dimension_mixin',
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_INVALID_NOTFOUND_MIXIN_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'mixins': ['nonexistant'],
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_TEST_MIXIN_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'mixins': ['waterfall_mixin'],
'machines': {
'Fake Tester': {
'swarming': {},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_GTESTS_SORTING_MIXINS_WATERFALL = """\
[
{
'mixins': ['a_mixin', 'b_mixin', 'c_mixin'],
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'swarming': {
'dimension_sets': [
{
'kvm': '1',
},
],
},
'test_suites': {
'gtest_tests': 'foo_tests',
},
},
},
},
]
"""
FOO_TEST_SUITE_WITH_MIXIN = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'swarming': {
'dimension_sets': [
{
'integrity': 'high',
}
],
'expiration': 120,
},
'mixins': ['test_mixin'],
},
},
},
}
"""
# These mixins are invalid; if passed to check_input_file_consistency, they will
# fail. These are used for output file consistency checks.
SWARMING_MIXINS = """\
{
'builder_mixin': {
'swarming': {
'value': 'builder',
},
},
'dimension_mixin': {
'swarming': {
'dimensions': {
'iama': 'mixin',
},
},
},
'random_mixin': {
'value': 'random',
},
'test_mixin': {
'swarming': {
'value': 'test',
},
},
'waterfall_mixin': {
'swarming': {
'value': 'waterfall',
},
},
}
"""
SWARMING_MIXINS_APPEND = """\
{
'builder_mixin': {
'$mixin_append': {
'args': [ '--mixin-argument' ],
},
},
}
"""
SWARMING_MIXINS_APPEND_NOT_LIST = """\
{
'builder_mixin': {
'$mixin_append': {
'args': 'I am not a list',
},
},
}
"""
SWARMING_MIXINS_APPEND_TO_SWARMING = """\
{
'builder_mixin': {
'$mixin_append': {
'swarming': [ 'swarming!' ],
},
},
}
"""
SWARMING_MIXINS_DIMENSION_SETS = """\
{
'dimension_set_mixin_1': {
'swarming': {
'dimension_sets': [
{
'value': 'ds1',
},
],
},
},
'dimension_set_mixin_2': {
'swarming': {
'dimension_sets': [
{
'value': 'ds2',
},
],
},
},
'duplicate_dimension_set_mixin_1': {
'swarming': {
'dimension_sets': [
{
'value': 'ds1',
},
],
},
},
'dimension_mixin': {
'swarming': {
'dimensions': {
'other_value': 'dimension_mixin',
},
},
},
}
"""
SWARMING_MIXINS_DUPLICATED = """\
{
'builder_mixin': {
'value': 'builder',
},
'builder_mixin': {
'value': 'builder',
},
}
"""
SWARMING_MIXINS_UNSORTED = """\
{
'b_mixin': {
'b': 'b',
},
'a_mixin': {
'a': 'a',
},
'c_mixin': {
'c': 'c',
},
}
"""
SWARMING_MIXINS_SORTED = """\
{
'a_mixin': {
'a': 'a',
},
'b_mixin': {
'b': 'b',
},
'c_mixin': {
'c': 'c',
},
}
"""
WATERFALL_DIMENSION_SETS_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"other_value": "dimension_mixin",
"value": "ds1"
},
{
"other_value": "dimension_mixin",
"value": "ds2"
}
]
},
"test": "foo_test"
}
]
}
}
"""
WATERFALL_MIXIN_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"integrity": "high"
}
],
"expiration": 120,
"value": "waterfall"
},
"test": "foo_test"
}
]
}
}
"""
WATERFALL_MIXIN_REMOVE_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"integrity": "high"
}
],
"expiration": 120
},
"test": "foo_test"
}
]
}
}
"""
WATERFALL_MIXIN_WATERFALL_EXCEPTION_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"integrity": "high"
}
],
"expiration": 120,
"value": "exception"
},
"test": "foo_test"
}
]
}
}
"""
BUILDER_MIXIN_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"integrity": "high"
}
],
"expiration": 120,
"value": "builder"
},
"test": "foo_test"
}
]
}
}
"""
BUILDER_MIXIN_NON_SWARMING_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"integrity": "high"
}
],
"expiration": 120
},
"test": "foo_test",
"value": "random"
}
]
}
}
"""
BUILDER_MIXIN_APPEND_ARGS_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--c_arg",
"--mixin-argument"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "foo_test"
}
]
}
}
"""
TEST_MIXIN_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"integrity": "high",
"kvm": "1"
}
],
"expiration": 120,
"value": "test"
},
"test": "foo_test"
}
]
}
}
"""
DIMENSIONS_MIXIN_WATERFALL_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"iama": "mixin",
"integrity": "high"
}
],
"expiration": 120,
"value": "test"
},
"test": "foo_test"
}
]
}
}
"""
class MixinTests(TestCase):
"""Tests for the mixins feature."""
def test_mixins_must_be_sorted(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_SORTING_MIXINS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
fbb = FakeBBGen(self.args,
FOO_GTESTS_SORTING_MIXINS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_UNSORTED)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_input_file_consistency(verbose=True)
joined_lines = '\n'.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, '.*\+ ._mixin.*')
self.assertRegexpMatches(
joined_lines, '.*\- ._mixin.*')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
def test_waterfall(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL_MIXIN_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file('chromium.test.json',
WATERFALL_MIXIN_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
WATERFALL_MIXIN_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_waterfall_exception_overrides(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL_MIXIN_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
exceptions=SCRIPT_WITH_ARGS_SWARMING_EXCEPTIONS,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.test.json', WATERFALL_MIXIN_WATERFALL_EXCEPTION_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', WATERFALL_MIXIN_WATERFALL_EXCEPTION_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_builder(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_BUILDER_MIXIN_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file('chromium.test.json',
BUILDER_MIXIN_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
BUILDER_MIXIN_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_builder_non_swarming(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_BUILDER_MIXIN_NON_SWARMING_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.test.json', BUILDER_MIXIN_NON_SWARMING_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', BUILDER_MIXIN_NON_SWARMING_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_test_suite(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_MIXIN,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file('chromium.test.json',
TEST_MIXIN_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
TEST_MIXIN_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_dimension(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_DIMENSIONS_MIXIN_WATERFALL,
FOO_TEST_SUITE_WITH_MIXIN,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file('chromium.test.json',
DIMENSIONS_MIXIN_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
DIMENSIONS_MIXIN_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_dimension_gpu(self):
fbb = FakeBBGen(self.args,
FOO_GPU_TELEMETRY_TEST_DIMENSIONS_WATERFALL,
FOO_TEST_SUITE_WITH_MIXIN,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS,
gn_isolate_map=GPU_TELEMETRY_GN_ISOLATE_MAP)
self.create_testing_buildbot_json_file('chromium.test.json',
GPU_DIMENSIONS_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
GPU_DIMENSIONS_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_unreferenced(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_MIXIN,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'.*mixins are unreferenced.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_unused(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_INVALID_NOTFOUND_MIXIN_WATERFALL,
FOO_TEST_SUITE_WITH_MIXIN,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file('chromium.test.json',
DIMENSIONS_MIXIN_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
DIMENSIONS_MIXIN_WATERFALL_OUTPUT)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_list(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_INVALID_LIST_MIXIN_WATERFALL,
FOO_TEST_SUITE_WITH_MIXIN,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file('chromium.test.json',
DIMENSIONS_MIXIN_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
DIMENSIONS_MIXIN_WATERFALL_OUTPUT)
with self.assertRaises(generate_buildbot_json.BBGenErr):
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_no_duplicate_keys(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_BUILDER_MIXIN_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_DUPLICATED)
with self.assertRaisesRegexp(
generate_buildbot_json.BBGenErr,
'The following files have invalid keys: mixins.pyl'):
fbb.check_input_file_consistency(verbose=True)
joined_lines = '\n'.join(fbb.printed_lines)
self.assertRegexpMatches(
joined_lines, '.*\- builder_mixin')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
def test_no_duplicate_keys_basic_test_suite(self):
fbb = FakeBBGen(self.args, FOO_GTESTS_WATERFALL, FOO_TEST_SUITE_NOT_SORTED,
LUCI_MILO_CFG)
with self.assertRaisesRegexp(
generate_buildbot_json.BBGenErr,
'The following files have invalid keys: test_suites.pyl'):
fbb.check_input_file_consistency(verbose=True)
joined_lines = '\n'.join(fbb.printed_lines)
self.assertRegexpMatches(joined_lines, '.*\- a_test')
self.assertRegexpMatches(joined_lines, '.*\+ a_test')
fbb.printed_lines = []
self.assertFalse(fbb.printed_lines)
def test_type_assert_printing_help(self):
fbb = FakeBBGen(self.args, FOO_GTESTS_WATERFALL, TEST_SUITES_SYNTAX_ERROR,
LUCI_MILO_CFG)
with self.assertRaisesRegexp(
generate_buildbot_json.BBGenErr,
'Invalid \.pyl file \'test_suites.pyl\'.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertEquals(
fbb.printed_lines, [
'== test_suites.pyl ==',
'<snip>',
'1 {',
"2 'basic_suites': {",
'--------------------------------------------------------------------'
'------------',
'3 3: {',
'-------^------------------------------------------------------------'
'------------',
"4 'suite_c': {},",
'5 },',
'<snip>',
])
def test_mixin_append_args(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_BUILDER_MIXIN_WATERFALL,
FOO_TEST_SUITE_WITH_ARGS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_APPEND)
self.create_testing_buildbot_json_file(
'chromium.test.json', BUILDER_MIXIN_APPEND_ARGS_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', BUILDER_MIXIN_APPEND_ARGS_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_mixin_append_mixin_field_not_list(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_BUILDER_MIXIN_WATERFALL,
FOO_TEST_SUITE_WITH_ARGS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_APPEND_NOT_LIST)
with self.assertRaisesRegexp(
generate_buildbot_json.BBGenErr,
'Key "args" in \$mixin_append must be a list.'):
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_mixin_append_test_field_not_list(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_BUILDER_MIXIN_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_APPEND_TO_SWARMING)
with self.assertRaisesRegexp(
generate_buildbot_json.BBGenErr,
'Cannot apply \$mixin_append to non-list "swarming".'):
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_remove_mixin_builder_remove_waterfall(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL_MIXIN_BUILDER_REMOVE_MIXIN_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.test.json', WATERFALL_MIXIN_REMOVE_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', WATERFALL_MIXIN_REMOVE_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_remove_mixin_test_remove_waterfall(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL_MIXIN_WATERFALL,
FOO_TEST_SUITE_WITH_REMOVE_WATERFALL_MIXIN,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.test.json', WATERFALL_MIXIN_REMOVE_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', WATERFALL_MIXIN_REMOVE_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_remove_mixin_test_remove_builder(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_BUILDER_MIXIN_WATERFALL,
FOO_TEST_SUITE_WITH_REMOVE_BUILDER_MIXIN,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.test.json', WATERFALL_MIXIN_REMOVE_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', WATERFALL_MIXIN_REMOVE_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_dimension_sets_application(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_DIMENSION_SETS_MIXIN_WATERFALL,
FOO_TEST_SUITE_NO_DIMENSIONS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_DIMENSION_SETS)
self.create_testing_buildbot_json_file(
'chromium.test.json', WATERFALL_DIMENSION_SETS_WATERFALL_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', WATERFALL_DIMENSION_SETS_WATERFALL_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
TEST_SUITE_WITH_PARAMS = """\
{
'basic_suites': {
'bar_tests': {
'bar_test': {
'args': ['--no-xvfb'],
'swarming': {
'dimension_sets': [
{
'device_os': 'NMF26U'
}
],
},
'should_retry_with_patch': False,
'name': 'bar_test'
},
'bar_test_test': {
'swarming': {
'dimension_sets': [
{
'kvm': '1'
}
],
'hard_timeout': 1000
},
'should_retry_with_patch': True
}
},
'foo_tests': {
'foo_test_empty': {},
'foo_test': {
'args': [
'--jobs=1',
'--verbose'
],
'swarming': {
'dimension_sets': [
{
'device_os': 'MMB29Q'
}
],
'hard_timeout': 1800
}
},
'foo_test_test': {
'swarming': {
},
'name': 'pls'
},
},
},
'compound_suites': {
'composition_tests': [
'foo_tests',
'bar_tests',
],
},
}
"""
TEST_QUERY_BOTS_OUTPUT = {
"Fake Android M Tester": {
"gtest_tests": [
{
"test": "foo_test",
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": False
}
}
]
},
"Fake Android L Tester": {
"gtest_tests": [
{
"test": "foo_test",
"args": [
"--gs-results-bucket=chromium-result-details",
"--recover-devices"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision":
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets":[
{
"device_os": "LMY41U",
"device_os_type": "user",
"device_type": "hammerhead",
'os': 'Android'
}
],
"can_use_on_swarming_builders": True
}
}
]
},
"Fake Android K Tester": {
"additional_compile_targets": ["bar_test"],
"gtest_tests": [
{
"test": "foo_test",
"args": [
"--gs-results-bucket=chromium-result-details",
"--recover-devices"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision":
"git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets": [
{
"device_os": "KTU84P",
"device_os_type": "userdebug",
"device_type": "hammerhead",
"os": "Android",
}
],
"can_use_on_swarming_builders": True,
"output_links": [
{
"link": ["https://luci-logdog.appspot.com/v/?s",
"=android%2Fswarming%2Flogcats%2F",
"${TASK_ID}%2F%2B%2Funified_logcats"],
"name": "shard #${SHARD_INDEX} logcats"
}
]
}
}
]
},
"Android Builder": {
"additional_compile_targets": ["bar_test"]
}
}
TEST_QUERY_BOTS_TESTS_OUTPUT = {
"Fake Android M Tester": [
{
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"test": "foo_test",
"swarming": {
"can_use_on_swarming_builders": False
}
}
],
"Fake Android L Tester": [
{
"test": "foo_test",
"args": [
"--gs-results-bucket=chromium-result-details",
"--recover-devices"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets": [
{
"device_os": "LMY41U",
"device_os_type": "user",
"device_type": "hammerhead",
"os": "Android"
}
],
"can_use_on_swarming_builders": True
}
}
],
"Android Builder": [],
"Fake Android K Tester": [
{
"test": "foo_test",
"args": [
"--gs-results-bucket=chromium-result-details",
"--recover-devices"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets": [
{
"device_os": "KTU84P",
"device_os_type": "userdebug",
"device_type": "hammerhead",
"os": "Android"
}
],
"can_use_on_swarming_builders": True,
"output_links": [
{
"link": [
"https://luci-logdog.appspot.com/v/?s",
"=android%2Fswarming%2Flogcats%2F",
"${TASK_ID}%2F%2B%2Funified_logcats"
],
"name": "shard #${SHARD_INDEX} logcats"
}
]
}
}
]
}
TEST_QUERY_BOT_OUTPUT = {
"additional_compile_targets": ["bar_test"],
"gtest_tests": [
{
"test": "foo_test",
"args": [
"--gs-results-bucket=chromium-result-details",
"--recover-devices"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets": [
{
"device_os": "KTU84P",
"device_os_type": "userdebug",
"device_type": "hammerhead",
"os": "Android"
}
],
"can_use_on_swarming_builders": True,
"output_links": [
{
"link": ["https://luci-logdog.appspot.com/v/?s",
"=android%2Fswarming%2Flogcats%2F",
"${TASK_ID}%2F%2B%2Funified_logcats"
],
"name": "shard #${SHARD_INDEX} logcats"
}
]
}
}
]
}
TEST_QUERY_BOT_TESTS_OUTPUT = [
{
"test": "foo_test",
"args": [
"--gs-results-bucket=chromium-result-details",
"--recover-devices"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"cipd_packages": [
{
"cipd_package": "infra/tools/luci/logdog/butler/${platform}",
"location": "bin",
"revision": "git_revision:ff387eadf445b24c935f1cf7d6ddd279f8a6b04c"
}
],
"dimension_sets": [
{
"device_os": "LMY41U",
"device_os_type": "user",
"device_type": "hammerhead",
"os": "Android"
}
],
"can_use_on_swarming_builders": True
}
}
]
TEST_QUERY_TESTS_OUTPUT = {
"bar_test": {},
"foo_test": {}
}
TEST_QUERY_TESTS_MULTIPLE_PARAMS_OUTPUT = ["foo_test"]
TEST_QUERY_TESTS_DIMENSION_PARAMS_OUTPUT = ["bar_test"]
TEST_QUERY_TESTS_SWARMING_PARAMS_OUTPUT = ["bar_test_test"]
TEST_QUERY_TESTS_PARAMS_OUTPUT = ['bar_test_test']
TEST_QUERY_TESTS_PARAMS_FALSE_OUTPUT = ['bar_test']
TEST_QUERY_TEST_OUTPUT = {}
TEST_QUERY_TEST_BOTS_OUTPUT = [
"Fake Android M Tester",
"Fake Android L Tester",
"Fake Android K Tester"
]
TEST_QUERY_TEST_BOTS_ISOLATED_SCRIPTS_OUTPUT = ['Fake Tester']
TEST_QUERY_TEST_BOTS_NO_BOTS_OUTPUT = []
class QueryTests(TestCase):
"""Tests for the query feature."""
def test_query_bots(self):
self.override_args(query='bots',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_BOTS_OUTPUT)
def test_query_bots_invalid(self):
self.override_args(query='bots/blah/blah',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_bots_json(self):
self.override_args(query='bots',
check=False,
pyl_files_dir=None,
json='result.json',
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
self.assertFalse(fbb.printed_lines)
def test_query_bots_tests(self):
self.override_args(query='bots/tests',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_BOTS_TESTS_OUTPUT)
def test_query_invalid_bots_tests(self):
self.override_args(query='bots/tdfjdk',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_bot(self):
self.override_args(query='bot/Fake Android K Tester',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.maxDiff = None # pragma pylint: disable=attribute-defined-outside-init
self.assertEqual(query_json, TEST_QUERY_BOT_OUTPUT)
def test_query_bot_invalid_id(self):
self.override_args(query='bot/bot1',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_bot_invalid_query_too_many(self):
self.override_args(query='bot/Fake Android K Tester/blah/blah',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_bot_invalid_query_no_tests(self):
self.override_args(query='bot/Fake Android K Tester/blahs',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_bot_tests(self):
self.override_args(query='bot/Fake Android L Tester/tests',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_BOT_TESTS_OUTPUT)
def test_query_tests(self):
self.override_args(query='tests',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TESTS_OUTPUT)
def test_query_tests_invalid(self):
self.override_args(query='tests/blah/blah',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_tests_multiple_params(self):
self.override_args(query='tests/--jobs=1&--verbose',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
TEST_SUITE_WITH_PARAMS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TESTS_MULTIPLE_PARAMS_OUTPUT)
def test_query_tests_invalid_params(self):
self.override_args(query='tests/device_os?',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
TEST_SUITE_WITH_PARAMS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_tests_dimension_params(self):
self.override_args(query='tests/device_os:NMF26U',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
TEST_SUITE_WITH_PARAMS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TESTS_DIMENSION_PARAMS_OUTPUT)
def test_query_tests_swarming_params(self):
self.override_args(query='tests/hard_timeout:1000',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
TEST_SUITE_WITH_PARAMS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TESTS_SWARMING_PARAMS_OUTPUT)
def test_query_tests_params(self):
self.override_args(query='tests/should_retry_with_patch:true',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
TEST_SUITE_WITH_PARAMS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TESTS_PARAMS_OUTPUT)
def test_query_tests_params_false(self):
self.override_args(query='tests/should_retry_with_patch:false',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
TEST_SUITE_WITH_PARAMS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TESTS_PARAMS_FALSE_OUTPUT)
def test_query_test(self):
self.override_args(query='test/foo_test',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TEST_OUTPUT)
def test_query_test_invalid_id(self):
self.override_args(query='test/foo_foo',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_test_invalid_length(self):
self.override_args(query='test/foo_tests/foo/foo',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_test_bots(self):
self.override_args(query='test/foo_test/bots',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TEST_BOTS_OUTPUT)
def test_query_test_bots_isolated_scripts(self):
self.override_args(query='test/foo_test/bots',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
FOO_ISOLATED_SCRIPTS_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TEST_BOTS_ISOLATED_SCRIPTS_OUTPUT)
def test_query_test_bots_invalid(self):
self.override_args(query='test/foo_tests/foo',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
def test_query_test_bots_no_bots(self):
self.override_args(query='test/bar_tests/bots',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
fbb.query(fbb.args)
query_json = json.loads("".join(fbb.printed_lines))
self.assertEqual(query_json, TEST_QUERY_TEST_BOTS_NO_BOTS_OUTPUT)
def test_query_invalid(self):
self.override_args(query='foo',
check=False,
pyl_files_dir=None,
json=None,
waterfall_filters=[])
fbb = FakeBBGen(self.args,
ANDROID_WATERFALL,
GOOD_COMPOSITION_TEST_SUITES,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS_SORTED)
with self.assertRaises(SystemExit) as cm:
fbb.query(fbb.args)
self.assertEqual(cm.exception.code, 1)
self.assertTrue(fbb.printed_lines)
FOO_TEST_SUITE_WITH_ENABLE_FEATURES_SEPARATE_ENTRIES = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'args': [
'--enable-features',
'Foo,Bar',
],
},
},
},
}
"""
FOO_TEST_REPLACEMENTS_REMOVE_NO_VALUE = """\
{
'foo_test': {
'replacements': {
'Fake Tester': {
'args': {
'--c_arg': None,
},
},
},
},
}
"""
FOO_TEST_REPLACEMENTS_REMOVE_VALUE = """\
{
'foo_test': {
'replacements': {
'Fake Tester': {
'args': {
'--enable-features': None,
},
},
},
},
}
"""
FOO_TEST_REPLACEMENTS_REPLACE_VALUE = """\
{
'foo_test': {
'replacements': {
'Fake Tester': {
'args': {
'--enable-features': 'Bar,Baz',
},
},
},
},
}
"""
FOO_TEST_REPLACEMENTS_INVALID_KEY = """\
{
'foo_test': {
'replacements': {
'Fake Tester': {
'invalid': {
'--enable-features': 'Bar,Baz',
},
},
},
},
}
"""
REPLACEMENTS_REMOVE_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
]
},
"test": "foo_test"
}
]
}
}
"""
REPLACEMENTS_VALUE_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--enable-features=Bar,Baz"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
]
},
"test": "foo_test"
}
]
}
}
"""
REPLACEMENTS_VALUE_SEPARATE_ENTRIES_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--enable-features",
"Bar,Baz"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
]
},
"test": "foo_test"
}
]
}
}
"""
class ReplacementTests(TestCase):
"""Tests for the arg replacement feature."""
def test_replacement_valid_remove_no_value(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_ARGS,
LUCI_MILO_CFG,
exceptions=FOO_TEST_REPLACEMENTS_REMOVE_NO_VALUE)
self.create_testing_buildbot_json_file('chromium.test.json',
REPLACEMENTS_REMOVE_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
REPLACEMENTS_REMOVE_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_replacement_valid_remove_value(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_ENABLE_FEATURES,
LUCI_MILO_CFG,
exceptions=FOO_TEST_REPLACEMENTS_REMOVE_VALUE)
self.create_testing_buildbot_json_file('chromium.test.json',
REPLACEMENTS_REMOVE_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
REPLACEMENTS_REMOVE_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_replacement_valid_replace_value(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_ENABLE_FEATURES,
LUCI_MILO_CFG,
exceptions=FOO_TEST_REPLACEMENTS_REPLACE_VALUE)
self.create_testing_buildbot_json_file('chromium.test.json',
REPLACEMENTS_VALUE_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
REPLACEMENTS_VALUE_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_replacement_valid_replace_value_separate_entries(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_ENABLE_FEATURES_SEPARATE_ENTRIES,
LUCI_MILO_CFG,
exceptions=FOO_TEST_REPLACEMENTS_REPLACE_VALUE)
self.create_testing_buildbot_json_file(
'chromium.test.json', REPLACEMENTS_VALUE_SEPARATE_ENTRIES_OUTPUT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', REPLACEMENTS_VALUE_SEPARATE_ENTRIES_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_replacement_invalid_key_not_valid(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE,
LUCI_MILO_CFG,
exceptions=FOO_TEST_REPLACEMENTS_INVALID_KEY)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Given replacement key *'):
fbb.check_output_file_consistency(verbose=True)
def test_replacement_invalid_key_not_found(self):
fbb = FakeBBGen(self.args,
FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_ARGS,
LUCI_MILO_CFG,
exceptions=FOO_TEST_REPLACEMENTS_REPLACE_VALUE)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Could not find *'):
fbb.check_output_file_consistency(verbose=True)
FOO_TEST_SUITE_WITH_MAGIC_ARGS = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'args': [
'$$MAGIC_SUBSTITUTION_TestOnlySubstitution',
],
},
},
},
}
"""
FOO_TEST_SUITE_WITH_INVALID_MAGIC_ARGS = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'args': [
'$$MAGIC_SUBSTITUTION_NotARealSubstitution',
],
},
},
},
}
"""
MAGIC_SUBSTITUTIONS_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--magic-substitution-success"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"kvm": "1"
}
]
},
"test": "foo_test"
}
]
}
}
"""
class MagicSubstitutionTests(TestCase):
"""Tests for the magic substitution feature."""
def test_valid_function(self):
fbb = FakeBBGen(self.args, FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_MAGIC_ARGS, LUCI_MILO_CFG)
self.create_testing_buildbot_json_file('chromium.test.json',
MAGIC_SUBSTITUTIONS_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
MAGIC_SUBSTITUTIONS_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_invalid_function(self):
fbb = FakeBBGen(self.args, FOO_GTESTS_WATERFALL,
FOO_TEST_SUITE_WITH_INVALID_MAGIC_ARGS, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Magic substitution function *'):
fbb.check_output_file_consistency(verbose=True)
# Matrix compound composition test suites
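# A matrix compound suite expands each referenced basic suite by its list of variants; judging
# from the expected outputs below, each variant's identifier is appended to the generated test
# name (e.g. swarming_test -> swarming_test_a_variant).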
MATRIX_COMPOUND_EMPTY = """\
{
'basic_suites': {
'bar_tests': {
'bar_test': {},
},
'foo_tests': {
'foo_test': {},
},
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {},
'bar_tests': {},
},
},
}
"""
MATRIX_COMPOUND_MISSING_IDENTIFIER = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {},
},
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
{
'swarming': {
'dimension_sets': [
{
'foo': 'bar',
},
],
},
},
],
},
},
},
}
"""
MATRIX_MISMATCHED_SWARMING_LENGTH = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {
'swarming': {
'dimension_sets': [
{
'hello': 'world',
}
],
},
},
},
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
{
'identifier': 'test',
'swarming': {
'dimension_sets': [
{
'foo': 'bar',
},
{
'bar': 'foo',
}
],
},
},
],
},
},
},
}
"""
MATRIX_REF_NONEXISTENT = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {},
},
},
'matrix_compound_suites': {
'matrix_tests': {
'bar_test': {},
},
},
}
"""
MATRIX_COMPOUND_REF_COMPOSITION = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {},
},
},
'compound_suites': {
'sample_composition': {
'foo_tests': {},
},
},
'matrix_compound_suites': {
'matrix_tests': {
'sample_composition': {},
},
},
}
"""
MATRIX_COMPOSITION_REF_MATRIX = """\
{
'basic_suites': {
'foo_tests': {
'foo_test': {},
},
},
'matrix_compound_suites': {
'a_test': {
'foo_tests': {},
},
'matrix_tests': {
'a_test': {},
},
},
}
"""
MATRIX_COMPOUND_VARIANTS_MIXINS_MERGE = """\
{
'basic_suites': {
'foo_tests': {
'set': {
'mixins': [ 'test_mixin' ],
},
},
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
{
'mixins': [ 'dimension_mixin' ],
},
],
},
},
},
}
"""
MATRIX_COMPOUND_VARIANTS_MIXINS = """\
{
'basic_suites': {
'foo_tests': {
'set': {
'mixins': [ 'test_mixin' ],
},
},
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
{
'mixins': [
'dimension_mixin',
'waterfall_mixin',
'builder_mixin',
'random_mixin'
],
},
],
},
},
},
}
"""
MATRIX_COMPOUND_VARIANTS_MIXINS_REMOVE = """\
{
'basic_suites': {
'foo_tests': {
'set': {
'remove_mixins': ['builder_mixin'],
},
},
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
{
'mixins': [ 'builder_mixin' ],
}
],
},
},
},
}
"""
MATRIX_COMPOUND_CONFLICTING_TEST_SUITES = """\
{
'basic_suites': {
'bar_tests': {
'baz_tests': {
'args': [
'--bar',
],
}
},
'foo_tests': {
'baz_tests': {
'args': [
'--foo',
],
}
},
},
'matrix_compound_suites': {
'matrix_tests': {
'bar_tests': {
'variants': [
{
'identifier': 'bar',
}
],
},
'foo_tests': {
'variants': [
{
'identifier': 'foo'
}
]
}
},
},
}
"""
MATRIX_COMPOUND_TARGETS_ARGS = """\
{
'basic_suites': {
'foo_tests': {
'args_test': {
'args': [
'--iam'
],
},
}
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
{
'identifier': 'args',
'args': [
'--anarg',
],
},
{
'identifier': 'swarming',
'swarming': {
'a': 'b',
'dimension_sets': [
{
'hello': 'world',
}
]
}
},
{
'identifier': 'mixins',
'mixins': [ 'dimension_mixin' ],
}
],
},
},
},
}
"""
MATRIX_COMPOUND_TARGETS_MIXINS = """\
{
'basic_suites': {
'foo_tests': {
'mixins_test': {
'mixins': [ 'test_mixin' ],
},
}
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'mixins': [ 'random_mixin' ],
'variants': [
{
'identifier': 'args',
'args': [
'--anarg',
],
},
{
'identifier': 'swarming',
'swarming': {
'a': 'b',
'dimension_sets': [
{
'hello': 'world',
}
]
}
},
{
'identifier': 'mixins',
'mixins': [ 'dimension_mixin' ],
}
],
},
},
},
}
"""
MATRIX_COMPOUND_TARGETS_SWARMING = """\
{
'basic_suites': {
'foo_tests': {
'swarming_test': {
'swarming': {
'foo': 'bar',
'dimension_sets': [
{
'foo': 'bar',
},
],
},
},
}
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
{
'identifier': 'args',
'args': [
'--anarg',
],
},
{
'identifier': 'swarming',
'swarming': {
'a': 'b',
'dimension_sets': [
{
'hello': 'world',
}
]
}
},
{
'identifier': 'mixins',
'mixins': [ 'dimension_mixin' ],
}
],
},
},
},
}
"""
MATRIX_COMPOUND_VARIANTS_REF = """\
{
'basic_suites': {
'foo_tests': {
'swarming_test': {},
}
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
'a_variant'
],
},
},
},
}
"""
MATRIX_COMPOUND_TEST_WITH_TEST_KEY = """\
{
'basic_suites': {
'foo_tests': {
'swarming_test': {
'test': 'foo_test_apk'
},
}
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
'a_variant',
],
},
},
},
}
"""
MATRIX_COMPOUND_MIXED_VARIANTS_REF = """\
{
'basic_suites': {
'foo_tests': {
'swarming_test': {},
}
},
'matrix_compound_suites': {
'matrix_tests': {
'foo_tests': {
'variants': [
'a_variant',
{
'args': [
'a',
'b'
],
'identifier': 'ab',
}
],
},
},
},
}
"""
VARIANTS_FILE = """\
{
'a_variant': {
'args': [
'--platform',
'device',
'--version',
'1'
],
'identifier': 'a_variant'
}
}
"""
MULTI_VARIANTS_FILE = """\
{
'a_variant': {
'args': [
'--platform',
'device',
'--version',
'1'
],
'identifier': 'a_variant'
},
'b_variant': {
'args': [
'--platform',
'sim',
'--version',
'2'
],
'identifier': 'b_variant'
}
}
"""
# Dictionary composition test suite outputs
MATRIX_COMPOUND_EMPTY_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": []
}
}
"""
MATRIX_COMPOUND_TEST_SUITE_WITH_TEST_KEY_DICT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--platform",
"device",
"--version",
"1"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "swarming_test_a_variant",
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "foo_test_apk"
}
]
}
}
"""
MATRIX_TARGET_DICT_MERGE_OUTPUT_ARGS = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--iam",
"--anarg"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "args_test_args",
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "args_test"
},
{
"args": [
"--iam"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "args_test_mixins",
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"iama": "mixin"
}
]
},
"test": "args_test"
},
{
"args": [
"--iam"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "args_test_swarming",
"swarming": {
"a": "b",
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"hello": "world"
}
]
},
"test": "args_test"
}
]
}
}
"""
MATRIX_TARGET_DICT_MERGE_OUTPUT_MIXINS = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--anarg"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "mixins_test_args",
"swarming": {
"can_use_on_swarming_builders": true,
"value": "test"
},
"test": "mixins_test",
"value": "random"
},
{
"args": [],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "mixins_test_mixins",
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"iama": "mixin"
}
],
"value": "test"
},
"test": "mixins_test",
"value": "random"
},
{
"args": [],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "mixins_test_swarming",
"swarming": {
"a": "b",
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"hello": "world"
}
],
"value": "test"
},
"test": "mixins_test",
"value": "random"
}
]
}
}
"""
MATRIX_TARGET_DICT_MERGE_OUTPUT_SWARMING = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--anarg"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "swarming_test_args",
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"foo": "bar"
}
],
"foo": "bar"
},
"test": "swarming_test"
},
{
"args": [],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "swarming_test_mixins",
"swarming": {
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"foo": "bar",
"iama": "mixin"
}
],
"foo": "bar"
},
"test": "swarming_test"
},
{
"args": [],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "swarming_test_swarming",
"swarming": {
"a": "b",
"can_use_on_swarming_builders": true,
"dimension_sets": [
{
"foo": "bar",
"hello": "world"
}
],
"foo": "bar"
},
"test": "swarming_test"
}
]
}
}
"""
MATRIX_COMPOUND_VARIANTS_REF_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"gtest_tests": [
{
"args": [
"--platform",
"device",
"--version",
"1"
],
"merge": {
"args": [],
"script": "//testing/merge_scripts/standard_gtest_merge.py"
},
"name": "swarming_test_a_variant",
"swarming": {
"can_use_on_swarming_builders": true
},
"test": "swarming_test"
}
]
}
}
"""
EMPTY_SKYLAB_TEST_EXCEPTIONS = """\
{
'tast.foo_OCTOPUS_TOT': {
'remove_from': [
'Fake Tester',
]
},
'tast.foo_OCTOPUS_TOT-1': {
'remove_from': [
'Fake Tester',
]
}
}
"""
MATRIX_SKYLAB_WATERFALL = """\
[
{
'project': 'chromium',
'bucket': 'ci',
'name': 'chromium.test',
'machines': {
'Fake Tester': {
'test_suites': {
'skylab_tests': 'cros_skylab_basic_x86',
},
},
},
},
]
"""
MATRIX_COMPOUND_SKYLAB_REF = """\
{
'basic_suites': {
'cros_skylab_basic': {
'tast.basic': {
'suite': 'tast.basic',
'timeout': 3600,
},
'tast.foo': {
'suite': 'tast.foo',
'timeout': 3600,
},
},
},
'compound_suites': {},
'matrix_compound_suites': {
'cros_skylab_basic_x86': {
'cros_skylab_basic': {
'variants': [
{
'skylab': {
'cros_board': 'octopus',
'cros_img': 'octopus-release/R89-13655.0.0',
},
'identifier': 'OCTOPUS_TOT',
},
{
'skylab': {
'cros_board': 'octopus',
'cros_img': 'octopus-release/R88-13597.23.0',
},
'identifier': 'OCTOPUS_TOT-1',
},
]
},
},
},
}
"""
VARIATION_SKYLAB_OUTPUT = """\
{
"AAAAA1 AUTOGENERATED FILE DO NOT EDIT": {},
"AAAAA2 See generate_buildbot_json.py to make changes": {},
"Fake Tester": {
"skylab_tests": [
{
"args": [],
"cros_board": "octopus",
"cros_img": "octopus-release/R89-13655.0.0",
"name": "tast.basic_OCTOPUS_TOT",
"suite": "tast.basic",
"swarming": {},
"test": "tast.basic",
"timeout": 3600
},
{
"args": [],
"cros_board": "octopus",
"cros_img": "octopus-release/R88-13597.23.0",
"name": "tast.basic_OCTOPUS_TOT-1",
"suite": "tast.basic",
"swarming": {},
"test": "tast.basic",
"timeout": 3600
}
]
}
}
"""
class MatrixCompositionTests(TestCase):
def test_good_structure_no_configs(self):
"""
Tests matrix compound test suite structure with no configs,
no conflicts and no bad references
"""
fbb = FakeBBGen(self.args, MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_EMPTY, LUCI_MILO_CFG)
self.create_testing_buildbot_json_file('chromium.test.json',
MATRIX_COMPOUND_EMPTY_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
MATRIX_COMPOUND_EMPTY_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_missing_identifier(self):
"""
Variant is missing an identifier
"""
fbb = FakeBBGen(self.args, MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_MISSING_IDENTIFIER, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Missing required identifier field in matrix compound suite*'):
fbb.check_output_file_consistency(verbose=True)
def test_mismatched_swarming_length(self):
"""
Swarming dimension set length mismatch test. Composition set > basic set
"""
fbb = FakeBBGen(self.args, MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_MISMATCHED_SWARMING_LENGTH, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Error merging lists by key *'):
fbb.check_output_file_consistency(verbose=True)
def test_nonexistent_ref(self):
"""
Test referencing a non-existent basic test suite
"""
fbb = FakeBBGen(self.args, MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_REF_NONEXISTENT, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Unable to find reference to *'):
fbb.check_output_file_consistency(verbose=True)
def test_ref_to_composition(self):
"""
Test referencing another composition test suite
"""
fbb = FakeBBGen(self.args, MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_REF_COMPOSITION, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'matrix_compound_suites may not refer to other *'):
fbb.check_output_file_consistency(verbose=True)
def test_ref_to_matrix(self):
"""
Test referencing another matrix test suite
"""
fbb = FakeBBGen(self.args, MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOSITION_REF_MATRIX, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'matrix_compound_suites may not refer to other *'):
fbb.check_output_file_consistency(verbose=True)
def test_conflicting_names(self):
fbb = FakeBBGen(self.args, MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_CONFLICTING_TEST_SUITES, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Conflicting test definitions.*'):
fbb.check_input_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_variants_swarming_dict_merge_args(self):
"""
Test targets with args defined in both the basic suite and matrix variants
"""
fbb = FakeBBGen(self.args,
MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_TARGETS_ARGS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.test.json', MATRIX_TARGET_DICT_MERGE_OUTPUT_ARGS)
self.create_testing_buildbot_json_file(
'chromium.ci.json', MATRIX_TARGET_DICT_MERGE_OUTPUT_ARGS)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_variants_swarming_dict_merge_mixins(self):
"""
Test targets with mixins defined in both the basic suite and matrix variants
"""
fbb = FakeBBGen(self.args,
MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_TARGETS_MIXINS,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.test.json', MATRIX_TARGET_DICT_MERGE_OUTPUT_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.ci.json', MATRIX_TARGET_DICT_MERGE_OUTPUT_MIXINS)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_variants_swarming_dict_swarming(self):
"""
Test targets with swarming dictionaries defined in both the basic suite and matrix variants
"""
fbb = FakeBBGen(self.args,
MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_TARGETS_SWARMING,
LUCI_MILO_CFG,
mixins=SWARMING_MIXINS)
self.create_testing_buildbot_json_file(
'chromium.test.json', MATRIX_TARGET_DICT_MERGE_OUTPUT_SWARMING)
self.create_testing_buildbot_json_file(
'chromium.ci.json', MATRIX_TARGET_DICT_MERGE_OUTPUT_SWARMING)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_variant_test_suite_with_test_key(self):
"""
Test targets in matrix compound test suites with variants
"""
fbb = FakeBBGen(self.args,
MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_TEST_WITH_TEST_KEY,
LUCI_MILO_CFG,
variants=VARIANTS_FILE)
self.create_testing_buildbot_json_file(
'chromium.test.json', MATRIX_COMPOUND_TEST_SUITE_WITH_TEST_KEY_DICT)
self.create_testing_buildbot_json_file(
'chromium.ci.json', MATRIX_COMPOUND_TEST_SUITE_WITH_TEST_KEY_DICT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_variants_pyl_ref(self):
"""Test targets with variants string ref"""
fbb = FakeBBGen(self.args,
MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_VARIANTS_REF,
LUCI_MILO_CFG,
variants=VARIANTS_FILE)
self.create_testing_buildbot_json_file('chromium.test.json',
MATRIX_COMPOUND_VARIANTS_REF_OUTPUT)
self.create_testing_buildbot_json_file('chromium.ci.json',
MATRIX_COMPOUND_VARIANTS_REF_OUTPUT)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
def test_variants_pyl_no_ref(self):
"""Test targets with variants string ref, not defined in variants.pyl"""
fbb = FakeBBGen(self.args, MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_VARIANTS_REF, LUCI_MILO_CFG)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'Missing variant definition for *'):
fbb.check_output_file_consistency(verbose=True)
def test_variants_pyl_all_unreferenced(self):
"""Test targets with variants in variants.pyl, unreferenced in tests"""
fbb = FakeBBGen(self.args,
MATRIX_GTEST_SUITE_WATERFALL,
MATRIX_COMPOUND_MIXED_VARIANTS_REF,
LUCI_MILO_CFG,
variants=MULTI_VARIANTS_FILE)
# self.create_testing_buildbot_json_file(
# 'chromium.test.json', MATRIX_COMPOUND_VARIANTS_REF_OUTPUT)
with self.assertRaisesRegexp(generate_buildbot_json.BBGenErr,
'The following variants were unreferenced *'):
fbb.check_input_file_consistency(verbose=True)
def test_good_skylab_matrix_with_variants(self):
fbb = FakeBBGen(self.args,
MATRIX_SKYLAB_WATERFALL,
MATRIX_COMPOUND_SKYLAB_REF,
LUCI_MILO_CFG,
exceptions=EMPTY_SKYLAB_TEST_EXCEPTIONS)
self.create_testing_buildbot_json_file('chromium.test.json',
VARIATION_SKYLAB_OUTPUT)
fbb.check_input_file_consistency(verbose=True)
fbb.check_output_file_consistency(verbose=True)
self.assertFalse(fbb.printed_lines)
if __name__ == '__main__':
unittest.main()
| 26.141774 | 80 | 0.542039 | 14,316 | 151,753 | 5.362951 | 0.038069 | 0.028603 | 0.025594 | 0.032562 | 0.882007 | 0.846137 | 0.807531 | 0.778798 | 0.75663 | 0.723807 | 0 | 0.006539 | 0.329825 | 151,753 | 5,804 | 81 | 26.146278 | 0.74838 | 0.007565 | 0 | 0.58606 | 0 | 0.000924 | 0.488386 | 0.076164 | 0 | 0 | 0 | 0 | 0.036421 | 0 | null | null | 0.000924 | 0.001294 | null | null | 0.025143 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f7bb535d0f1c3374e1fc68ea2af29b1fe75740e3 | 810 | py | Python | verk2.py | Kristberg/VEF-2VF05CU | f1d82106a6d2488412896e00e3bb64a3e70b1dd1 | ["MIT"] | null | null | null | verk2.py | Kristberg/VEF-2VF05CU | f1d82106a6d2488412896e00e3bb64a3e70b1dd1 | ["MIT"] | null | null | null | verk2.py | Kristberg/VEF-2VF05CU | f1d82106a6d2488412896e00e3bb64a3e70b1dd1 | ["MIT"] | null | null | null |
import os
from flask import Flask
app = Flask(__name__)
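# The three routes below serve the same page body with cross-links between / (Síða 1), /sida2 and /sida3.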
@app.route("/")
def home():
return """
<h1>Hello user</h1>
<img src="http://loremflickr.com/600/400">
<p><a href="/sida2"title ="Síða 2">Síða 2</a> | <a href="/sida3"title ="Síða 3">Síða 3</a></p>
"""
@app.route("/sida2")
def sida2():
return"""
<h1>Hello user</h1>
<img src="http://loremflickr.com/600/400">
<p><a href="/"title ="Síða 1">Síða 1</a> | <a href="/sida3"title ="Síða 3">Síða 3</a></p>
"""
@app.route("/sida3")
def sida3():
return"""
<h1>Hello user</h1>
<img src="http://loremflickr.com/600/400">
<p><a href="/"title ="Síða 1">Síða 1</a> | <a href="/sida2"title ="Síða 2">Síða 2</a></p>
"""
if __name__ == '__main__':
# app.run(debug=True, use_reloader=True)
app.run()
| 23.142857 | 98 | 0.561728 | 129 | 810 | 3.426357 | 0.286822 | 0.067873 | 0.088235 | 0.115385 | 0.710407 | 0.710407 | 0.710407 | 0.710407 | 0.710407 | 0.617647 | 0 | 0.066768 | 0.18642 | 810 | 34 | 99 | 23.823529 | 0.603945 | 0.046914 | 0 | 0.461538 | 0 | 0.115385 | 0.696104 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.115385 | false | 0 | 0.076923 | 0.115385 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
f7c99c1ac5e29ad4fdca4d2c366a26cdf90b9ab0 | 119 | py | Python | services/datetime_utils.py | AndriiOshtuk/MDN-DIY-mini-blog | 957e2b75f7aa302bf50fc22cd799e8b30a17fc61 | ["MIT"] | null | null | null | services/datetime_utils.py | AndriiOshtuk/MDN-DIY-mini-blog | 957e2b75f7aa302bf50fc22cd799e8b30a17fc61 | ["MIT"] | 5 | 2020-06-23T06:37:49.000Z | 2021-06-10T18:55:51.000Z | services/datetime_utils.py | AndriiOshtuk/MDN-DIY-mini-blog | 957e2b75f7aa302bf50fc22cd799e8b30a17fc61 | ["MIT"] | null | null | null |
from datetime import datetime
def utils_datetime():
"""Wrapper for datetime.today"""
return datetime.today()
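# Example: utils_datetime().isoformat() yields the current timestamp as a string.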
| 17 | 36 | 0.714286 | 14 | 119 | 6 | 0.642857 | 0.309524 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.176471 | 119 | 6 | 37 | 19.833333 | 0.857143 | 0.218487 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
792f1ea0b0aab661ff390a50cdc1258cab2a27f6 | 61 | py | Python | src/models/__init__.py | katsura-jp/fedavg.pytorch | 43680267cf839fbf56eec599605bed46e00328e9 | ["MIT"] | 34 | 2020-04-28T02:22:07.000Z | 2022-03-28T06:58:48.000Z | src/models/__init__.py | katsura-jp/fedavg.pytorch | 43680267cf839fbf56eec599605bed46e00328e9 | ["MIT"] | null | null | null | src/models/__init__.py | katsura-jp/fedavg.pytorch | 43680267cf839fbf56eec599605bed46e00328e9 | ["MIT"] | 15 | 2020-06-03T15:15:00.000Z | 2021-10-10T06:05:28.000Z |
from src.models.mlp import MLP
from src.models.cnn import CNN
| 30.5 | 30 | 0.819672 | 12 | 61 | 4.166667 | 0.5 | 0.28 | 0.52 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114754 | 61 | 2 | 31 | 30.5 | 0.925926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f7140d6b0c6c816241055072b9f57a174a01a5af | 13,262 | py | Python | tests/test_zipreader.py | itsmehara/pysmi | 51347f3c4adcc030afb9bc1ded8ce72748068b1b | ["BSD-2-Clause"] | 121 | 2016-05-17T14:19:25.000Z | 2022-02-03T14:28:25.000Z | tests/test_zipreader.py | itsmehara/pysmi | 51347f3c4adcc030afb9bc1ded8ce72748068b1b | ["BSD-2-Clause"] | 61 | 2016-05-16T20:45:32.000Z | 2022-02-11T22:28:33.000Z | tests/test_zipreader.py | itsmehara/pysmi | 51347f3c4adcc030afb9bc1ded8ce72748068b1b | ["BSD-2-Clause"] | 50 | 2016-05-16T20:04:51.000Z | 2021-11-12T12:14:35.000Z |
#
# This file is part of pysmi software.
#
# Copyright (c) 2015-2020, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysmi/license.html
#
import sys
import os
import tempfile
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
import StringIO
except ImportError:
from io import StringIO
from pysmi.reader.zipreader import ZipReader
class ZipReaderTestCase(unittest.TestCase):
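# Raw bytes of a small ZIP archive (the recurring 80, 75 pairs are the 'PK' signatures); it
# appears to contain test/testA ('A\n') plus a nested test/test.zip whose entry testC ('C\n')
# is read back by testGetInnerZipData below.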
zipArchive = [
80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 8, 135, 53, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 5, 0, 28, 0, 116, 101, 115, 116, 47, 85, 84, 9, 0, 3, 16, 211, 195, 89,
25, 211, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80,
75, 3, 4, 10, 0, 0, 0, 0, 0, 230, 134, 53, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 12, 0, 28, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100, 105, 114, 47,
85, 84, 9, 0, 3, 207, 210, 195, 89, 3, 211, 195, 89, 117, 120, 11, 0, 1, 4,
140, 102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 230, 134,
53, 75, 102, 214, 67, 99, 2, 0, 0, 0, 2, 0, 0, 0, 17, 0, 28, 0, 116, 101, 115,
116, 47, 115, 117, 98, 100, 105, 114, 47, 116, 101, 115, 116, 65, 85, 84, 9,
0, 3, 207, 210, 195, 89, 3, 211, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0,
0, 4, 140, 102, 0, 0, 66, 10, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 2, 135, 53, 75,
162, 170, 2, 92, 138, 7, 0, 0, 138, 7, 0, 0, 13, 0, 28, 0, 116, 101, 115, 116,
47, 116, 101, 115, 116, 46, 122, 105, 112, 85, 84, 9, 0, 3, 3, 211, 195, 89,
3, 211, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80,
75, 3, 4, 10, 0, 0, 0, 0, 0, 253, 134, 53, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 5, 0, 28, 0, 116, 101, 115, 116, 47, 85, 84, 9, 0, 3, 253, 210, 195, 89, 3,
211, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80, 75,
3, 4, 10, 0, 0, 0, 0, 0, 230, 134, 53, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
12, 0, 28, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100, 105, 114, 47, 85, 84,
9, 0, 3, 207, 210, 195, 89, 3, 211, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102,
0, 0, 4, 140, 102, 0, 0, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 130, 131, 53, 75,
227, 250, 30, 37, 12, 0, 0, 0, 12, 0, 0, 0, 21, 0, 28, 0, 116, 101, 115, 116,
47, 115, 117, 98, 100, 105, 114, 47, 116, 101, 115, 116, 65, 46, 116, 120,
116, 85, 84, 9, 0, 3, 116, 204, 195, 89, 134, 204, 195, 89, 117, 120, 11, 0,
1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 115, 117, 98, 100, 105, 114, 116,
101, 115, 116, 65, 10, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 109, 131, 53, 75, 237,
78, 102, 83, 6, 0, 0, 0, 6, 0, 0, 0, 14, 0, 28, 0, 116, 101, 115, 116, 47,
116, 101, 115, 116, 65, 46, 116, 120, 116, 85, 84, 9, 0, 3, 78, 204, 195, 89,
134, 204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0,
116, 101, 115, 116, 65, 10, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 144, 131, 53,
75, 204, 176, 61, 249, 144, 2, 0, 0, 144, 2, 0, 0, 13, 0, 28, 0, 116, 101,
115, 116, 47, 116, 101, 115, 116, 46, 122, 105, 112, 85, 84, 9, 0, 3, 143,
204, 195, 89, 143, 204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4,
140, 102, 0, 0, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 117, 131, 53, 75, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 28, 0, 116, 101, 115, 116, 47, 85, 84, 9, 0,
3, 94, 204, 195, 89, 98, 204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0,
4, 140, 102, 0, 0, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 130, 131, 53, 75, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 28, 0, 116, 101, 115, 116, 47, 115, 117,
98, 100, 105, 114, 47, 85, 84, 9, 0, 3, 116, 204, 195, 89, 134, 204, 195,
89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 3, 4,
10, 0, 0, 0, 0, 0, 130, 131, 53, 75, 227, 250, 30, 37, 12, 0, 0, 0, 12, 0, 0,
0, 21, 0, 28, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100, 105, 114, 47, 116,
101, 115, 116, 65, 46, 116, 120, 116, 85, 84, 9, 0, 3, 116, 204, 195, 89, 116,
204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 115,
117, 98, 100, 105, 114, 116, 101, 115, 116, 65, 10, 80, 75, 3, 4, 10, 0, 0, 0,
0, 0, 109, 131, 53, 75, 237, 78, 102, 83, 6, 0, 0, 0, 6, 0, 0, 0, 14, 0, 28,
0, 116, 101, 115, 116, 47, 116, 101, 115, 116, 65, 46, 116, 120, 116, 85, 84,
9, 0, 3, 78, 204, 195, 89, 78, 204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102,
0, 0, 4, 140, 102, 0, 0, 116, 101, 115, 116, 65, 10, 80, 75, 1, 2, 30, 3, 10,
0, 0, 0, 0, 0, 117, 131, 53, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 24,
0, 0, 0, 0, 0, 0, 0, 16, 0, 253, 65, 0, 0, 0, 0, 116, 101, 115, 116, 47, 85,
84, 5, 0, 3, 94, 204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140,
102, 0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 130, 131, 53, 75, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 24, 0, 0, 0, 0, 0, 0, 0, 16, 0, 253, 65, 63,
0, 0, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100, 105, 114, 47, 85, 84, 5,
0, 3, 116, 204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102,
0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 130, 131, 53, 75, 227, 250, 30,
37, 12, 0, 0, 0, 12, 0, 0, 0, 21, 0, 24, 0, 0, 0, 0, 0, 1, 0, 0, 0, 180, 129,
133, 0, 0, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100, 105, 114, 47, 116,
101, 115, 116, 65, 46, 116, 120, 116, 85, 84, 5, 0, 3, 116, 204, 195, 89, 117,
120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 1, 2, 30, 3, 10,
0, 0, 0, 0, 0, 109, 131, 53, 75, 237, 78, 102, 83, 6, 0, 0, 0, 6, 0, 0, 0, 14,
0, 24, 0, 0, 0, 0, 0, 1, 0, 0, 0, 180, 129, 224, 0, 0, 0, 116, 101, 115, 116,
47, 116, 101, 115, 116, 65, 46, 116, 120, 116, 85, 84, 5, 0, 3, 78, 204, 195,
89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 5, 6, 0,
0, 0, 0, 4, 0, 4, 0, 76, 1, 0, 0, 46, 1, 0, 0, 0, 0, 80, 75, 3, 4, 10, 0, 0, 0,
0, 0, 230, 134, 53, 75, 102, 214, 67, 99, 2, 0, 0, 0, 2, 0, 0, 0, 17, 0, 28, 0,
116, 101, 115, 116, 47, 115, 117, 98, 100, 105, 114, 47, 116, 101, 115, 116,
65, 85, 84, 9, 0, 3, 207, 210, 195, 89, 207, 210, 195, 89, 117, 120, 11, 0, 1,
4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 66, 10, 80, 75, 3, 4, 10, 0, 0, 0, 0,
0, 253, 134, 53, 75, 39, 231, 88, 122, 2, 0, 0, 0, 2, 0, 0, 0, 10, 0, 28, 0,
116, 101, 115, 116, 47, 116, 101, 115, 116, 67, 85, 84, 9, 0, 3, 253, 210,
195, 89, 253, 210, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140,
102, 0, 0, 67, 10, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 211, 134, 53, 75, 165,
133, 110, 72, 2, 0, 0, 0, 2, 0, 0, 0, 10, 0, 28, 0, 116, 101, 115, 116, 47,
116, 101, 115, 116, 65, 85, 84, 9, 0, 3, 173, 210, 195, 89, 173, 210, 195, 89,
117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 65, 10, 80, 75, 1,
2, 30, 3, 10, 0, 0, 0, 0, 0, 253, 134, 53, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 5, 0, 24, 0, 0, 0, 0, 0, 0, 0, 16, 0, 253, 65, 0, 0, 0, 0, 116, 101, 115,
116, 47, 85, 84, 5, 0, 3, 253, 210, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102,
0, 0, 4, 140, 102, 0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 230, 134, 53,
75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 24, 0, 0, 0, 0, 0, 0, 0, 16,
0, 253, 65, 63, 0, 0, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100, 105, 114,
47, 85, 84, 5, 0, 3, 207, 210, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0,
0, 4, 140, 102, 0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 130, 131, 53,
75, 227, 250, 30, 37, 12, 0, 0, 0, 12, 0, 0, 0, 21, 0, 24, 0, 0, 0, 0, 0, 1,
0, 0, 0, 180, 129, 133, 0, 0, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100,
105, 114, 47, 116, 101, 115, 116, 65, 46, 116, 120, 116, 85, 84, 5, 0, 3, 116,
204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80,
75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 109, 131, 53, 75, 237, 78, 102, 83, 6, 0,
0, 0, 6, 0, 0, 0, 14, 0, 24, 0, 0, 0, 0, 0, 1, 0, 0, 0, 180, 129, 224, 0, 0,
0, 116, 101, 115, 116, 47, 116, 101, 115, 116, 65, 46, 116, 120, 116, 85, 84,
5, 0, 3, 78, 204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102,
0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 144, 131, 53, 75, 204, 176, 61,
249, 144, 2, 0, 0, 144, 2, 0, 0, 13, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 180,
129, 46, 1, 0, 0, 116, 101, 115, 116, 47, 116, 101, 115, 116, 46, 122, 105,
112, 85, 84, 5, 0, 3, 143, 204, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0,
0, 4, 140, 102, 0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 230, 134, 53, 75,
102, 214, 67, 99, 2, 0, 0, 0, 2, 0, 0, 0, 17, 0, 24, 0, 0, 0, 0, 0, 1, 0, 0,
0, 180, 129, 5, 4, 0, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100, 105, 114,
47, 116, 101, 115, 116, 65, 85, 84, 5, 0, 3, 207, 210, 195, 89, 117, 120, 11,
0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0,
0, 0, 253, 134, 53, 75, 39, 231, 88, 122, 2, 0, 0, 0, 2, 0, 0, 0, 10, 0, 24,
0, 0, 0, 0, 0, 1, 0, 0, 0, 180, 129, 82, 4, 0, 0, 116, 101, 115, 116, 47, 116,
101, 115, 116, 67, 85, 84, 5, 0, 3, 253, 210, 195, 89, 117, 120, 11, 0, 1, 4,
140, 102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0,
211, 134, 53, 75, 165, 133, 110, 72, 2, 0, 0, 0, 2, 0, 0, 0, 10, 0, 24, 0, 0,
0, 0, 0, 1, 0, 0, 0, 180, 129, 152, 4, 0, 0, 116, 101, 115, 116, 47, 116, 101,
115, 116, 65, 85, 84, 5, 0, 3, 173, 210, 195, 89, 117, 120, 11, 0, 1, 4, 140,
102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 5, 6, 0, 0, 0, 0, 8, 0, 8, 0, 150, 2,
0, 0, 222, 4, 0, 0, 0, 0, 80, 75, 3, 4, 10, 0, 0, 0, 0, 0, 211, 134, 53, 75,
165, 133, 110, 72, 2, 0, 0, 0, 2, 0, 0, 0, 10, 0, 28, 0, 116, 101, 115, 116,
47, 116, 101, 115, 116, 65, 85, 84, 9, 0, 3, 173, 210, 195, 89, 3, 211, 195,
89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 65, 10, 80, 75,
1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 8, 135, 53, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 5, 0, 24, 0, 0, 0, 0, 0, 0, 0, 16, 0, 253, 65, 0, 0, 0, 0, 116, 101,
115, 116, 47, 85, 84, 5, 0, 3, 16, 211, 195, 89, 117, 120, 11, 0, 1, 4, 140,
102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 230,
134, 53, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, 24, 0, 0, 0, 0, 0, 0,
0, 16, 0, 253, 65, 63, 0, 0, 0, 116, 101, 115, 116, 47, 115, 117, 98, 100,
105, 114, 47, 85, 84, 5, 0, 3, 207, 210, 195, 89, 117, 120, 11, 0, 1, 4, 140,
102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 230,
134, 53, 75, 102, 214, 67, 99, 2, 0, 0, 0, 2, 0, 0, 0, 17, 0, 24, 0, 0, 0, 0,
0, 1, 0, 0, 0, 180, 129, 133, 0, 0, 0, 116, 101, 115, 116, 47, 115, 117, 98,
100, 105, 114, 47, 116, 101, 115, 116, 65, 85, 84, 5, 0, 3, 207, 210, 195,
89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0, 80, 75, 1, 2,
30, 3, 10, 0, 0, 0, 0, 0, 2, 135, 53, 75, 162, 170, 2, 92, 138, 7, 0, 0, 138,
7, 0, 0, 13, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 180, 129, 210, 0, 0, 0, 116,
101, 115, 116, 47, 116, 101, 115, 116, 46, 122, 105, 112, 85, 84, 5, 0, 3,
3, 211, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0,
80, 75, 1, 2, 30, 3, 10, 0, 0, 0, 0, 0, 211, 134, 53, 75, 165, 133, 110, 72,
2, 0, 0, 0, 2, 0, 0, 0, 10, 0, 24, 0, 0, 0, 0, 0, 1, 0, 0, 0, 180, 129, 163,
8, 0, 0, 116, 101, 115, 116, 47, 116, 101, 115, 116, 65, 85, 84, 5, 0, 3,
173, 210, 195, 89, 117, 120, 11, 0, 1, 4, 140, 102, 0, 0, 4, 140, 102, 0, 0,
80, 75, 5, 6, 0, 0, 0, 0, 5, 0, 5, 0, 151, 1, 0, 0, 233, 8, 0, 0, 0, 0]
if sys.version_info[0] < 3:
zipContents = ''.join([chr(x) for x in zipArchive])
else:
zipContents = bytes(zipArchive)
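# Both tests below write the archive to a temp file and read one entry back through ZipReader;
# note that the bare 'except Exception: pass' also silences failed assertions.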
def testGetDataFromFile(self):
filename = None
try:
fd, filename = tempfile.mkstemp()
os.write(fd, self.zipContents)
os.close(fd)
zipReader = ZipReader(filename)
mibinfo, data = zipReader.getData('testA')
assert data == 'A\n'
except Exception:
pass
if filename:
try:
os.remove(filename)
except Exception:
pass
def testGetInnerZipData(self):
filename = None
try:
fd, filename = tempfile.mkstemp()
os.write(fd, self.zipContents)
os.close(fd)
zipReader = ZipReader(filename)
mibinfo, data = zipReader.getData('testC')
assert data == 'C\n'
except Exception:
pass
if filename:
try:
os.remove(filename)
except Exception:
pass
suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite)
| 59.205357 | 87 | 0.454833 | 2,886 | 13,262 | 2.085586 | 0.060638 | 0.191062 | 0.181924 | 0.15883 | 0.888686 | 0.88852 | 0.881708 | 0.881708 | 0.881542 | 0.878717 | 0 | 0.565071 | 0.32906 | 13,262 | 223 | 88 | 59.470852 | 0.111373 | 0.010707 | 0 | 0.180851 | 0 | 0 | 0.00183 | 0 | 0 | 0 | 0 | 0 | 0.010638 | 1 | 0.010638 | false | 0.021277 | 0.053191 | 0 | 0.074468 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
f751c3e6171e5dd6de2f88177f415d072de31d10 | 55,607 | py | Python | SPC-MGR.py | Kali-Hac/SPC-MGR | 3eccceeba97e0dca62132187c6645b98620f3bd1 | ["MIT"] | null | null | null | SPC-MGR.py | Kali-Hac/SPC-MGR | 3eccceeba97e0dca62132187c6645b98620f3bd1 | ["MIT"] | null | null | null | SPC-MGR.py | Kali-Hac/SPC-MGR | 3eccceeba97e0dca62132187c6645b98620f3bd1 | ["MIT"] | 1 | 2021-12-30T09:17:49.000Z | 2021-12-30T09:17:49.000Z |
"""
SPC-MGR is built in part on the graph attention mechanism (https://arxiv.org/abs/1710.10903)
and in part on MG-SCR (https://www.ijcai.org/proceedings/2021/0135),
and includes open-source code from
the Graph Attention Network (GAT) project at https://github.com/PetarV-/GAT
and the MG-SCR project at https://github.com/Kali-Hac/MG-SCR.
"""
import time
import numpy as np
import tensorflow as tf
import os, sys
from models import GAT as MSRL # (Veličković et al.)
from utils import process_L3 as process
from utils.faiss_rerank import compute_jaccard_distance
from tensorflow.python.layers.core import Dense
from sklearn.preprocessing import label_binarize
from sklearn.cluster import DBSCAN
import torch
import collections
from sklearn.metrics import average_precision_score
dataset = ''
probe = ''
pre_dir = 'ReID_Models/'
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
nb_nodes = 20 # number of nodes in joint-scale graph
nhood = 1 # structural relation learning (nhood=1 for neighbor nodes)
fusion_lambda = 1 # collaboration fusion coefficient
ft_size = 3 # original node feature dimension (D)
time_step = 6 # sequence length (f)
# training params
batch_size = 256
nb_epochs = 100000
patience = 250 # patience for early stopping
hid_units = [8] # numbers of hidden units per each attention head in each layer
Ms = [8, 1] # additional entry for the output layer
k1, k2 = 20, 6 # parameters to compute feature distance matrix
residual = False
nonlinearity = tf.nn.elu
tf.app.flags.DEFINE_string('dataset', 'KS20', "Dataset: IAS, KS20, BIWI, CASIA-B or KGBD")
tf.app.flags.DEFINE_string('length', '6', "4, 6, 8, 10 or 12")
tf.app.flags.DEFINE_string('t', '0.07', "temperature for contrastive learning")
tf.app.flags.DEFINE_string('lr', '0.00035', "learning rate")
tf.app.flags.DEFINE_string('eps', '0.6', "distance parameter in DBSCAN")
tf.app.flags.DEFINE_string('min_samples', '2', "minimum sample number in DBSCAN")
tf.app.flags.DEFINE_string('probe', 'probe', "for testing probe")
tf.app.flags.DEFINE_string('gpu', '0', "GPU number")
tf.app.flags.DEFINE_string('probe_view', '', "test different views on CASIA B or KS20")
tf.app.flags.DEFINE_string('gallery_view', '', "test different views on CASIA B or KS20")
tf.app.flags.DEFINE_string('struct_only', '0', "struct_only")
tf.app.flags.DEFINE_string('m', '8', "structural relation heads")
tf.app.flags.DEFINE_string('probe_type', '', "probe.gallery")
tf.app.flags.DEFINE_string('patience', '200', "epochs for early stopping")
tf.app.flags.DEFINE_string('fusion_lambda', '1', "collaboration fusion coefficient")
tf.app.flags.DEFINE_string('S_dataset', '', "Source Dataset")
tf.app.flags.DEFINE_string('S_probe', '', "Source Dataset probe")
tf.app.flags.DEFINE_string('mode', 'UF', "Unsupervised Fine-tuning (UF) or Direct Generalization (DG)")
tf.app.flags.DEFINE_string('evaluate', '0', "evaluate on the best model")
FLAGS = tf.app.flags.FLAGS
# check parameters
if FLAGS.dataset not in ['IAS', 'KGBD', 'KS20', 'BIWI', 'CASIA_B']:
raise Exception('Dataset must be IAS, KGBD, KS20, BIWI or CASIA B.')
if not FLAGS.gpu.isdigit() or int(FLAGS.gpu) < 0:
raise Exception('GPU number must be a positive integer.')
if FLAGS.dataset == 'CASIA_B':
pass
else:
if FLAGS.length not in ['4', '6', '8', '10', '12']:
raise Exception('Length number must be 4, 6, 8, 10 or 12.')
if FLAGS.probe not in ['probe', 'Walking', 'Still', 'A', 'B']:
raise Exception('Dataset probe must be "A" (for IAS-A), "B" (for IAS-B), "probe" (for KS20, KGBD).')
if float(FLAGS.fusion_lambda) < 0 or float(FLAGS.fusion_lambda) > 1:
raise Exception('Multi-Level Graph Fusion coefficient must be not less than 0 or not larger than 1.')
if FLAGS.mode not in ['UF', 'DG']:
raise Exception('Mode must be UF or DG.')
if FLAGS.mode == 'DG' and FLAGS.S_dataset == '':
raise Exception('DG mode must set a source dataset.')
if FLAGS.mode == 'UF' and FLAGS.S_dataset != '':
raise Exception('UF mode does not use a source dataset.')
os.environ["CUDA_VISIBLE_DEVICES"] = FLAGS.gpu
dataset = FLAGS.dataset
# optimal parameters
if dataset == 'KGBD':
batch_size = 256
FLAGS.lr = '0.00035'
FLAGS.min_samples = '4'
FLAGS.t = '0.06'
elif dataset == 'CASIA_B':
batch_size = 128
FLAGS.lr = '0.00035'
FLAGS.min_samples = '2'
FLAGS.eps = '0.75'
FLAGS.t = '0.075'
else:
batch_size = 128
FLAGS.lr = '0.00035'
if dataset == 'KS20' or dataset == 'IAS':
FLAGS.t = '0.08'
FLAGS.eps = '0.8'
elif dataset == 'BIWI':
FLAGS.t = '0.07'
eps = float(FLAGS.eps)
min_samples = int(FLAGS.min_samples)
time_step = int(FLAGS.length)
fusion_lambda = float(FLAGS.fusion_lambda)
probe = FLAGS.probe
patience = int(FLAGS.patience)
global_att = False
struct_only = False
P = '8'
change = ''
if FLAGS.probe_type != '':
change += '_CME'
if FLAGS.fusion_lambda != '1':
change = '_lambda_' + FLAGS.fusion_lambda
if FLAGS.struct_only == '1':
struct_only = True
if FLAGS.dataset == 'KGBD':
FLAGS.m = '16'
if FLAGS.m != '8':
m = FLAGS.m
Ms = [int(m), 1]
try:
os.mkdir(pre_dir)
except:
pass
if struct_only:
pre_dir += '_struct_only'
if P != '8':
pre_dir += '_P_' + P
if dataset == 'KS20':
nb_nodes = 25
if dataset == 'CASIA_B':
nb_nodes = 14
print('----- Model hyperparams -----')
# print('skeleton_nodes: ' + str(nb_nodes))
print('sequence_length: ' + str(time_step))
print('fusion_lambda: ' + str(fusion_lambda))
print('batch_size: ' + str(batch_size))
print('lr: ' + str(FLAGS.lr))
print('temperature: ' + FLAGS.t)
print('eps: ' + FLAGS.eps)
print('min_samples: ' + FLAGS.min_samples)
print('m: ' + FLAGS.m)
print('fusion_lambda: ' + FLAGS.fusion_lambda)
# print('patience: ' + FLAGS.patience)
print('Mode: ' + FLAGS.mode)
print('Evaluate: ' + FLAGS.evaluate)
if FLAGS.mode == 'DG':
print('----- Mode Information -----')
print('Source Dataset: ' + FLAGS.S_dataset)
print('Target Dataset: ' + FLAGS.dataset)
print('Target Probe: ' + FLAGS.probe)
elif FLAGS.mode == 'UF':
print('----- Dataset Information -----')
print('Dataset: ' + dataset)
if dataset == 'CASIA_B':
print('Probe.Gallery: ', FLAGS.probe_type.split('.')[0], FLAGS.probe_type.split('.')[1])
else:
print('Probe: ' + FLAGS.probe)
"""
Obtain training and testing data in part-level, body-scale, and hyper-body-scale.
Generate corresponding adjacent matrix and bias.
"""
if FLAGS.probe_type == '':
if FLAGS.probe_view == '' and FLAGS.gallery_view == '':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_test_P, X_test_B, X_test_H_B, _, y_test, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split=probe, time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size, )
else:
if dataset == 'KS20':
_, _, _, _, _, _, _, X_test_P, X_test_B, X_test_H_B, _, y_test, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='view_'+FLAGS.probe_view, time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
)
X_train_P_all = []
X_train_B_all = []
X_train_H_B_all = []
y_train_all = []
for i in range(5):
if str(i) not in [FLAGS.probe_view, FLAGS.gallery_view]:
_, _, _, _, _, _, _, X_train_P, X_train_B, X_train_H_B, _, y_train, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='view_' + str(i), time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att,
batch_size=batch_size,
)
X_train_H_B_all.extend(X_train_H_B)
X_train_P_all.extend(X_train_P)
X_train_B_all.extend(X_train_B)
y_train_all.extend(y_train)
X_train_P = np.array(X_train_P_all)
X_train_B = np.array(X_train_B_all)
X_train_H_B = np.array(X_train_H_B_all)
y_train = np.array(y_train_all)
else:
from utils import process_cme_L3 as process
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_test_P, X_test_B, X_test_H_B, _, y_test, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split=probe, time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size, PG_type=FLAGS.probe_type.split('.')[0])
print('## [Probe].[Gallery]', FLAGS.probe_type)
all_ftr_size = hid_units[0] * (15 + 3)
loaded_graph = tf.Graph()
cluster_epochs = 15000
display = 20
if FLAGS.evaluate == '1':
FLAGS.S_dataset = FLAGS.dataset
FLAGS.S_probe = FLAGS.probe
FLAGS.mode = 'DG'
if FLAGS.mode == 'UF':
with tf.Graph().as_default():
with tf.name_scope('Input'):
P_in = tf.placeholder(dtype=tf.float32, shape=(batch_size * time_step, 10, ft_size))
B_in = tf.placeholder(dtype=tf.float32, shape=(batch_size * time_step, 5, ft_size))
H_B_in = tf.placeholder(dtype=tf.float32, shape=(batch_size * time_step, 3, ft_size))
P_bias_in = tf.placeholder(dtype=tf.float32, shape=(1, 10, 10))
B_bias_in = tf.placeholder(dtype=tf.float32, shape=(1, 5, 5))
H_B_bias_in = tf.placeholder(dtype=tf.float32, shape=(1, 3, 3))
attn_drop = tf.placeholder(dtype=tf.float32, shape=())
ffd_drop = tf.placeholder(dtype=tf.float32, shape=())
is_train = tf.placeholder(dtype=tf.bool, shape=())
pseudo_lab = tf.placeholder(dtype=tf.int32, shape=(batch_size,))
cluster_ftr = tf.placeholder(dtype=tf.float32, shape=(None, all_ftr_size))
with tf.name_scope("MG"), tf.variable_scope("MG", reuse=tf.AUTO_REUSE):
def SRL(J_in, J_bias_in, nb_nodes):
W_h = tf.Variable(tf.random_normal([3, hid_units[-1]]))
b_h = tf.Variable(tf.zeros(shape=[hid_units[-1], ]))
J_h = tf.reshape(J_in, [-1, ft_size])
J_h = tf.matmul(J_h, W_h) + b_h
J_h = tf.reshape(J_h, [batch_size*time_step, nb_nodes, hid_units[-1]])
J_seq_ftr = MSRL.inference(J_h, 0, nb_nodes, is_train,
attn_drop, ffd_drop,
bias_mat=J_bias_in,
hid_units=hid_units, n_heads=Ms,
residual=residual, activation=nonlinearity, r_pool=True)
return J_seq_ftr
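# FCRL: cross-scale collaborative relation learning -- softmax attention of each node in s2 over
# the nodes of s1, returning the raw relation matrix and the attended features of s1 aggregated
# for every node of s2.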
def FCRL(s1, s2, s1_num, s2_num, hid_in):
r_unorm = tf.matmul(s2, tf.transpose(s1, [0, 2, 1]))
att_w = tf.nn.softmax(r_unorm)
att_w = tf.expand_dims(att_w, axis=-1)
s1 = tf.reshape(s1, [s1.shape[0], 1, s1.shape[1], hid_in])
c_ftr = tf.reduce_sum(att_w * s1, axis=2)
c_ftr = tf.reshape(c_ftr, [-1, hid_in])
att_w = tf.reshape(att_w, [-1, s1_num * s2_num])
return r_unorm, c_ftr
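# MSC: runs SRL on the part-level (10-node), body-level (5-node) and hyper-body-level (3-node)
# scales, computes intra-scale and cross-scale FCRL relations, and adds them to each scale's
# features weighted by fusion_lambda (skipped when struct_only is set).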
def MSC(P_in, B_in, H_B_in, P_bias_in, B_bias_in, H_B_bias_in, hid_in, hid_out):
h_P_seq_ftr = SRL(J_in=P_in, J_bias_in=P_bias_in, nb_nodes=10)
h_B_seq_ftr = SRL(J_in=B_in, J_bias_in=B_bias_in, nb_nodes=5)
h_H_B_seq_ftr = SRL(J_in=H_B_in, J_bias_in=H_B_bias_in, nb_nodes=3)
h_P_seq_ftr = tf.reshape(h_P_seq_ftr, [-1, 10, hid_in])
h_B_seq_ftr = tf.reshape(h_B_seq_ftr, [-1, 5, hid_in])
h_H_B_seq_ftr = tf.reshape(h_H_B_seq_ftr, [-1, 3, hid_in])
W_cs_23 = tf.Variable(tf.random_normal([hid_in, hid_out]))
W_cs_24 = tf.Variable(tf.random_normal([hid_in, hid_out]))
W_cs_34 = tf.Variable(tf.random_normal([hid_in, hid_out]))
W_self_2 = tf.Variable(tf.random_normal([hid_in, hid_out]))
W_self_3 = tf.Variable(tf.random_normal([hid_in, hid_out]))
W_self_4 = tf.Variable(tf.random_normal([hid_in, hid_out]))
self_a_2, self_r_2 = FCRL(h_P_seq_ftr, h_P_seq_ftr, 10, 10, hid_in)
self_a_3, self_r_3 = FCRL(h_B_seq_ftr, h_B_seq_ftr, 5, 5, hid_in)
self_a_4, self_r_4 = FCRL(h_H_B_seq_ftr, h_H_B_seq_ftr, 3, 3, hid_in)
h_P_seq_ftr = tf.reshape(h_P_seq_ftr, [-1, hid_in])
h_B_seq_ftr = tf.reshape(h_B_seq_ftr, [-1, hid_in])
h_H_B_seq_ftr = tf.reshape(h_H_B_seq_ftr, [-1, hid_in])
h_P_seq_ftr = tf.reshape(h_P_seq_ftr, [-1, 10, hid_in])
h_B_seq_ftr = tf.reshape(h_B_seq_ftr, [-1, 5, hid_in])
h_H_B_seq_ftr = tf.reshape(h_H_B_seq_ftr, [-1, 3, hid_in])
a_23, r_23 = FCRL(h_B_seq_ftr, h_P_seq_ftr, 5, 10, hid_in)
a_24, r_24 = FCRL(h_H_B_seq_ftr, h_P_seq_ftr, 3, 10, hid_in)
a_34, r_34 = FCRL(h_H_B_seq_ftr, h_B_seq_ftr, 3, 5, hid_in)
h_P_seq_ftr = tf.reshape(h_P_seq_ftr, [-1, hid_in])
h_B_seq_ftr = tf.reshape(h_B_seq_ftr, [-1, hid_in])
h_H_B_seq_ftr = tf.reshape(h_H_B_seq_ftr, [-1, hid_in])
if not struct_only:
h_P_seq_ftr = h_P_seq_ftr + float(FLAGS.fusion_lambda) * (
tf.matmul(self_r_2, W_self_2) + tf.matmul(r_23, W_cs_23) + tf.matmul(r_24, W_cs_24))
h_B_seq_ftr = h_B_seq_ftr + float(FLAGS.fusion_lambda) * (tf.matmul(self_r_3, W_self_3) + tf.matmul(r_34, W_cs_34))
h_H_B_seq_ftr = h_H_B_seq_ftr + float(FLAGS.fusion_lambda) * (tf.matmul(self_r_4, W_self_4))
h_P_seq_ftr = tf.reshape(h_P_seq_ftr, [-1, 10, hid_out])
h_B_seq_ftr = tf.reshape(h_B_seq_ftr, [-1, 5, hid_out])
h_H_B_seq_ftr = tf.reshape(h_H_B_seq_ftr, [-1, 3, hid_out])
return h_H_B_seq_ftr, h_B_seq_ftr, h_P_seq_ftr
h_H_B_seq_ftr, h_B_seq_ftr, h_P_seq_ftr = MSC(P_in, B_in, H_B_in, P_bias_in, B_bias_in, H_B_bias_in,
hid_units[-1], hid_units[-1])
h_P_seq_ftr = tf.reshape(h_P_seq_ftr, [-1, hid_units[-1]])
h_B_seq_ftr = tf.reshape(h_B_seq_ftr, [-1, hid_units[-1]])
h_H_B_seq_ftr = tf.reshape(h_H_B_seq_ftr, [-1, hid_units[-1]])
optimizer = tf.train.AdamOptimizer(learning_rate=float(FLAGS.lr))
P_encode = tf.reduce_mean(tf.reshape(h_P_seq_ftr, [batch_size, time_step, -1]), axis=1)
B_encode = tf.reduce_mean(tf.reshape(h_B_seq_ftr, [batch_size, time_step, -1]), axis=1)
H_B_encode = tf.reduce_mean(tf.reshape(h_H_B_seq_ftr, [batch_size, time_step, -1]), axis=1)
P_encode = tf.reshape(P_encode, [batch_size, -1])
B_encode = tf.reshape(B_encode, [batch_size, -1])
H_B_encode = tf.reshape(H_B_encode, [batch_size, -1])
all_ftr = tf.concat([P_encode, B_encode, H_B_encode], axis=-1)
all_ftr = tf.reshape(all_ftr, [batch_size, -1])
output = tf.matmul(all_ftr, tf.transpose(cluster_ftr))
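# Contrastive cluster loss: L2-normalized sample features against L2-normalized cluster centroid
# features, scaled by temperature FLAGS.t and trained with softmax cross-entropy on pseudo labels.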
def cluster_loss(pseudo_lab, all_ftr, cluster_ftr):
all_ftr = tf.nn.l2_normalize(all_ftr, axis=-1)
cluster_ftr = tf.nn.l2_normalize(cluster_ftr, axis=-1)
output = tf.matmul(all_ftr, tf.transpose(cluster_ftr))
output /= float(FLAGS.t)
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=pseudo_lab, logits=output))
return loss
def empty_loss(b):
return tf.zeros([1])
contrastive_loss = tf.cond(tf.reduce_sum(pseudo_lab) > 0,
lambda: cluster_loss(pseudo_lab, all_ftr, cluster_ftr),
lambda: empty_loss(pseudo_lab))
cluster_train_op = optimizer.minimize(contrastive_loss)
saver = tf.train.Saver()
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
with tf.Session(config=config) as sess:
sess.run(init_op)
def train_loader(X_train_P, X_train_B, X_train_H_B, y_train):
tr_step = 0
tr_size = X_train_P.shape[0]
train_logits_all = []
train_labels_all = []
train_features_all = []
while tr_step * batch_size < tr_size:
if (tr_step + 1) * batch_size > tr_size:
break
X_input_P = X_train_P[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_P = X_input_P.reshape([-1, 10, 3])
X_input_B = X_train_B[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_B = X_input_B.reshape([-1, 5, 3])
X_input_H_B = X_train_H_B[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_H_B = X_input_H_B.reshape([-1, 3, 3])
labels = y_train[tr_step * batch_size:(tr_step + 1) * batch_size]
P_en, B_en, all_features = sess.run([P_encode, B_encode, all_ftr],
feed_dict={
P_in: X_input_P,
B_in: X_input_B,
H_B_in: X_input_H_B,
P_bias_in: biases_P,
B_bias_in: biases_B,
H_B_bias_in: biases_H_B,
is_train: True,
attn_drop: 0.0, ffd_drop: 0.0,
pseudo_lab: np.zeros([batch_size, ]),
cluster_ftr: np.zeros(
[batch_size, all_ftr_size])})
train_features_all.extend(all_features.tolist())
train_labels_all.extend(labels.tolist())
tr_step += 1
train_features_all = np.array(train_features_all).astype(np.float32)
train_features_all = torch.from_numpy(train_features_all)
return train_features_all, train_labels_all
def gal_loader(X_train_P, X_train_B, X_train_H_B, y_train):
tr_step = 0
tr_size = X_train_P.shape[0]
gal_logits_all = []
gal_labels_all = []
gal_features_all = []
while tr_step * batch_size < tr_size:
if (tr_step + 1) * batch_size > tr_size:
break
X_input_P = X_train_P[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_P = X_input_P.reshape([-1, 10, 3])
X_input_B = X_train_B[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_B = X_input_B.reshape([-1, 5, 3])
X_input_H_B = X_train_H_B[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_H_B = X_input_H_B.reshape([-1, 3, 3])
labels = y_train[tr_step * batch_size:(tr_step + 1) * batch_size]
P_en, B_en, all_features = sess.run([P_encode, B_encode, all_ftr],
feed_dict={
P_in: X_input_P,
B_in: X_input_B,
H_B_in: X_input_H_B,
P_bias_in: biases_P,
B_bias_in: biases_B,
H_B_bias_in: biases_H_B,
is_train: True,
attn_drop: 0.0, ffd_drop: 0.0,
pseudo_lab: np.zeros([batch_size, ]),
cluster_ftr: np.zeros(
[batch_size, all_ftr_size])})
gal_features_all.extend(all_features.tolist())
gal_labels_all.extend(labels.tolist())
tr_step += 1
return gal_features_all, gal_labels_all
def evaluation():
vl_step = 0
vl_size = X_test_P.shape[0]
pro_labels_all = []
pro_features_all = []
loaded_graph = tf.get_default_graph()
while vl_step * batch_size < vl_size:
if (vl_step + 1) * batch_size > vl_size:
break
X_input_P = X_test_P[vl_step * batch_size:(vl_step + 1) * batch_size]
X_input_P = X_input_P.reshape([-1, 10, 3])
X_input_B = X_test_B[vl_step * batch_size:(vl_step + 1) * batch_size]
X_input_B = X_input_B.reshape([-1, 5, 3])
X_input_H_B = X_test_H_B[vl_step * batch_size:(vl_step + 1) * batch_size]
X_input_H_B = X_input_H_B.reshape([-1, 3, 3])
labels = y_test[vl_step * batch_size:(vl_step + 1) * batch_size]
P_en, B_en, all_features = sess.run([P_encode, B_encode, all_ftr],
feed_dict={
P_in: X_input_P,
B_in: X_input_B,
H_B_in: X_input_H_B,
P_bias_in: biases_P,
B_bias_in: biases_B,
H_B_bias_in: biases_H_B,
is_train: False,
attn_drop: 0.0, ffd_drop: 0.0,
pseudo_lab: np.zeros([batch_size, ]),
cluster_ftr: np.zeros(
[batch_size, all_ftr_size])})
pro_labels_all.extend(labels.tolist())
pro_features_all.extend(all_features.tolist())
vl_step += 1
X = np.array(gal_features_all)
y = np.array(gal_labels_all)
t_X = np.array(pro_features_all)
t_y = np.array(pro_labels_all)
# print(X.shape, t_X.shape)
t_y = np.argmax(t_y, axis=-1)
y = np.argmax(y, axis=-1)
def mean_ap(distmat, query_ids=None, gallery_ids=None,
query_cams=None, gallery_cams=None):
# distmat = to_numpy(distmat)
m, n = distmat.shape
# Fill up default values
if query_ids is None:
query_ids = np.arange(m)
if gallery_ids is None:
gallery_ids = np.arange(n)
if query_cams is None:
query_cams = np.zeros(m).astype(np.int32)
if gallery_cams is None:
gallery_cams = np.ones(n).astype(np.int32)
# Ensure numpy array
query_ids = np.asarray(query_ids)
gallery_ids = np.asarray(gallery_ids)
query_cams = np.asarray(query_cams)
gallery_cams = np.asarray(gallery_cams)
# Sort and find correct matches
indices = np.argsort(distmat, axis=1)
matches = (gallery_ids[indices] == query_ids[:, np.newaxis])
# Compute AP for each query
aps = []
if (FLAGS.probe_view != '' and (FLAGS.probe_view == FLAGS.gallery_view or FLAGS.probe_type == 'nm.nm')) or (FLAGS.probe_type == 'cl.cl' or FLAGS.probe_type == 'bg.bg'):
for i in range(1, m):
valid = ((gallery_ids[indices[i]] != query_ids[i]) |
(gallery_cams[indices[i]] != query_cams[i]))
y_true = matches[i, valid]
y_score = -distmat[i][indices[i]][valid]
if not np.any(y_true): continue
aps.append(average_precision_score(y_true, y_score))
else:
for i in range(m):
valid = ((gallery_ids[indices[i]] != query_ids[i]) |
(gallery_cams[indices[i]] != query_cams[i]))
y_true = matches[i, valid]
y_score = -distmat[i][indices[i]][valid]
if not np.any(y_true): continue
aps.append(average_precision_score(y_true, y_score))
if len(aps) == 0:
raise RuntimeError("No valid query")
return np.mean(aps)
def metrics(X, y, t_X, t_y):
# compute Euclidean distance
if dataset != 'CASIA_B':
a, b = torch.from_numpy(t_X), torch.from_numpy(X)
m, n = a.size(0), b.size(0)
a = a.view(m, -1)
b = b.view(n, -1)
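# Squared Euclidean distances via ||a||^2 + ||b||^2 - 2*a.b^T (addmm_ adds the -2*a.b^T term),
# followed by an element-wise sqrt.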
dist_m = torch.pow(a, 2).sum(dim=1, keepdim=True).expand(m, n) + \
torch.pow(b, 2).sum(dim=1, keepdim=True).expand(n, m).t()
dist_m.addmm_(1, -2, a, b.t())
dist_m = dist_m.sqrt()
mAP = mean_ap(distmat=dist_m.numpy(), query_ids=t_y, gallery_ids=y)
_, dist_sort = dist_m.sort(1)
dist_sort = dist_sort.numpy()
else:
X = np.array(X)
t_X = np.array(t_X)
# pred = [cp.argmin(cp.linalg.norm(X - i, axis=1)).tolist() for i in t_X]
dist_m = [(np.linalg.norm(X - i, axis=1)).tolist() for i in t_X]
dist_m = np.array(dist_m)
mAP = mean_ap(distmat=dist_m, query_ids=t_y, gallery_ids=y)
dist_sort = [np.argsort(np.linalg.norm(X - i, axis=1)).tolist() for i in t_X]
dist_sort = np.array(dist_sort)
top_1 = top_5 = top_10 = 0
probe_num = dist_sort.shape[0]
if (FLAGS.probe_view != '' and (FLAGS.probe_view == FLAGS.gallery_view or FLAGS.probe_type == 'nm.nm')) or (FLAGS.probe_type == 'cl.cl' or FLAGS.probe_type == 'bg.bg'):
for i in range(probe_num):
# print(dist_sort[i, :10])
if t_y[i] in y[dist_sort[i, 1:2]]:
top_1 += 1
if t_y[i] in y[dist_sort[i, 1:6]]:
top_5 += 1
if t_y[i] in y[dist_sort[i, 1:11]]:
top_10 += 1
else:
for i in range(probe_num):
# print(dist_sort[i, :10])
if t_y[i] in y[dist_sort[i, :1]]:
top_1 += 1
if t_y[i] in y[dist_sort[i, :5]]:
top_5 += 1
if t_y[i] in y[dist_sort[i, :10]]:
top_10 += 1
return mAP, top_1 / probe_num, top_5 / probe_num, top_10 / probe_num
mAP, top_1, top_5, top_10 = metrics(X, y, t_X, t_y)
return mAP, top_1, top_5, top_10
max_acc_1 = 0
max_acc_2 = 0
best_cluster_info_1 = [0, 0]
best_cluster_info_2 = [0, 0]
cur_patience = 0
if dataset == 'KGBD' or dataset == 'KS20':
if FLAGS.gallery_view == '' and FLAGS.probe_view == '':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='gallery', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
)
else:
_, _, _, _, _, _, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, b_, nb_classes = \
process.gen_train_data(dataset=dataset, split='view_'+FLAGS.gallery_view, time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att,
batch_size=batch_size,
)
elif dataset == 'BIWI':
if probe == 'Walking':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='Still', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
)
else:
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='Walking', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att,
batch_size=batch_size,
)
elif dataset == 'IAS':
if probe == 'A':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='B', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
)
else:
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='A', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att,
batch_size=batch_size,
)
elif dataset == 'CASIA_B':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split=probe, time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
PG_type=FLAGS.probe_type.split('.')[1])
for epoch in range(cluster_epochs):
train_features_all, train_labels_all = train_loader(X_train_P, X_train_B, X_train_H_B, y_train)
gal_features_all, gal_labels_all = gal_loader(X_gal_P, X_gal_B, X_gal_H_B, y_gal)
mAP, top_1, top_5, top_10 = evaluation()
cur_patience += 1
if epoch > 0 and top_1 > max_acc_2:
max_acc_1 = mAP
best_cluster_info_1[0] = num_cluster
best_cluster_info_1[1] = outlier_num
cur_patience = 0
if FLAGS.mode == 'UF' and FLAGS.S_dataset == '':
if FLAGS.probe_view == '' and FLAGS.gallery_view == '' and FLAGS.dataset != 'CASIA_B':
# checkpt_file = pre_dir + dataset + '/' + probe + '_' + str(fusion_lambda) + '_' + str(
# nhood) + '_' + str(
# time_step) + '_' + FLAGS.min_samples + '_' + FLAGS.lr + '_' + FLAGS.eps + '_' + \
# FLAGS.t + '_' + change + '_best.ckpt'
checkpt_file = pre_dir + dataset + '/' + probe + change + '_best.ckpt'
elif FLAGS.dataset == 'CASIA_B':
# checkpt_file = pre_dir + dataset + '/' + probe + '_' + str(fusion_lambda) + '_' + str(
# nhood) + '_' + str(
# time_step) + '_' + FLAGS.min_samples + '_' + FLAGS.lr + '_' + FLAGS.eps + '_' + \
# FLAGS.t + '_' + change + '_' + FLAGS.probe_type + '_best.ckpt'
checkpt_file = pre_dir + dataset + '/' + probe + change + '_' + FLAGS.probe_type + '_best.ckpt'
else:
# checkpt_file = pre_dir + dataset + '/' + probe + '_' + str(fusion_lambda) + '_' + str(
# nhood) + '_' + str(
# time_step) + '_' + FLAGS.min_samples + '_' + FLAGS.lr + '_' + FLAGS.eps + '_' + \
# FLAGS.t + '_' + FLAGS.probe_view + 'v' + FLAGS.gallery_view + change + '_best.ckpt'
checkpt_file = pre_dir + dataset + '/' + probe + '_' + FLAGS.probe_view + 'v' + FLAGS.gallery_view + change + '_best.ckpt'
print(checkpt_file)
saver.save(sess, checkpt_file)
if epoch > 0 and top_1 > max_acc_2:
max_acc_2 = top_1
best_cluster_info_2[0] = num_cluster
best_cluster_info_2[1] = outlier_num
cur_patience = 0
if epoch > 0:
if FLAGS.probe_view != '' and FLAGS.gallery_view != '':
print('[UF] View: %s v %s | mAP: %.4f (%.4f) | Top-1: %.4f (%.4f) | Top-5: %.4f | Top-10: %.4f | % d + o: %d |' % (
FLAGS.probe_view, FLAGS.gallery_view, mAP, max_acc_1,
top_1, max_acc_2, top_5, top_10,
best_cluster_info_2[0], best_cluster_info_2[1]))
else:
print(
'[UF] %s - %s | mAP: %.4f (%.4f) | Top-1: %.4f (%.4f) | Top-5: %.4f | Top-10: %.4f | % d + o: %d |' % (
FLAGS.dataset, FLAGS.probe, mAP, max_acc_1,
top_1, max_acc_2, top_5, top_10,
best_cluster_info_2[0], best_cluster_info_2[1]))
if cur_patience == patience:
break
rerank_dist = compute_jaccard_distance(train_features_all, k1=k1, k2=k2)
if dataset == 'IAS' or dataset == 'KS20':
cluster = DBSCAN(eps=eps, min_samples=min_samples, metric='precomputed', n_jobs=-1)
else:
cluster = DBSCAN(eps=eps, min_samples=min_samples, metric='precomputed', n_jobs=-1)
pseudo_labels = cluster.fit_predict(rerank_dist)
# discard outliers
train_features_all = train_features_all[np.where(pseudo_labels != -1)]
X_train_P_new = X_train_P[np.where(pseudo_labels != -1)]
X_train_B_new = X_train_B[np.where(pseudo_labels != -1)]
X_train_H_B_new = X_train_H_B[np.where(pseudo_labels != -1)]
outlier_num = np.sum(pseudo_labels == -1)
pseudo_labels = pseudo_labels[np.where(pseudo_labels != -1)]
num_cluster = len(set(pseudo_labels)) - (1 if -1 in pseudo_labels else 0)
def generate_cluster_features(labels, features):
centers = collections.defaultdict(list)
for i, label in enumerate(labels):
if label == -1:
continue
centers[labels[i]].append(features[i])
centers = [
torch.stack(centers[idx], dim=0).mean(0) for idx in sorted(centers.keys())
]
centers = torch.stack(centers, dim=0)
return centers
cluster_features = generate_cluster_features(pseudo_labels, train_features_all)
cluster_features = cluster_features.numpy()
cluster_features = cluster_features.astype(np.float64)
tr_step = 0
tr_size = X_train_P_new.shape[0]
while tr_step * batch_size < tr_size:
if (tr_step + 1) * batch_size > tr_size:
break
X_input_P = X_train_P_new[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_P = X_input_P.reshape([-1, 10, 3])
X_input_B = X_train_B_new[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_B = X_input_B.reshape([-1, 5, 3])
X_input_H_B = X_train_H_B_new[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_H_B = X_input_H_B.reshape([-1, 3, 3])
labels = pseudo_labels[tr_step * batch_size:(tr_step + 1) * batch_size]
_, loss, P_en, B_en, all_features = sess.run(
[cluster_train_op, contrastive_loss, P_encode, B_encode, all_ftr],
feed_dict={
P_in: X_input_P,
B_in: X_input_B,
H_B_in: X_input_H_B,
P_bias_in: biases_P,
B_bias_in: biases_B,
H_B_bias_in: biases_H_B,
is_train: True,
attn_drop: 0.0, ffd_drop: 0.0,
pseudo_lab: labels,
cluster_ftr: cluster_features})
if tr_step % display == 0:
print('[%s] Batch num: %d | Cluser num: %d | Outlier: %d | Loss: %.5f |' %
(str(epoch), tr_step, num_cluster, outlier_num, loss))
tr_step += 1
sess.close()
elif FLAGS.mode == 'DG' and FLAGS.S_dataset != '':
if FLAGS.S_dataset == 'KGBD':
batch_size = 256
FLAGS.lr = '0.00035'
FLAGS.min_samples = '4'
FLAGS.eps = '0.6'
elif FLAGS.S_dataset == 'CASIA_B':
batch_size = 128
FLAGS.lr = '0.00035'
FLAGS.min_samples = '2'
FLAGS.eps = '0.75'
else:
batch_size = 128
FLAGS.lr = '0.00035'
if FLAGS.S_dataset == 'IAS' or FLAGS.S_dataset == 'KS20':
# if FLAGS.mode != 'DG':
FLAGS.eps = '0.8'
FLAGS.min_samples = '2'
if FLAGS.S_dataset == 'KS20':
FLAGS.min_samples = '2'
if FLAGS.S_dataset == 'BIWI':
FLAGS.min_samples = '2'
if FLAGS.S_probe == 'Walking':
FLAGS.eps = '0.6'
else:
FLAGS.eps = '0.7'
# checkpt_file = pre_dir + FLAGS.S_dataset + '/' + FLAGS.S_probe + '_' + str(fusion_lambda) + '_' + str(
# nhood) + '_' + str(
# time_step) + '_' + FLAGS.min_samples + '_' + FLAGS.lr + '_' + FLAGS.eps + '_' + \
# FLAGS.t + '_' + change + '_best.ckpt'
checkpt_file = pre_dir + dataset + '/' + probe + change + '_best.ckpt'
change = '_DG'
with tf.Session(graph=loaded_graph, config=config) as sess:
loader = tf.train.import_meta_graph(checkpt_file + '.meta')
P_in = loaded_graph.get_tensor_by_name("Input/Placeholder:0")
B_in = loaded_graph.get_tensor_by_name("Input/Placeholder_1:0")
H_B_in = loaded_graph.get_tensor_by_name("Input/Placeholder_2:0")
P_bias_in = loaded_graph.get_tensor_by_name("Input/Placeholder_3:0")
B_bias_in = loaded_graph.get_tensor_by_name("Input/Placeholder_4:0")
H_B_bias_in = loaded_graph.get_tensor_by_name("Input/Placeholder_5:0")
attn_drop = loaded_graph.get_tensor_by_name("Input/Placeholder_6:0")
ffd_drop = loaded_graph.get_tensor_by_name("Input/Placeholder_7:0")
is_train = loaded_graph.get_tensor_by_name("Input/Placeholder_8:0")
pseudo_lab = loaded_graph.get_tensor_by_name("Input/Placeholder_9:0")
cluster_ftr = loaded_graph.get_tensor_by_name("Input/Placeholder_10:0")
P_encode = loaded_graph.get_tensor_by_name("MG/MG/Reshape_45:0")
B_encode = loaded_graph.get_tensor_by_name("MG/MG/Reshape_46:0")
H_B_encode = loaded_graph.get_tensor_by_name("MG/MG/Reshape_47:0")
all_ftr = loaded_graph.get_tensor_by_name("MG/MG/Reshape_48:0")
contrastive_loss = loaded_graph.get_tensor_by_name("MG/MG/cond/Merge:0")
cluster_train_op = loaded_graph.get_operation_by_name("MG/MG/Adam")
init_op = tf.global_variables_initializer()
sess.run(init_op)
loader.restore(sess, checkpt_file)
saver = tf.train.Saver()
def train_loader(X_train_P, X_train_B, X_train_H_B, y_train):
tr_step = 0
tr_size = X_train_P.shape[0]
train_logits_all = []
train_labels_all = []
train_features_all = []
while tr_step * batch_size < tr_size:
if (tr_step + 1) * batch_size > tr_size:
break
X_input_P = X_train_P[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_P = X_input_P.reshape([-1, 10, 3])
X_input_B = X_train_B[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_B = X_input_B.reshape([-1, 5, 3])
X_input_H_B = X_train_H_B[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_H_B = X_input_H_B.reshape([-1, 3, 3])
labels = y_train[tr_step * batch_size:(tr_step + 1) * batch_size]
P_en, B_en, all_features = sess.run([P_encode, B_encode, all_ftr],
feed_dict={
P_in: X_input_P,
B_in: X_input_B,
H_B_in: X_input_H_B,
P_bias_in: biases_P,
B_bias_in: biases_B,
H_B_bias_in: biases_H_B,
is_train: True,
attn_drop: 0.0, ffd_drop: 0.0,
pseudo_lab: np.zeros([batch_size, ]),
cluster_ftr: np.zeros(
[batch_size, all_ftr_size])})
train_features_all.extend(all_features.tolist())
train_labels_all.extend(labels.tolist())
tr_step += 1
train_features_all = np.array(train_features_all).astype(np.float32)
train_features_all = torch.from_numpy(train_features_all)
return train_features_all, train_labels_all
def gal_loader(X_train_P, X_train_B, X_train_H_B, y_train):
tr_step = 0
tr_size = X_train_P.shape[0]
gal_logits_all = []
gal_labels_all = []
gal_features_all = []
while tr_step * batch_size < tr_size:
if (tr_step + 1) * batch_size > tr_size:
break
X_input_P = X_train_P[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_P = X_input_P.reshape([-1, 10, 3])
X_input_B = X_train_B[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_B = X_input_B.reshape([-1, 5, 3])
X_input_H_B = X_train_H_B[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_H_B = X_input_H_B.reshape([-1, 3, 3])
labels = y_train[tr_step * batch_size:(tr_step + 1) * batch_size]
P_en, B_en, all_features = sess.run([P_encode, B_encode, all_ftr],
feed_dict={
P_in: X_input_P,
B_in: X_input_B,
H_B_in: X_input_H_B,
P_bias_in: biases_P,
B_bias_in: biases_B,
H_B_bias_in: biases_H_B,
is_train: True,
attn_drop: 0.0, ffd_drop: 0.0,
pseudo_lab: np.zeros([batch_size, ]),
cluster_ftr: np.zeros(
[batch_size, all_ftr_size])})
gal_features_all.extend(all_features.tolist())
gal_labels_all.extend(labels.tolist())
tr_step += 1
return gal_features_all, gal_labels_all
def evaluation():
vl_step = 0
vl_size = X_test_P.shape[0]
pro_labels_all = []
pro_features_all = []
loaded_graph = tf.get_default_graph()
while vl_step * batch_size < vl_size:
if (vl_step + 1) * batch_size > vl_size:
break
X_input_P = X_test_P[vl_step * batch_size:(vl_step + 1) * batch_size]
X_input_P = X_input_P.reshape([-1, 10, 3])
X_input_B = X_test_B[vl_step * batch_size:(vl_step + 1) * batch_size]
X_input_B = X_input_B.reshape([-1, 5, 3])
X_input_H_B = X_test_H_B[vl_step * batch_size:(vl_step + 1) * batch_size]
X_input_H_B = X_input_H_B.reshape([-1, 3, 3])
labels = y_test[vl_step * batch_size:(vl_step + 1) * batch_size]
P_en, B_en, all_features = sess.run([P_encode, B_encode, all_ftr],
feed_dict={
P_in: X_input_P,
B_in: X_input_B,
H_B_in: X_input_H_B,
P_bias_in: biases_P,
B_bias_in: biases_B,
H_B_bias_in: biases_H_B,
is_train: False,
attn_drop: 0.0, ffd_drop: 0.0,
pseudo_lab: np.zeros([batch_size, ]),
cluster_ftr: np.zeros(
[batch_size, all_ftr_size])})
pro_labels_all.extend(labels.tolist())
pro_features_all.extend(all_features.tolist())
vl_step += 1
X = np.array(gal_features_all)
y = np.array(gal_labels_all)
t_X = np.array(pro_features_all)
t_y = np.array(pro_labels_all)
# print(X.shape, t_X.shape)
t_y = np.argmax(t_y, axis=-1)
y = np.argmax(y, axis=-1)
def mean_ap(distmat, query_ids=None, gallery_ids=None,
query_cams=None, gallery_cams=None):
# distmat = to_numpy(distmat)
m, n = distmat.shape
# Fill up default values
if query_ids is None:
query_ids = np.arange(m)
if gallery_ids is None:
gallery_ids = np.arange(n)
if query_cams is None:
query_cams = np.zeros(m).astype(np.int32)
if gallery_cams is None:
gallery_cams = np.ones(n).astype(np.int32)
# Ensure numpy array
query_ids = np.asarray(query_ids)
gallery_ids = np.asarray(gallery_ids)
query_cams = np.asarray(query_cams)
gallery_cams = np.asarray(gallery_cams)
# Sort and find correct matches
indices = np.argsort(distmat, axis=1)
matches = (gallery_ids[indices] == query_ids[:, np.newaxis])
# Compute AP for each query
aps = []
if (FLAGS.probe_view != '' and FLAGS.probe_view == FLAGS.gallery_view) or (FLAGS.probe_type == 'nm.nm' or FLAGS.probe_type == 'cl.cl' or FLAGS.probe_type == 'bg.bg'):
for i in range(1, m):
valid = ((gallery_ids[indices[i]] != query_ids[i]) |
(gallery_cams[indices[i]] != query_cams[i]))
y_true = matches[i, valid]
y_score = -distmat[i][indices[i]][valid]
if not np.any(y_true): continue
aps.append(average_precision_score(y_true, y_score))
else:
for i in range(m):
valid = ((gallery_ids[indices[i]] != query_ids[i]) |
(gallery_cams[indices[i]] != query_cams[i]))
y_true = matches[i, valid]
y_score = -distmat[i][indices[i]][valid]
if not np.any(y_true): continue
aps.append(average_precision_score(y_true, y_score))
if len(aps) == 0:
raise RuntimeError("No valid query")
return np.mean(aps)
def metrics(X, y, t_X, t_y):
a, b = torch.from_numpy(t_X), torch.from_numpy(X)
# compute Euclidean distance
m, n = a.size(0), b.size(0)
a = a.view(m, -1)
b = b.view(n, -1)
dist_m = torch.pow(a, 2).sum(dim=1, keepdim=True).expand(m, n) + \
torch.pow(b, 2).sum(dim=1, keepdim=True).expand(n, m).t()
dist_m.addmm_(1, -2, a, b.t())
dist_m = dist_m.sqrt()
mAP = mean_ap(distmat=dist_m.numpy(), query_ids=t_y, gallery_ids=y)
_, dist_sort = dist_m.sort(1)
dist_sort = dist_sort.numpy()
top_1 = top_5 = top_10 = 0
probe_num = dist_sort.shape[0]
if (FLAGS.probe_view != '' and FLAGS.probe_view == FLAGS.gallery_view) or (FLAGS.probe_type == 'nm.nm' or FLAGS.probe_type == 'cl.cl' or FLAGS.probe_type == 'bg.bg'):
for i in range(probe_num):
# print(dist_sort[i, :10])
if t_y[i] in y[dist_sort[i, 1:2]]:
top_1 += 1
if t_y[i] in y[dist_sort[i, 1:6]]:
top_5 += 1
if t_y[i] in y[dist_sort[i, 1:11]]:
top_10 += 1
else:
for i in range(probe_num):
# print(dist_sort[i, :10])
if t_y[i] in y[dist_sort[i, :1]]:
top_1 += 1
if t_y[i] in y[dist_sort[i, :5]]:
top_5 += 1
if t_y[i] in y[dist_sort[i, :10]]:
top_10 += 1
return mAP, top_1 / probe_num, top_5 / probe_num, top_10 / probe_num
mAP, top_1, top_5, top_10 = metrics(X, y, t_X, t_y)
return mAP, top_1, top_5, top_10
max_acc_1 = 0
max_acc_2 = 0
best_cluster_info_1 = [0, 0]
best_cluster_info_2 = [0, 0]
cur_patience = 0
if dataset == 'KGBD' or dataset == 'KS20':
if FLAGS.gallery_view == '' and FLAGS.probe_view == '':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='gallery', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
)
else:
_, _, _, _, _, _, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='view_' + FLAGS.gallery_view, time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att,
batch_size=batch_size,
)
elif dataset == 'BIWI':
if probe == 'Walking':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='Still', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
)
else:
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='Walking', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att,
batch_size=batch_size,
)
elif dataset == 'IAS':
if probe == 'A':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='B', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
)
else:
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split='A', time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att,
batch_size=batch_size,
)
elif dataset == 'CASIA_B':
_, X_train_P, X_train_B, X_train_H_B, _, y_train, _, X_gal_P, X_gal_B, X_gal_H_B, _, y_gal, \
_, _, adj_P, biases_P, adj_B, biases_B, adj_H_B, biases_H_B, _, _, nb_classes = \
process.gen_train_data(dataset=dataset, split=probe, time_step=time_step,
nb_nodes=nb_nodes, nhood=nhood, global_att=global_att, batch_size=batch_size,
PG_type=FLAGS.probe_type.split('.')[1])
for epoch in range(cluster_epochs):
train_features_all, train_labels_all = train_loader(X_train_P, X_train_B, X_train_H_B, y_train)
# train_features_all = train_features_all.numpy()
gal_features_all, gal_labels_all = gal_loader(X_gal_P, X_gal_B, X_gal_H_B, y_gal)
mAP, top_1, top_5, top_10 = evaluation()
cur_patience += 1
if epoch > 0 and top_1 > max_acc_2:
max_acc_1 = mAP
best_cluster_info_1[0] = num_cluster
best_cluster_info_1[1] = outlier_num
cur_patience = 0
if FLAGS.mode == 'DG' and FLAGS.S_dataset != '':
if FLAGS.probe_view == '' and FLAGS.gallery_view == '':
# checkpt_file = pre_dir + dataset + '/' + probe + '_' + str(fusion_lambda) + '_' + str(
# nhood) + '_' + str(
# time_step) + '_' + FLAGS.min_samples + '_' + FLAGS.lr + '_' + FLAGS.eps + '_' + \
# FLAGS.density_lambda + '_' + change + '_best.ckpt'
checkpt_file = pre_dir + dataset + '/' + probe + change + '_best.ckpt'
else:
checkpt_file = pre_dir + dataset + '/' + probe + '_' + FLAGS.probe_view + 'v' + \
FLAGS.gallery_view + change + '_best.ckpt'
print(checkpt_file)
saver.save(sess, checkpt_file)
if epoch > 0 and top_1 > max_acc_2:
max_acc_2 = top_1
best_cluster_info_2[0] = num_cluster
best_cluster_info_2[1] = outlier_num
cur_patience = 0
if FLAGS.evaluate == '1':
print(
'[Evaluate on %s - %s] | mAP: %.4f | Top-1: %.4f | Top-5: %.4f | Top-10: %.4f' % (
FLAGS.dataset, FLAGS.probe, mAP,
top_1, top_5, top_10))
exit()
else:
if FLAGS.probe_view != '' and FLAGS.gallery_view != '':
print(
'[DG] View: %s v %s | mAP: %.4f (%.4f) | Top-1: %.4f (%.4f) | Top-5: %.4f | Top-10: %.4f | % d + o: %d |' % (
FLAGS.probe_view, FLAGS.gallery_view, mAP, max_acc_1,
top_1, max_acc_2, top_5, top_10,
best_cluster_info_2[0], best_cluster_info_2[1]))
else:
print(
'[DG] %s - %s | mAP: %.4f (%.4f) | Top-1: %.4f (%.4f) | Top-5: %.4f | Top-10: %.4f | % d + o: %d |' % (
FLAGS.dataset, FLAGS.probe, mAP, max_acc_1,
top_1, max_acc_2, top_5, top_10,
best_cluster_info_2[0], best_cluster_info_2[1]))
if cur_patience == patience:
break
rerank_dist = compute_jaccard_distance(train_features_all, k1=k1, k2=k2)
if dataset == 'IAS' or dataset == 'KS20':
cluster = DBSCAN(eps=eps, min_samples=min_samples, metric='precomputed', n_jobs=-1)
else:
cluster = DBSCAN(eps=eps, min_samples=min_samples, metric='precomputed', n_jobs=-1)
pseudo_labels = cluster.fit_predict(rerank_dist)
# discard outliers
train_features_all = train_features_all[np.where(pseudo_labels != -1)]
X_train_P_new = X_train_P[np.where(pseudo_labels != -1)]
X_train_B_new = X_train_B[np.where(pseudo_labels != -1)]
X_train_H_B_new = X_train_H_B[np.where(pseudo_labels != -1)]
outlier_num = np.sum(pseudo_labels == -1)
pseudo_labels = pseudo_labels[np.where(pseudo_labels != -1)]
# print(pseudo_labels)
num_cluster = len(set(pseudo_labels)) - (1 if -1 in pseudo_labels else 0)
def generate_cluster_features(labels, features):
centers = collections.defaultdict(list)
for i, label in enumerate(labels):
if label == -1:
continue
centers[labels[i]].append(features[i])
centers = [
torch.stack(centers[idx], dim=0).mean(0) for idx in sorted(centers.keys())
]
# print(centers)
centers = torch.stack(centers, dim=0)
return centers
cluster_features = generate_cluster_features(pseudo_labels, train_features_all)
cluster_features = cluster_features.numpy()
cluster_features = cluster_features.astype(np.float64)
tr_step = 0
tr_size = X_train_P_new.shape[0]
# pro_en_P = []
# pro_en_B = []
while tr_step * batch_size < tr_size:
if (tr_step + 1) * batch_size > tr_size:
break
X_input_P = X_train_P_new[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_P = X_input_P.reshape([-1, 10, 3])
X_input_B = X_train_B_new[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_B = X_input_B.reshape([-1, 5, 3])
X_input_H_B = X_train_H_B_new[tr_step * batch_size:(tr_step + 1) * batch_size]
X_input_H_B = X_input_H_B.reshape([-1, 3, 3])
labels = pseudo_labels[tr_step * batch_size:(tr_step + 1) * batch_size]
_, loss, P_en, B_en, all_features = sess.run(
[cluster_train_op, contrastive_loss, P_encode, B_encode, all_ftr],
feed_dict={
P_in: X_input_P,
B_in: X_input_B,
H_B_in: X_input_H_B,
P_bias_in: biases_P,
B_bias_in: biases_B,
H_B_bias_in: biases_H_B,
is_train: True,
attn_drop: 0.0, ffd_drop: 0.0,
pseudo_lab: labels,
cluster_ftr: cluster_features})
if tr_step % display == 0:
print('[%s] Batch num: %d | Cluser num: %d | Outlier: %d | Loss: %.5f |' %
(str(epoch), tr_step, num_cluster, outlier_num, loss))
tr_step += 1
sess.close()
print('----- Model hyperparams -----')
# print('skeleton_nodes: ' + str(nb_nodes))
print('seqence_length: ' + str(time_step))
print('fusion_lambda: ' + str(fusion_lambda))
print('batch_size: ' + str(batch_size))
print('lr: ' + str(FLAGS.lr))
print('temperature: ' + FLAGS.t)
print('eps: ' + FLAGS.eps)
print('min_samples: ' + FLAGS.min_samples)
print('m: ' + FLAGS.m)
print('fusion_lambda: ' + FLAGS.fusion_lambda)
print('patience: ' + FLAGS.patience)
print('Mode: ' + FLAGS.mode)
if FLAGS.mode == 'DG':
print('----- Mode Information -----')
print('Source Dataset: ' + FLAGS.S_dataset)
print('Target Dataset: ' + FLAGS.dataset)
print('Target Probe: ' + FLAGS.probe)
elif FLAGS.mode == 'UF':
print('----- Dataset Information -----')
print('Dataset: ' + dataset)
print('Probe: ' + FLAGS.probe)
| 44.916801
| 174
| 0.594493
| 8,550
| 55,607
| 3.484211
| 0.056959
| 0.014032
| 0.007385
| 0.011816
| 0.811245
| 0.793152
| 0.775797
| 0.76519
| 0.746358
| 0.727593
| 0
| 0.024649
| 0.277717
| 55,607
| 1,237
| 175
| 44.953112
| 0.71706
| 0.052673
| 0
| 0.70974
| 0
| 0.007715
| 0.064143
| 0.004119
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016393
| false
| 0.001929
| 0.014465
| 0.000964
| 0.047252
| 0.047252
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f77801de93147bdce10893641ce3a3608466b4c9
| 15,704
|
py
|
Python
|
tests/test_views.py
|
marianaasilva/drf-friendly-errors
|
7ea96dc53b6f8ab2632d00339b4b2f8414e7ba40
|
[
"MIT"
] | 8
|
2019-03-15T18:08:58.000Z
|
2020-11-17T08:24:32.000Z
|
tests/test_views.py
|
marianaasilva/drf-friendly-errors
|
7ea96dc53b6f8ab2632d00339b4b2f8414e7ba40
|
[
"MIT"
] | null | null | null |
tests/test_views.py
|
marianaasilva/drf-friendly-errors
|
7ea96dc53b6f8ab2632d00339b4b2f8414e7ba40
|
[
"MIT"
] | 1
|
2020-04-23T14:59:06.000Z
|
2020-04-23T14:59:06.000Z
|
from django.core.urlresolvers import reverse
from rest_framework.test import APIRequestFactory
from rest_framework_friendly_errors import settings
from tests import BaseTestCase
from tests.models import Snippet
from tests.views import SnippetList, Snippet2List, SnippetDetail
class ListViewTestCase(BaseTestCase):
def setUp(self):
super(ListViewTestCase, self).setUp()
self.factory = APIRequestFactory()
def test_empty_list_view(self):
request = self.factory.get(reverse('snippet-list'))
response = SnippetList.as_view()(request)
self.assertEqual(response.data, [])
self.assertEqual(response.status_code, 200)
def test_create_a_valid_snippet(self):
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 201)
def test_invalid_boolean(self):
self.data_set['linenos'] = 'A text instead of a bool'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['BooleanField']['invalid']
self.assertEqual(int(response.data['linenos'][0]['code']), code)
def test_invalid_char_field(self):
# Too long string
self.data_set['title'] = 'Too Long Title For Defined Serializer'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['CharField']['max_length']
self.assertEqual(int(response.data['title'][0]['code']), code)
# Empty string
self.data_set['title'] = ''
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['CharField']['blank']
self.assertEqual(int(response.data['title'][0]['code']), code)
# No data provided
self.data_set.pop('title')
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['CharField']['required']
self.assertEqual(int(response.data['title'][0]['code']), code)
def test_invalid_choice_field(self):
# invalid choice
self.data_set['language'] = 'brainfuck'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['ChoiceField']['invalid_choice']
self.assertEqual(int(response.data['language'][0]['code']), code)
# empty string
self.data_set['language'] = ''
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['ChoiceField']['invalid_choice']
self.assertEqual(int(response.data['language'][0]['code']), code)
# no data provided
self.data_set.pop('language')
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['ChoiceField']['required']
self.assertEqual(int(response.data['language'][0]['code']), code)
def test_invalid_decimal_field(self):
# invalid
self.data_set['rating'] = 'text instead of float'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['DecimalField']['invalid']
self.assertEqual(int(response.data['rating'][0]['code']), code)
# decimal places
self.data_set['rating'] = 2.99
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['DecimalField']['max_decimal_places']
self.assertEqual(int(response.data['rating'][0]['code']), code)
# decimal max digits
self.data_set['rating'] = 222.9
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['DecimalField']['max_digits']
self.assertEqual(int(response.data['rating'][0]['code']), code)
def test_datetime_field_error_content(self):
# invalid
self.data_set['posted_date'] = 'text instead of date'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['DateTimeField']['invalid']
self.assertEqual(int(response.data['posted_date'][0]['code']), code)
def test_custom_field_validation_method(self):
self.data_set['comment'] = 'comment'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
self.assertEqual(int(response.data['comment'][0]['code']), 5000)
def test_custom_field_validation_using_validators(self):
self.data_set['title'] = 'A title'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
self.assertEqual(int(response.data['title'][0]['code']), 5001)
def test_field_dependency_validation(self):
self.data_set['title'] = 'A Python'
self.data_set['language'] = 'c++'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
self.assertEqual(int(response.data['non_field_errors'][0]['code']), 8000)
def test_error_registration(self):
self.data_set['title'] = 'A Python'
self.data_set['language'] = 'c++'
request = self.factory.post(reverse('snippet2-list'), data=self.data_set)
response = Snippet2List.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['ChoiceField']['invalid_choice']
self.assertEqual(
int(response.data['language'][0]['code']), code
)
def test_couple_errors(self):
self.data_set['comment'] = 'comment'
self.data_set['rating'] = 'Not a number at all'
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(response.data), 2)
def test_unique_constraint(self):
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
SnippetList.as_view()(request)
request = self.factory.post(reverse('snippet-list'), data=self.data_set)
response = SnippetList.as_view()(request)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_VALIDATOR_ERRORS['UniqueValidator']
self.assertEqual(int(response.data['watermark'][0]['code']), code)
class DetailViewTestCase(BaseTestCase):
def setUp(self):
super(DetailViewTestCase, self).setUp()
self.factory = APIRequestFactory()
self.snippet = Snippet.objects.create(**self.data_set)
def test_retrieve_object(self):
request = self.factory.get(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}))
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 200)
def test_update_snippet(self):
self.data_set['code'] = 'def foo(bar):\n\treturn bar'
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data['code'], 'def foo(bar):\n\treturn bar')
def update_invalid_boolean(self):
self.data_set['linenos'] = 'A text instead of a bool'
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['BooleanField']['invalid']
self.assertEqual(response.data['linenos'][0]['code'], code)
def test_upload_invalid_char_field(self):
# Too long string
self.data_set['title'] = 'Too Long Title For Defined Serializer'
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['CharField']['max_length']
self.assertEqual(int(response.data['title'][0]['code']), code)
# Empty string
self.data_set['title'] = ''
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['CharField']['blank']
self.assertEqual(int(response.data['title'][0]['code']), code)
# No data provided
self.data_set.pop('title')
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['CharField']['required']
self.assertEqual(int(response.data['title'][0]['code']), code)
def test_upload_invalid_choice_field(self):
# invalid choice
self.data_set['language'] = 'brainfuck'
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['ChoiceField']['invalid_choice']
self.assertEqual(int(response.data['language'][0]['code']), code)
# empty string
self.data_set['language'] = ''
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['ChoiceField']['invalid_choice']
self.assertEqual(int(response.data['language'][0]['code']), code)
# no data provided
self.data_set.pop('language')
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['ChoiceField']['required']
self.assertEqual(int(response.data['language'][0]['code']), code)
def test_upload_invalid_decimal_field(self):
# invalid
self.data_set['rating'] = 'text instead of float'
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['DecimalField']['invalid']
self.assertEqual(int(response.data['rating'][0]['code']), code)
# decimal places
self.data_set['rating'] = 2.99
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['DecimalField']['max_decimal_places']
self.assertEqual(int(response.data['rating'][0]['code']), code)
# decimal max digits
self.data_set['rating'] = 222.9
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['DecimalField']['max_digits']
self.assertEqual(int(response.data['rating'][0]['code']), code)
def test_datetime_field_error_content(self):
# invalid
self.data_set['posted_date'] = 'text instead of date'
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_FIELD_ERRORS['DateTimeField']['invalid']
self.assertEqual(int(response.data['posted_date'][0]['code']), code)
def test_cannot_update_to_not_unique_watermark(self):
self.data_set['watermark'] = 'TEST2'
Snippet.objects.create(**self.data_set)
request = self.factory.put(reverse('snippet-detail',
kwargs={'pk': self.snippet.pk}),
data=self.data_set)
response = SnippetDetail.as_view()(request, pk=self.snippet.pk)
self.assertEqual(response.status_code, 400)
code = settings.FRIENDLY_VALIDATOR_ERRORS['UniqueValidator']
self.assertEqual(int(response.data['watermark'][0]['code']), code)
| 49.539432
| 83
| 0.629457
| 1,777
| 15,704
| 5.408554
| 0.077096
| 0.054937
| 0.075538
| 0.099573
| 0.919051
| 0.900219
| 0.882426
| 0.8689
| 0.845386
| 0.845386
| 0
| 0.013158
| 0.235354
| 15,704
| 316
| 84
| 49.696203
| 0.787225
| 0.017894
| 0
| 0.792157
| 0
| 0
| 0.120439
| 0
| 0
| 0
| 0
| 0
| 0.25098
| 1
| 0.090196
| false
| 0
| 0.023529
| 0
| 0.121569
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e392d75b703f42ec16ec70e71c68835a23dfe55a
| 232
|
py
|
Python
|
tests/test_configuration_schema.py
|
liads/hacs-integration
|
b4c8ba11509d5dfcd2c130b8127d11acab5b338c
|
[
"MIT"
] | 1
|
2020-05-09T06:56:38.000Z
|
2020-05-09T06:56:38.000Z
|
tests/test_configuration_schema.py
|
ludeeus/integration
|
05c05291de5c938ce122f1d48c542938bceef83e
|
[
"MIT"
] | null | null | null |
tests/test_configuration_schema.py
|
ludeeus/integration
|
05c05291de5c938ce122f1d48c542938bceef83e
|
[
"MIT"
] | null | null | null |
"""HACS configuration schema Test Suite."""
# pylint: disable=missing-docstring
from custom_components.hacs.configuration_schema import hacs_config_combined
def test_combined():
assert isinstance(hacs_config_combined(), dict)
| 29
| 76
| 0.810345
| 28
| 232
| 6.464286
| 0.678571
| 0.187845
| 0.254144
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099138
| 232
| 7
| 77
| 33.142857
| 0.866029
| 0.310345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
540e51b785ce8c5473ae1567afca0ae658c8df0d
| 130
|
py
|
Python
|
examples/bug45.py
|
dimanil/fast_request
|
39f6769e15474aea1aa3aced6bb07a817a2df3ba
|
[
"MIT"
] | 857
|
2018-11-18T17:55:01.000Z
|
2022-03-31T23:39:10.000Z
|
examples/bug45.py
|
dimanil/fast_request
|
39f6769e15474aea1aa3aced6bb07a817a2df3ba
|
[
"MIT"
] | 181
|
2018-12-08T18:31:05.000Z
|
2022-03-29T01:40:02.000Z
|
examples/bug45.py
|
dimanil/fast_request
|
39f6769e15474aea1aa3aced6bb07a817a2df3ba
|
[
"MIT"
] | 92
|
2018-11-22T03:53:31.000Z
|
2022-03-21T10:54:24.000Z
|
import faster_than_requests as requests
print(requests.head("https://google.com"))
print(requests.head("http://18.204.81.190"))
| 21.666667
| 44
| 0.753846
| 20
| 130
| 4.8
| 0.75
| 0.270833
| 0.354167
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081967
| 0.061538
| 130
| 5
| 45
| 26
| 0.704918
| 0
| 0
| 0
| 0
| 0
| 0.292308
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
58376735ed9895e9498adb30463a7e24501816d2
| 84
|
py
|
Python
|
thenewboston_node/core/tests/fixtures/__init__.py
|
nishp77/thenewboston-node
|
158b1f1739b2c6c9c21c80e9da854ca141f1cf8f
|
[
"MIT"
] | null | null | null |
thenewboston_node/core/tests/fixtures/__init__.py
|
nishp77/thenewboston-node
|
158b1f1739b2c6c9c21c80e9da854ca141f1cf8f
|
[
"MIT"
] | null | null | null |
thenewboston_node/core/tests/fixtures/__init__.py
|
nishp77/thenewboston-node
|
158b1f1739b2c6c9c21c80e9da854ca141f1cf8f
|
[
"MIT"
] | null | null | null |
from .clients import * # noqa: F401, F403
from .mocks import * # noqa: F401, F403
| 28
| 42
| 0.666667
| 12
| 84
| 4.666667
| 0.583333
| 0.357143
| 0.5
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 0.214286
| 84
| 2
| 43
| 42
| 0.666667
| 0.392857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
587588474ff59b80639d51df45cab2014ece7b2a
| 3,825
|
py
|
Python
|
Calculator Printer.py
|
monkee52/NCSSChallenge
|
e8849085e0578268dc5ce022b39c7d499884d810
|
[
"BSD-2-Clause"
] | null | null | null |
Calculator Printer.py
|
monkee52/NCSSChallenge
|
e8849085e0578268dc5ce022b39c7d499884d810
|
[
"BSD-2-Clause"
] | null | null | null |
Calculator Printer.py
|
monkee52/NCSSChallenge
|
e8849085e0578268dc5ce022b39c7d499884d810
|
[
"BSD-2-Clause"
] | null | null | null |
# Enter your code for "Calculator Printer" here.
x = input("Number: ")
y = int(input("Width: "))
sx = y + 2
sy = 2 * y + 3
o = []
for i in x:
if i == "0":
p = []
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + "|")
p.append(" " + (" " * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
o.append(p)
elif i == "1":
p = []
p.append(" " + (" " * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + (" " * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + (" " * y) + " ")
o.append(p)
elif i == "2":
p = []
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + " ")
p.append(" " + ("-" * y) + " ")
o.append(p)
elif i == "3":
p = []
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
o.append(p)
elif i == "4":
p = []
p.append(" " + (" " * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + (" " * y) + " ")
o.append(p)
elif i == "5":
p = []
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + " ")
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
o.append(p)
elif i == "6":
p = []
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + " ")
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
o.append(p)
elif i == "7":
p = []
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + (" " * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + (" " * y) + " ")
o.append(p)
elif i == "8":
p = []
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
o.append(p)
elif i == "9":
p = []
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append("|" + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
for j in range(y):
p.append(" " + (" " * y) + "|")
p.append(" " + ("-" * y) + " ")
o.append(p)
for i in range(sy):
x = []
for j in range(len(o)):
x.append(o[j][i])
print(" ".join(x))
| 22.633136
| 48
| 0.239739
| 366
| 3,825
| 2.505464
| 0.101093
| 0.381679
| 0.436205
| 0.392585
| 0.845147
| 0.845147
| 0.845147
| 0.845147
| 0.845147
| 0.845147
| 0
| 0.006886
| 0.506405
| 3,825
| 168
| 49
| 22.767857
| 0.478814
| 0.012026
| 0
| 0.810811
| 0
| 0
| 0.046598
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.009009
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.